Searched refs:vacc01n3 (Results 1 – 5 of 5) sorted by relevance
/external/XNNPACK/src/f32-spmm/gen/

4x4-minmax-neonfma.c   in xnn_f32_spmm_minmax_ukernel_4x4__neonfma():
    122  float32x2_t vacc01n3 = vld1_dup_f32(w); w += 1;                 (local)
    133  vacc01n3 = vfma_laneq_f32(vacc01n3, vi01, vw, 3);
    139  float32x2_t vout01n3 = vmin_f32(vacc01n3, vget_low_f32(vmax));

8x4-minmax-neonfma.c   in xnn_f32_spmm_minmax_ukernel_8x4__neonfma():
    221  float32x2_t vacc01n3 = vld1_dup_f32(w); w += 1;                 (local)
    232  vacc01n3 = vfma_laneq_f32(vacc01n3, vi01, vw, 3);
    238  float32x2_t vout01n3 = vmin_f32(vacc01n3, vget_low_f32(vmax));

12x4-minmax-neonfma.c  in xnn_f32_spmm_minmax_ukernel_12x4__neonfma():
    347  float32x2_t vacc01n3 = vld1_dup_f32(w); w += 1;                 (local)
    358  vacc01n3 = vfma_laneq_f32(vacc01n3, vi01, vw, 3);
    364  float32x2_t vout01n3 = vmin_f32(vacc01n3, vget_low_f32(vmax));

16x4-minmax-neonfma.c  in xnn_f32_spmm_minmax_ukernel_16x4__neonfma():
    374  float32x2_t vacc01n3 = vld1_dup_f32(w); w += 1;                 (local)
    385  vacc01n3 = vfma_laneq_f32(vacc01n3, vi01, vw, 3);
    391  float32x2_t vout01n3 = vmin_f32(vacc01n3, vget_low_f32(vmax));

32x4-minmax-neonfma.c  in xnn_f32_spmm_minmax_ukernel_32x4__neonfma():
    637  float32x2_t vacc01n3 = vld1_dup_f32(w); w += 1;                 (local)
    648  vacc01n3 = vfma_laneq_f32(vacc01n3, vi01, vw, 3);
    654  float32x2_t vout01n3 = vmin_f32(vacc01n3, vget_low_f32(vmax));
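All five hits show the same three-step use of vacc01n3: broadcast-load an initial value from the packed weight/bias stream, fused multiply-accumulate against lane 3 of a 4-wide weight vector, then clamp with the upper bound. The sketch below isolates that pattern for reference. It is a minimal sketch, not a copy of any XNNPACK kernel: the helper name and parameter layout are illustrative, and it assumes an AArch64 target where vfma_laneq_f32 is available.

    // Minimal sketch of the recurring vacc01n3 pattern (illustrative, not the
    // real kernel): init, lane-3 FMA, max-bound clamp.
    #include <arm_neon.h>

    static inline float32x2_t spmm_acc_n3_step(
        const float* w,        // points at the initial/bias value for column n3
        float32x2_t vi01,      // input elements for rows 0 and 1
        float32x4_t vw,        // four packed weights; lane 3 feeds column n3
        float32x4_t vmax)      // upper clamp, duplicated across all lanes
    {
      float32x2_t vacc01n3 = vld1_dup_f32(w);                        // broadcast *w into both lanes
      vacc01n3 = vfma_laneq_f32(vacc01n3, vi01, vw, 3);              // acc += vi01 * vw[3]
      float32x2_t vout01n3 = vmin_f32(vacc01n3, vget_low_f32(vmax)); // apply the max bound
      return vout01n3;
    }

Judging from the identifier convention (vacc01n3 = rows 0/1, output column n3) and the file names, the generated MR×4 kernels repeat this sequence once per row pair and output column; the hits above are the n3 column of the row-0/1 pair in each variant.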