
Lines Matching refs:float32x2_t

All matches are in xnn_f32_igemm_minmax_ukernel_4x2__neonfma_lane_ld64():

 59  float32x2_t vacc0x01 = vld1_f32(w); w += 2;
 60  float32x2_t vacc1x01 = vacc0x01;
 61  float32x2_t vacc2x01 = vacc0x01;
 62  float32x2_t vacc3x01 = vacc0x01;
 90  const float32x2_t va0 = vld1_f32(a0); a0 += 2;
 91  const float32x2_t va1 = vld1_f32(a1); a1 += 2;
 92  const float32x2_t va2 = vld1_f32(a2); a2 += 2;
 93  const float32x2_t va3 = vld1_f32(a3); a3 += 2;
 95  const float32x2_t vb01c0 = vld1_f32(w); w += 2;
103  const float32x2_t va0c0 = vdup_lane_f32(va0, 0);
104  const float32x2_t va1c0 = vdup_lane_f32(va1, 0);
105  const float32x2_t va2c0 = vdup_lane_f32(va2, 0);
106  const float32x2_t va3c0 = vdup_lane_f32(va3, 0);
112  const float32x2_t vb01c1 = vld1_f32(w); w += 2;
120  const float32x2_t va0c1 = vdup_lane_f32(va0, 1);
121  const float32x2_t va1c1 = vdup_lane_f32(va1, 1);
122  const float32x2_t va2c1 = vdup_lane_f32(va2, 1);
123  const float32x2_t va3c1 = vdup_lane_f32(va3, 1);
131  const float32x2_t va0 = vld1_dup_f32(a0);
132  const float32x2_t va1 = vld1_dup_f32(a1);
133  const float32x2_t va2 = vld1_dup_f32(a2);
134  const float32x2_t va3 = vld1_dup_f32(a3);
136  const float32x2_t vb01 = vld1_f32(w); w += 2;
146  const float32x2_t vmax = vld1_dup_f32(&params->scalar.max);
152  const float32x2_t vmin = vld1_dup_f32(&params->scalar.min);
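The matched lines trace the float32x2_t usage pattern of this micro-kernel: the 2-wide accumulators are initialized from the packed bias (lines 59-62), each main-loop step loads two floats of A per row and a 2-wide strip of packed weights, broadcasts each A lane with vdup_lane_f32 and fuses it into the accumulators (lines 90-123), a single leftover element is handled with vld1_dup_f32 (lines 131-136), and the results are clamped with the min/max parameters (lines 146, 152). Below is a minimal sketch of that pattern, not the XNNPACK kernel itself: it is a simplified 1x2 GEMM tile (the real kernel keeps four row accumulators and uses IGEMM indirection pointers), the function and parameter names are hypothetical, and it assumes a target with NEON FMA support (matching the "neonfma" in the kernel name).

#include <arm_neon.h>
#include <stddef.h>

// Sketch only: accumulate a 1x2 output tile c[0..1] from kc elements of a
// against weights packed as a 2-float bias followed by kc rows of 2 floats,
// then clamp to [min_val, max_val].
static void f32_gemm_1x2_sketch(
    size_t kc,
    const float* a,        // input row, kc floats
    const float* w,        // packed weights: 2 bias floats, then kc*2 floats
    float* c,              // output: 2 floats
    float min_val, float max_val)
{
  // Initialize the 2-wide accumulator from the packed bias (cf. lines 59-62).
  float32x2_t vacc = vld1_f32(w); w += 2;

  // Main loop: consume 2 elements of a per iteration (cf. lines 90-123).
  size_t k = kc;
  for (; k >= 2; k -= 2) {
    const float32x2_t va = vld1_f32(a); a += 2;

    const float32x2_t vb_c0 = vld1_f32(w); w += 2;
    const float32x2_t va_c0 = vdup_lane_f32(va, 0);  // broadcast lane 0
    vacc = vfma_f32(vacc, va_c0, vb_c0);

    const float32x2_t vb_c1 = vld1_f32(w); w += 2;
    const float32x2_t va_c1 = vdup_lane_f32(va, 1);  // broadcast lane 1
    vacc = vfma_f32(vacc, va_c1, vb_c1);
  }

  // Remainder: one leftover element, broadcast with vld1_dup_f32
  // (cf. lines 131-136).
  if (k != 0) {
    const float32x2_t va = vld1_dup_f32(a);
    const float32x2_t vb = vld1_f32(w); w += 2;
    vacc = vfma_f32(vacc, va, vb);
  }

  // Clamp to [min_val, max_val] (cf. lines 146 and 152).
  const float32x2_t vmax = vdup_n_f32(max_val);
  const float32x2_t vmin = vdup_n_f32(min_val);
  vacc = vmin_f32(vacc, vmax);
  vacc = vmax_f32(vacc, vmin);

  vst1_f32(c, vacc);
}

The real 4x2 kernel simply replicates this per-row work four times (vacc0x01..vacc3x01, a0..a3) so that one pass over the packed weights w feeds all four output rows, which is why the bias load at line 59 is shared by the other three accumulators.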