
Searched refs:vb01c1 (Results 1 – 4 of 4) sorted by relevance

/external/XNNPACK/src/f32-gemm/gen/
4x2-neonfma-lane-ld64.c (all matches in xnn_f32_gemm_ukernel_4x2__neonfma_lane_ld64()):
     90  const float32x2_t vb01c1 = vld1_f32(w); w += 2;   (local definition)
     93  vacc0x01 = vfma_lane_f32(vacc0x01, vb01c1, va0, 1);
     94  vacc1x01 = vfma_lane_f32(vacc1x01, vb01c1, va1, 1);
     95  vacc2x01 = vfma_lane_f32(vacc2x01, vb01c1, va2, 1);
     96  vacc3x01 = vfma_lane_f32(vacc3x01, vb01c1, va3, 1);
    102  vacc0x01 = vfma_f32(vacc0x01, va0c1, vb01c1);
    103  vacc1x01 = vfma_f32(vacc1x01, va1c1, vb01c1);
    104  vacc2x01 = vfma_f32(vacc2x01, va2c1, vb01c1);
    105  vacc3x01 = vfma_f32(vacc3x01, va3c1, vb01c1);
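The two clusters of matches in this file are the two sides of an architecture check in the generated kernel: vfma_lane_f32 is an AArch64-only intrinsic, so the 32-bit NEON+FMA path first broadcasts the A lane with vdup_lane_f32 (yielding the va0c1 ... va3c1 operands seen at lines 102-105) and accumulates with plain vfma_f32. Below is a minimal, self-contained sketch of the 4x2 "lane-ld64" inner-loop pattern, for illustration only, not the XNNPACK source: the name gemm_4x2_lane_ld64_sketch and its simplifications are mine (k assumed even, accumulators start at zero rather than at the bias values XNNPACK packs in front of the weights, and no min/max clamping or m/n edge handling).

    #include <arm_neon.h>
    #include <stddef.h>

    /* Sketch: C[4x2] = A[4xk] * B[kx2]. B is pre-packed at w,
       two floats per k-step, matching the vb01c0/vb01c1 loads above. */
    static void gemm_4x2_lane_ld64_sketch(
        size_t k,                                /* reduction length, even */
        const float* a0, const float* a1, const float* a2, const float* a3,
        const float* w,
        float* c0, float* c1, float* c2, float* c3) {
      float32x2_t vacc0x01 = vdup_n_f32(0.0f);
      float32x2_t vacc1x01 = vdup_n_f32(0.0f);
      float32x2_t vacc2x01 = vdup_n_f32(0.0f);
      float32x2_t vacc3x01 = vdup_n_f32(0.0f);
      for (; k >= 2; k -= 2) {
        /* "ld64": one 64-bit load brings two k-elements per A row. */
        const float32x2_t va0 = vld1_f32(a0); a0 += 2;
        const float32x2_t va1 = vld1_f32(a1); a1 += 2;
        const float32x2_t va2 = vld1_f32(a2); a2 += 2;
        const float32x2_t va3 = vld1_f32(a3); a3 += 2;
        /* Packed B row for k-step 0, then k-step 1 (the vb01c1 above). */
        const float32x2_t vb01c0 = vld1_f32(w); w += 2;
        const float32x2_t vb01c1 = vld1_f32(w); w += 2;
    #if defined(__aarch64__)
        /* A64: fused multiply-add with lane select, acc += vb01 * va[lane]. */
        vacc0x01 = vfma_lane_f32(vacc0x01, vb01c0, va0, 0);
        vacc1x01 = vfma_lane_f32(vacc1x01, vb01c0, va1, 0);
        vacc2x01 = vfma_lane_f32(vacc2x01, vb01c0, va2, 0);
        vacc3x01 = vfma_lane_f32(vacc3x01, vb01c0, va3, 0);
        vacc0x01 = vfma_lane_f32(vacc0x01, vb01c1, va0, 1);
        vacc1x01 = vfma_lane_f32(vacc1x01, vb01c1, va1, 1);
        vacc2x01 = vfma_lane_f32(vacc2x01, vb01c1, va2, 1);
        vacc3x01 = vfma_lane_f32(vacc3x01, vb01c1, va3, 1);
    #else
        /* A32 NEON+VFPv4: no vfma_lane_f32, so broadcast the lane first. */
        const float32x2_t va0c0 = vdup_lane_f32(va0, 0);
        const float32x2_t va1c0 = vdup_lane_f32(va1, 0);
        const float32x2_t va2c0 = vdup_lane_f32(va2, 0);
        const float32x2_t va3c0 = vdup_lane_f32(va3, 0);
        vacc0x01 = vfma_f32(vacc0x01, va0c0, vb01c0);
        vacc1x01 = vfma_f32(vacc1x01, va1c0, vb01c0);
        vacc2x01 = vfma_f32(vacc2x01, va2c0, vb01c0);
        vacc3x01 = vfma_f32(vacc3x01, va3c0, vb01c0);
        const float32x2_t va0c1 = vdup_lane_f32(va0, 1);
        const float32x2_t va1c1 = vdup_lane_f32(va1, 1);
        const float32x2_t va2c1 = vdup_lane_f32(va2, 1);
        const float32x2_t va3c1 = vdup_lane_f32(va3, 1);
        vacc0x01 = vfma_f32(vacc0x01, va0c1, vb01c1);
        vacc1x01 = vfma_f32(vacc1x01, va1c1, vb01c1);
        vacc2x01 = vfma_f32(vacc2x01, va2c1, vb01c1);
        vacc3x01 = vfma_f32(vacc3x01, va3c1, vb01c1);
    #endif
      }
      vst1_f32(c0, vacc0x01);
      vst1_f32(c1, vacc1x01);
      vst1_f32(c2, vacc2x01);
      vst1_f32(c3, vacc3x01);
    }

On AArch64 the lane form compiles to FMLA (vector, by element), so the four A broadcasts stay implicit in the instruction encoding instead of costing separate DUPs.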
4x2-neon-lane-ld64.c (all matches in xnn_f32_gemm_ukernel_4x2__neon_lane_ld64()):
     79  const float32x2_t vb01c1 = vld1_f32(w); w += 2;   (local definition)
     81  vacc0x01 = vmla_lane_f32(vacc0x01, vb01c1, va0, 1);
     82  vacc1x01 = vmla_lane_f32(vacc1x01, vb01c1, va1, 1);
     83  vacc2x01 = vmla_lane_f32(vacc2x01, vb01c1, va2, 1);
     84  vacc3x01 = vmla_lane_f32(vacc3x01, vb01c1, va3, 1);
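The -neon- (non-FMA) twin of the kernel shows only the lane form: vmla_lane_f32 is available on 32-bit NEON as well, so no vdup fallback is needed. The trade-off is numeric, not structural: NEON's VMLA.F32 rounds between the multiply and the add, while the VFMA/FMLA behind the vfma_* intrinsics rounds once. XNNPACK generates both flavors and, as I read its setup code, selects between them based on the CPU features detected at runtime.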
/external/XNNPACK/src/f32-igemm/gen/
4x2-neonfma-lane-ld64.c (all matches in xnn_f32_igemm_ukernel_4x2__neonfma_lane_ld64()):
    112  const float32x2_t vb01c1 = vld1_f32(w); w += 2;   (local definition)
    115  vacc0x01 = vfma_lane_f32(vacc0x01, vb01c1, va0, 1);
    116  vacc1x01 = vfma_lane_f32(vacc1x01, vb01c1, va1, 1);
    117  vacc2x01 = vfma_lane_f32(vacc2x01, vb01c1, va2, 1);
    118  vacc3x01 = vfma_lane_f32(vacc3x01, vb01c1, va3, 1);
    124  vacc0x01 = vfma_f32(vacc0x01, va0c1, vb01c1);
    125  vacc1x01 = vfma_f32(vacc1x01, va1c1, vb01c1);
    126  vacc2x01 = vfma_f32(vacc2x01, va2c1, vb01c1);
    127  vacc3x01 = vfma_f32(vacc3x01, va3c1, vb01c1);
4x2-neon-lane-ld64.c (all matches in xnn_f32_igemm_ukernel_4x2__neon_lane_ld64()):
    101  const float32x2_t vb01c1 = vld1_f32(w); w += 2;   (local definition)
    103  vacc0x01 = vmla_lane_f32(vacc0x01, vb01c1, va0, 1);
    104  vacc1x01 = vmla_lane_f32(vacc1x01, vb01c1, va1, 1);
    105  vacc2x01 = vmla_lane_f32(vacc2x01, vb01c1, va2, 1);
    106  vacc3x01 = vmla_lane_f32(vacc3x01, vb01c1, va3, 1);
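The f32-igemm matches are the same arithmetic at shifted line numbers. In XNNPACK's naming, igemm is the indirect-GEMM variant: the rows of A arrive through an indirection buffer of row pointers (this is how convolution is lowered onto the GEMM micro-kernels), so only the pointer setup ahead of the loop differs and the vb01c1 inner loop is shared with the direct f32-gemm kernels above.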