Searched refs:va5c1 (Results 1 – 25 of 30) sorted by relevance

/external/XNNPACK/src/f32-gemm/gen/
6x8-minmax-neon-dup-ld64.c
124 const float32x4_t va5c1 = vdupq_lane_f32(va5, 1); in xnn_f32_gemm_minmax_ukernel_6x8__neon_dup_ld64() local
130 vacc5x0123 = vmlaq_f32(vacc5x0123, va5c1, vb0123c1); in xnn_f32_gemm_minmax_ukernel_6x8__neon_dup_ld64()
136 vacc5x4567 = vmlaq_f32(vacc5x4567, va5c1, vb4567c1); in xnn_f32_gemm_minmax_ukernel_6x8__neon_dup_ld64()
6x8-minmax-neonfma-dup-ld64.c
124 const float32x4_t va5c1 = vdupq_lane_f32(va5, 1); in xnn_f32_gemm_minmax_ukernel_6x8__neonfma_dup_ld64() local
130 vacc5x0123 = vfmaq_f32(vacc5x0123, va5c1, vb0123c1); in xnn_f32_gemm_minmax_ukernel_6x8__neonfma_dup_ld64()
136 vacc5x4567 = vfmaq_f32(vacc5x4567, va5c1, vb4567c1); in xnn_f32_gemm_minmax_ukernel_6x8__neonfma_dup_ld64()
6x8-minmax-neonfma-dup-ld128.c
126 const float32x4_t va5c1 = vdupq_lane_f32(vget_low_f32(va5), 1); in xnn_f32_gemm_minmax_ukernel_6x8__neonfma_dup_ld128() local
132 vacc5x0123 = vfmaq_f32(vacc5x0123, va5c1, vb0123c1); in xnn_f32_gemm_minmax_ukernel_6x8__neonfma_dup_ld128()
138 vacc5x4567 = vfmaq_f32(vacc5x4567, va5c1, vb4567c1); in xnn_f32_gemm_minmax_ukernel_6x8__neonfma_dup_ld128()
6x8-minmax-wasmsimd-arm-splat.c
130 const v128_t va5c1 = wasm_v32x4_shuffle(va5, va5, 1, 1, 1, 1); in xnn_f32_gemm_minmax_ukernel_6x8__wasmsimd_arm_splat() local
140 vacc5x0123 = wasm_f32x4_add(vacc5x0123, wasm_f32x4_mul(va5c1, vb0123c1)); in xnn_f32_gemm_minmax_ukernel_6x8__wasmsimd_arm_splat()
146 vacc5x4567 = wasm_f32x4_add(vacc5x4567, wasm_f32x4_mul(va5c1, vb4567c1)); in xnn_f32_gemm_minmax_ukernel_6x8__wasmsimd_arm_splat()
6x8-minmax-neon-dup-ld128.c
126 const float32x4_t va5c1 = vdupq_lane_f32(vget_low_f32(va5), 1); in xnn_f32_gemm_minmax_ukernel_6x8__neon_dup_ld128() local
132 vacc5x0123 = vmlaq_f32(vacc5x0123, va5c1, vb0123c1); in xnn_f32_gemm_minmax_ukernel_6x8__neon_dup_ld128()
138 vacc5x4567 = vmlaq_f32(vacc5x4567, va5c1, vb4567c1); in xnn_f32_gemm_minmax_ukernel_6x8__neon_dup_ld128()
6x8-minmax-wasmsimd-x86-splat.c
128 const v128_t va5c1 = wasm_v32x4_shuffle(va5, va5, 1, 1, 1, 1); in xnn_f32_gemm_minmax_ukernel_6x8__wasmsimd_x86_splat() local
138 vacc5x0123 = wasm_f32x4_add(vacc5x0123, wasm_f32x4_mul(va5c1, vb0123c1)); in xnn_f32_gemm_minmax_ukernel_6x8__wasmsimd_x86_splat()
144 vacc5x4567 = wasm_f32x4_add(vacc5x4567, wasm_f32x4_mul(va5c1, vb4567c1)); in xnn_f32_gemm_minmax_ukernel_6x8__wasmsimd_x86_splat()
/external/XNNPACK/src/f32-gemm/gen-inc/
6x8inc-minmax-neon-dup-ld64.c
126 const float32x4_t va5c1 = vdupq_lane_f32(va5, 1); in xnn_f32_gemminc_minmax_ukernel_6x8__neon_dup_ld64() local
132 vacc5x0123 = vmlaq_f32(vacc5x0123, va5c1, vb0123c1); in xnn_f32_gemminc_minmax_ukernel_6x8__neon_dup_ld64()
138 vacc5x4567 = vmlaq_f32(vacc5x4567, va5c1, vb4567c1); in xnn_f32_gemminc_minmax_ukernel_6x8__neon_dup_ld64()
6x8inc-minmax-neonfma-dup-ld64.c
126 const float32x4_t va5c1 = vdupq_lane_f32(va5, 1); in xnn_f32_gemminc_minmax_ukernel_6x8__neonfma_dup_ld64() local
132 vacc5x0123 = vfmaq_f32(vacc5x0123, va5c1, vb0123c1); in xnn_f32_gemminc_minmax_ukernel_6x8__neonfma_dup_ld64()
138 vacc5x4567 = vfmaq_f32(vacc5x4567, va5c1, vb4567c1); in xnn_f32_gemminc_minmax_ukernel_6x8__neonfma_dup_ld64()
6x8inc-minmax-neon-dup-ld128.c
128 const float32x4_t va5c1 = vdupq_lane_f32(vget_low_f32(va5), 1); in xnn_f32_gemminc_minmax_ukernel_6x8__neon_dup_ld128() local
134 vacc5x0123 = vmlaq_f32(vacc5x0123, va5c1, vb0123c1); in xnn_f32_gemminc_minmax_ukernel_6x8__neon_dup_ld128()
140 vacc5x4567 = vmlaq_f32(vacc5x4567, va5c1, vb4567c1); in xnn_f32_gemminc_minmax_ukernel_6x8__neon_dup_ld128()
6x8inc-minmax-neonfma-dup-ld128.c
128 const float32x4_t va5c1 = vdupq_lane_f32(vget_low_f32(va5), 1); in xnn_f32_gemminc_minmax_ukernel_6x8__neonfma_dup_ld128() local
134 vacc5x0123 = vfmaq_f32(vacc5x0123, va5c1, vb0123c1); in xnn_f32_gemminc_minmax_ukernel_6x8__neonfma_dup_ld128()
140 vacc5x4567 = vfmaq_f32(vacc5x4567, va5c1, vb4567c1); in xnn_f32_gemminc_minmax_ukernel_6x8__neonfma_dup_ld128()
6x8inc-minmax-wasmsimd-x86-splat.c
130 const v128_t va5c1 = wasm_v32x4_shuffle(va5, va5, 1, 1, 1, 1); in xnn_f32_gemminc_minmax_ukernel_6x8__wasmsimd_x86_splat() local
140 vacc5x0123 = wasm_f32x4_add(vacc5x0123, wasm_f32x4_mul(va5c1, vb0123c1)); in xnn_f32_gemminc_minmax_ukernel_6x8__wasmsimd_x86_splat()
146 vacc5x4567 = wasm_f32x4_add(vacc5x4567, wasm_f32x4_mul(va5c1, vb4567c1)); in xnn_f32_gemminc_minmax_ukernel_6x8__wasmsimd_x86_splat()
6x8inc-minmax-wasmsimd-arm-splat.c
132 const v128_t va5c1 = wasm_v32x4_shuffle(va5, va5, 1, 1, 1, 1); in xnn_f32_gemminc_minmax_ukernel_6x8__wasmsimd_arm_splat() local
142 vacc5x0123 = wasm_f32x4_add(vacc5x0123, wasm_f32x4_mul(va5c1, vb0123c1)); in xnn_f32_gemminc_minmax_ukernel_6x8__wasmsimd_arm_splat()
148 vacc5x4567 = wasm_f32x4_add(vacc5x4567, wasm_f32x4_mul(va5c1, vb4567c1)); in xnn_f32_gemminc_minmax_ukernel_6x8__wasmsimd_arm_splat()
/external/XNNPACK/src/f32-igemm/gen/
6x8-minmax-neonfma-dup-ld64.c
152 const float32x4_t va5c1 = vdupq_lane_f32(va5, 1); in xnn_f32_igemm_minmax_ukernel_6x8__neonfma_dup_ld64() local
158 vacc5x0123 = vfmaq_f32(vacc5x0123, va5c1, vb0123c1); in xnn_f32_igemm_minmax_ukernel_6x8__neonfma_dup_ld64()
164 vacc5x4567 = vfmaq_f32(vacc5x4567, va5c1, vb4567c1); in xnn_f32_igemm_minmax_ukernel_6x8__neonfma_dup_ld64()
6x8-minmax-neon-dup-ld64.c
152 const float32x4_t va5c1 = vdupq_lane_f32(va5, 1); in xnn_f32_igemm_minmax_ukernel_6x8__neon_dup_ld64() local
158 vacc5x0123 = vmlaq_f32(vacc5x0123, va5c1, vb0123c1); in xnn_f32_igemm_minmax_ukernel_6x8__neon_dup_ld64()
164 vacc5x4567 = vmlaq_f32(vacc5x4567, va5c1, vb4567c1); in xnn_f32_igemm_minmax_ukernel_6x8__neon_dup_ld64()
6x8-minmax-neon-dup-ld128.c
154 const float32x4_t va5c1 = vdupq_lane_f32(vget_low_f32(va5), 1); in xnn_f32_igemm_minmax_ukernel_6x8__neon_dup_ld128() local
160 vacc5x0123 = vmlaq_f32(vacc5x0123, va5c1, vb0123c1); in xnn_f32_igemm_minmax_ukernel_6x8__neon_dup_ld128()
166 vacc5x4567 = vmlaq_f32(vacc5x4567, va5c1, vb4567c1); in xnn_f32_igemm_minmax_ukernel_6x8__neon_dup_ld128()
6x8-minmax-neonfma-dup-ld128.c
154 const float32x4_t va5c1 = vdupq_lane_f32(vget_low_f32(va5), 1); in xnn_f32_igemm_minmax_ukernel_6x8__neonfma_dup_ld128() local
160 vacc5x0123 = vfmaq_f32(vacc5x0123, va5c1, vb0123c1); in xnn_f32_igemm_minmax_ukernel_6x8__neonfma_dup_ld128()
166 vacc5x4567 = vfmaq_f32(vacc5x4567, va5c1, vb4567c1); in xnn_f32_igemm_minmax_ukernel_6x8__neonfma_dup_ld128()
6x8-minmax-wasmsimd-arm-splat.c
158 const v128_t va5c1 = wasm_v32x4_shuffle(va5, va5, 1, 1, 1, 1); in xnn_f32_igemm_minmax_ukernel_6x8__wasmsimd_arm_splat() local
168 vacc5x0123 = wasm_f32x4_add(vacc5x0123, wasm_f32x4_mul(va5c1, vb0123c1)); in xnn_f32_igemm_minmax_ukernel_6x8__wasmsimd_arm_splat()
174 vacc5x4567 = wasm_f32x4_add(vacc5x4567, wasm_f32x4_mul(va5c1, vb4567c1)); in xnn_f32_igemm_minmax_ukernel_6x8__wasmsimd_arm_splat()
6x8-minmax-wasmsimd-x86-splat.c
156 const v128_t va5c1 = wasm_v32x4_shuffle(va5, va5, 1, 1, 1, 1); in xnn_f32_igemm_minmax_ukernel_6x8__wasmsimd_x86_splat() local
166 vacc5x0123 = wasm_f32x4_add(vacc5x0123, wasm_f32x4_mul(va5c1, vb0123c1)); in xnn_f32_igemm_minmax_ukernel_6x8__wasmsimd_x86_splat()
172 vacc5x4567 = wasm_f32x4_add(vacc5x4567, wasm_f32x4_mul(va5c1, vb4567c1)); in xnn_f32_igemm_minmax_ukernel_6x8__wasmsimd_x86_splat()
/external/XNNPACK/src/f16-gemm/gen/
6x16-minmax-neonfp16arith-ld64.c
156 const float16x8_t va5c1 = vdupq_lane_f16(va5, 1); in xnn_f16_gemm_minmax_ukernel_6x16__neonfp16arith_ld64() local
163 vacc5x01234567 = vfmaq_f16(vacc5x01234567, va5c1, vb01234567c1); in xnn_f16_gemm_minmax_ukernel_6x16__neonfp16arith_ld64()
169 vacc5x89ABCDEF = vfmaq_f16(vacc5x89ABCDEF, va5c1, vb89ABCDEFc1); in xnn_f16_gemm_minmax_ukernel_6x16__neonfp16arith_ld64()
6x8-minmax-neonfp16arith-ld64.c
130 const float16x8_t va5c1 = vdupq_lane_f16(va5, 1); in xnn_f16_gemm_minmax_ukernel_6x8__neonfp16arith_ld64() local
137 vacc5x01234567 = vfmaq_f16(vacc5x01234567, va5c1, vb01234567c1); in xnn_f16_gemm_minmax_ukernel_6x8__neonfp16arith_ld64()
/external/XNNPACK/src/f16-igemm/gen/
6x16-minmax-neonfp16arith-ld64.c
182 const float16x8_t va5c1 = vdupq_lane_f16(va5, 1); in xnn_f16_igemm_minmax_ukernel_6x16__neonfp16arith_ld64() local
189 vacc5x01234567 = vfmaq_f16(vacc5x01234567, va5c1, vb01234567c1); in xnn_f16_igemm_minmax_ukernel_6x16__neonfp16arith_ld64()
195 vacc5x89ABCDEF = vfmaq_f16(vacc5x89ABCDEF, va5c1, vb89ABCDEFc1); in xnn_f16_igemm_minmax_ukernel_6x16__neonfp16arith_ld64()
6x8-minmax-neonfp16arith-ld64.c
156 const float16x8_t va5c1 = vdupq_lane_f16(va5, 1); in xnn_f16_igemm_minmax_ukernel_6x8__neonfp16arith_ld64() local
163 vacc5x01234567 = vfmaq_f16(vacc5x01234567, va5c1, vb01234567c1); in xnn_f16_igemm_minmax_ukernel_6x8__neonfp16arith_ld64()
/external/XNNPACK/src/f16-gemm/gen-inc/
6x16inc-minmax-neonfp16arith-ld64.c
158 const float16x8_t va5c1 = vdupq_lane_f16(va5, 1); in xnn_f16_gemminc_minmax_ukernel_6x16__neonfp16arith_ld64() local
165 vacc5x01234567 = vfmaq_f16(vacc5x01234567, va5c1, vb01234567c1); in xnn_f16_gemminc_minmax_ukernel_6x16__neonfp16arith_ld64()
171 vacc5x89ABCDEF = vfmaq_f16(vacc5x89ABCDEF, va5c1, vb89ABCDEFc1); in xnn_f16_gemminc_minmax_ukernel_6x16__neonfp16arith_ld64()
6x8inc-minmax-neonfp16arith-ld64.c
132 const float16x8_t va5c1 = vdupq_lane_f16(va5, 1); in xnn_f16_gemminc_minmax_ukernel_6x8__neonfp16arith_ld64() local
139 vacc5x01234567 = vfmaq_f16(vacc5x01234567, va5c1, vb01234567c1); in xnn_f16_gemminc_minmax_ukernel_6x8__neonfp16arith_ld64()
8x16inc-minmax-neonfp16arith-ld64.c
190 const float16x8_t va5c1 = vdupq_lane_f16(va5, 1); in xnn_f16_gemminc_minmax_ukernel_8x16__neonfp16arith_ld64() local
199 vacc5x01234567 = vfmaq_f16(vacc5x01234567, va5c1, vb01234567c1); in xnn_f16_gemminc_minmax_ukernel_8x16__neonfp16arith_ld64()
207 vacc5x89ABCDEF = vfmaq_f16(vacc5x89ABCDEF, va5c1, vb89ABCDEFc1); in xnn_f16_gemminc_minmax_ukernel_8x16__neonfp16arith_ld64()
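All of the hits above are instances of the same inner-loop step in XNNPACK's six-row GEMM/IGEMM microkernels: va5c1 broadcasts the second of the two A values loaded for row 5 (the sixth row of the tile) so it can be multiplied against the B vectors of that k-column and accumulated into row 5's partial sums. The sketch below is illustrative only, not the actual XNNPACK source; it assumes an AArch64 target with <arm_neon.h>, and the helper name accumulate_row5_c1 and its pointer-based interface are invented for the example.

/* Minimal sketch of the va5c1 broadcast-and-accumulate step (f32, ld64 flavor). */
#include <arm_neon.h>

static void accumulate_row5_c1(
    const float* a5,          /* two consecutive A values of row 5: a5[k], a5[k+1] */
    const float* b_c1,        /* eight B values of column k+1, as two vectors of 4 */
    float32x4_t* vacc5x0123,  /* running accumulators for row 5 of C               */
    float32x4_t* vacc5x4567)
{
  const float32x2_t va5 = vld1_f32(a5);        /* "ld64": load 64 bits (2 floats) of A */
  const float32x4_t vb0123c1 = vld1q_f32(b_c1);
  const float32x4_t vb4567c1 = vld1q_f32(b_c1 + 4);

  /* Broadcast lane 1 of va5 -- the va5c1 these search results point at. */
  const float32x4_t va5c1 = vdupq_lane_f32(va5, 1);

  /* FMA form, as in the neonfma kernels; the plain neon kernels use vmlaq_f32 here. */
  *vacc5x0123 = vfmaq_f32(*vacc5x0123, va5c1, vb0123c1);
  *vacc5x4567 = vfmaq_f32(*vacc5x4567, va5c1, vb4567c1);
}

The ld128 variants load four A values at once and therefore broadcast from the low half with vdupq_lane_f32(vget_low_f32(va5), 1); the wasmsimd splat kernels express the same broadcast as wasm_v32x4_shuffle(va5, va5, 1, 1, 1, 1) followed by wasm_f32x4_mul and wasm_f32x4_add; and the f16 kernels do it in half precision with vdupq_lane_f16 and vfmaq_f16.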
