Home
last modified time | relevance | path

Searched refs: va0c0 (Results 1 – 25 of 98) sorted by relevance

1234

/external/XNNPACK/src/f32-gemm/gen-inc/
D1x8inc-minmax-neon-dup-ld64.c55 const float32x4_t va0c0 = vdupq_lane_f32(va0, 0); in xnn_f32_gemminc_minmax_ukernel_1x8__neon_dup_ld64() local
56 vacc0x0123 = vmlaq_f32(vacc0x0123, va0c0, vb0123c0); in xnn_f32_gemminc_minmax_ukernel_1x8__neon_dup_ld64()
57 vacc0x4567 = vmlaq_f32(vacc0x4567, va0c0, vb4567c0); in xnn_f32_gemminc_minmax_ukernel_1x8__neon_dup_ld64()
D1x8inc-minmax-neonfma-dup-ld64.c55 const float32x4_t va0c0 = vdupq_lane_f32(va0, 0); in xnn_f32_gemminc_minmax_ukernel_1x8__neonfma_dup_ld64() local
56 vacc0x0123 = vfmaq_f32(vacc0x0123, va0c0, vb0123c0); in xnn_f32_gemminc_minmax_ukernel_1x8__neonfma_dup_ld64()
57 vacc0x4567 = vfmaq_f32(vacc0x4567, va0c0, vb4567c0); in xnn_f32_gemminc_minmax_ukernel_1x8__neonfma_dup_ld64()
D1x8inc-minmax-wasmsimd-arm-splat.c55 const v128_t va0c0 = wasm_v32x4_shuffle(va0, va0, 0, 0, 0, 0); in xnn_f32_gemminc_minmax_ukernel_1x8__wasmsimd_arm_splat() local
60 vacc0x0123 = wasm_f32x4_add(vacc0x0123, wasm_f32x4_mul(va0c0, vb0123c0)); in xnn_f32_gemminc_minmax_ukernel_1x8__wasmsimd_arm_splat()
61 vacc0x4567 = wasm_f32x4_add(vacc0x4567, wasm_f32x4_mul(va0c0, vb4567c0)); in xnn_f32_gemminc_minmax_ukernel_1x8__wasmsimd_arm_splat()
D1x8inc-minmax-wasmsimd-x86-splat.c53 const v128_t va0c0 = wasm_v32x4_shuffle(va0, va0, 0, 0, 0, 0); in xnn_f32_gemminc_minmax_ukernel_1x8__wasmsimd_x86_splat() local
58 vacc0x0123 = wasm_f32x4_add(vacc0x0123, wasm_f32x4_mul(va0c0, vb0123c0)); in xnn_f32_gemminc_minmax_ukernel_1x8__wasmsimd_x86_splat()
59 vacc0x4567 = wasm_f32x4_add(vacc0x4567, wasm_f32x4_mul(va0c0, vb4567c0)); in xnn_f32_gemminc_minmax_ukernel_1x8__wasmsimd_x86_splat()
/external/XNNPACK/src/f32-gemm/gen/
D1x8-minmax-neonfma-dup-ld64.c53 const float32x4_t va0c0 = vdupq_lane_f32(va0, 0); in xnn_f32_gemm_minmax_ukernel_1x8__neonfma_dup_ld64() local
54 vacc0x0123 = vfmaq_f32(vacc0x0123, va0c0, vb0123c0); in xnn_f32_gemm_minmax_ukernel_1x8__neonfma_dup_ld64()
55 vacc0x4567 = vfmaq_f32(vacc0x4567, va0c0, vb4567c0); in xnn_f32_gemm_minmax_ukernel_1x8__neonfma_dup_ld64()
D1x8-minmax-neon-dup-ld64.c53 const float32x4_t va0c0 = vdupq_lane_f32(va0, 0); in xnn_f32_gemm_minmax_ukernel_1x8__neon_dup_ld64() local
54 vacc0x0123 = vmlaq_f32(vacc0x0123, va0c0, vb0123c0); in xnn_f32_gemm_minmax_ukernel_1x8__neon_dup_ld64()
55 vacc0x4567 = vmlaq_f32(vacc0x4567, va0c0, vb4567c0); in xnn_f32_gemm_minmax_ukernel_1x8__neon_dup_ld64()
D1x8-wasmsimd-splat.c51 const v128_t va0c0 = wasm_v32x4_shuffle(va0, va0, 0, 0, 0, 0); in xnn_f32_gemm_ukernel_1x8__wasmsimd_splat() local
56 vacc0x0123 = wasm_f32x4_add(vacc0x0123, wasm_f32x4_mul(va0c0, vb0123c0)); in xnn_f32_gemm_ukernel_1x8__wasmsimd_splat()
57 vacc0x4567 = wasm_f32x4_add(vacc0x4567, wasm_f32x4_mul(va0c0, vb4567c0)); in xnn_f32_gemm_ukernel_1x8__wasmsimd_splat()
D1x8-relu-wasmsimd-splat.c51 const v128_t va0c0 = wasm_v32x4_shuffle(va0, va0, 0, 0, 0, 0); in xnn_f32_gemm_relu_ukernel_1x8__wasmsimd_splat() local
56 vacc0x0123 = wasm_f32x4_add(vacc0x0123, wasm_f32x4_mul(va0c0, vb0123c0)); in xnn_f32_gemm_relu_ukernel_1x8__wasmsimd_splat()
57 vacc0x4567 = wasm_f32x4_add(vacc0x4567, wasm_f32x4_mul(va0c0, vb4567c0)); in xnn_f32_gemm_relu_ukernel_1x8__wasmsimd_splat()
D1x8-minmax-wasmsimd-arm-splat.c53 const v128_t va0c0 = wasm_v32x4_shuffle(va0, va0, 0, 0, 0, 0); in xnn_f32_gemm_minmax_ukernel_1x8__wasmsimd_arm_splat() local
58 vacc0x0123 = wasm_f32x4_add(vacc0x0123, wasm_f32x4_mul(va0c0, vb0123c0)); in xnn_f32_gemm_minmax_ukernel_1x8__wasmsimd_arm_splat()
59 vacc0x4567 = wasm_f32x4_add(vacc0x4567, wasm_f32x4_mul(va0c0, vb4567c0)); in xnn_f32_gemm_minmax_ukernel_1x8__wasmsimd_arm_splat()
D1x8-minmax-wasmsimd-x86-splat.c51 const v128_t va0c0 = wasm_v32x4_shuffle(va0, va0, 0, 0, 0, 0); in xnn_f32_gemm_minmax_ukernel_1x8__wasmsimd_x86_splat() local
56 vacc0x0123 = wasm_f32x4_add(vacc0x0123, wasm_f32x4_mul(va0c0, vb0123c0)); in xnn_f32_gemm_minmax_ukernel_1x8__wasmsimd_x86_splat()
57 vacc0x4567 = wasm_f32x4_add(vacc0x4567, wasm_f32x4_mul(va0c0, vb4567c0)); in xnn_f32_gemm_minmax_ukernel_1x8__wasmsimd_x86_splat()
D4x8-minmax-neonfma-dup-ld64.c80 const float32x4_t va0c0 = vdupq_lane_f32(va0, 0); in xnn_f32_gemm_minmax_ukernel_4x8__neonfma_dup_ld64() local
84 vacc0x0123 = vfmaq_f32(vacc0x0123, va0c0, vb0123c0); in xnn_f32_gemm_minmax_ukernel_4x8__neonfma_dup_ld64()
88 vacc0x4567 = vfmaq_f32(vacc0x4567, va0c0, vb4567c0); in xnn_f32_gemm_minmax_ukernel_4x8__neonfma_dup_ld64()
/external/XNNPACK/src/f32-igemm/gen/
D1x8-minmax-neonfma-dup-ld64.c66 const float32x4_t va0c0 = vdupq_lane_f32(va0, 0); in xnn_f32_igemm_minmax_ukernel_1x8__neonfma_dup_ld64() local
67 vacc0x0123 = vfmaq_f32(vacc0x0123, va0c0, vb0123c0); in xnn_f32_igemm_minmax_ukernel_1x8__neonfma_dup_ld64()
68 vacc0x4567 = vfmaq_f32(vacc0x4567, va0c0, vb4567c0); in xnn_f32_igemm_minmax_ukernel_1x8__neonfma_dup_ld64()
D1x8-minmax-neon-dup-ld64.c66 const float32x4_t va0c0 = vdupq_lane_f32(va0, 0); in xnn_f32_igemm_minmax_ukernel_1x8__neon_dup_ld64() local
67 vacc0x0123 = vmlaq_f32(vacc0x0123, va0c0, vb0123c0); in xnn_f32_igemm_minmax_ukernel_1x8__neon_dup_ld64()
68 vacc0x4567 = vmlaq_f32(vacc0x4567, va0c0, vb4567c0); in xnn_f32_igemm_minmax_ukernel_1x8__neon_dup_ld64()
D1x8-wasmsimd-splat.c64 const v128_t va0c0 = wasm_v32x4_shuffle(va0, va0, 0, 0, 0, 0); in xnn_f32_igemm_ukernel_1x8__wasmsimd_splat() local
69 vacc0x0123 = wasm_f32x4_add(vacc0x0123, wasm_f32x4_mul(va0c0, vb0123c0)); in xnn_f32_igemm_ukernel_1x8__wasmsimd_splat()
70 vacc0x4567 = wasm_f32x4_add(vacc0x4567, wasm_f32x4_mul(va0c0, vb4567c0)); in xnn_f32_igemm_ukernel_1x8__wasmsimd_splat()
D1x8-relu-wasmsimd-splat.c64 const v128_t va0c0 = wasm_v32x4_shuffle(va0, va0, 0, 0, 0, 0); in xnn_f32_igemm_relu_ukernel_1x8__wasmsimd_splat() local
69 vacc0x0123 = wasm_f32x4_add(vacc0x0123, wasm_f32x4_mul(va0c0, vb0123c0)); in xnn_f32_igemm_relu_ukernel_1x8__wasmsimd_splat()
70 vacc0x4567 = wasm_f32x4_add(vacc0x4567, wasm_f32x4_mul(va0c0, vb4567c0)); in xnn_f32_igemm_relu_ukernel_1x8__wasmsimd_splat()
D1x8-minmax-wasmsimd-x86-splat.c64 const v128_t va0c0 = wasm_v32x4_shuffle(va0, va0, 0, 0, 0, 0); in xnn_f32_igemm_minmax_ukernel_1x8__wasmsimd_x86_splat() local
69 vacc0x0123 = wasm_f32x4_add(vacc0x0123, wasm_f32x4_mul(va0c0, vb0123c0)); in xnn_f32_igemm_minmax_ukernel_1x8__wasmsimd_x86_splat()
70 vacc0x4567 = wasm_f32x4_add(vacc0x4567, wasm_f32x4_mul(va0c0, vb4567c0)); in xnn_f32_igemm_minmax_ukernel_1x8__wasmsimd_x86_splat()
D1x8-minmax-wasmsimd-arm-splat.c66 const v128_t va0c0 = wasm_v32x4_shuffle(va0, va0, 0, 0, 0, 0); in xnn_f32_igemm_minmax_ukernel_1x8__wasmsimd_arm_splat() local
71 vacc0x0123 = wasm_f32x4_add(vacc0x0123, wasm_f32x4_mul(va0c0, vb0123c0)); in xnn_f32_igemm_minmax_ukernel_1x8__wasmsimd_arm_splat()
72 vacc0x4567 = wasm_f32x4_add(vacc0x4567, wasm_f32x4_mul(va0c0, vb4567c0)); in xnn_f32_igemm_minmax_ukernel_1x8__wasmsimd_arm_splat()
D4x8-minmax-neon-dup-ld64.c102 const float32x4_t va0c0 = vdupq_lane_f32(va0, 0); in xnn_f32_igemm_minmax_ukernel_4x8__neon_dup_ld64() local
106 vacc0x0123 = vmlaq_f32(vacc0x0123, va0c0, vb0123c0); in xnn_f32_igemm_minmax_ukernel_4x8__neon_dup_ld64()
110 vacc0x4567 = vmlaq_f32(vacc0x4567, va0c0, vb4567c0); in xnn_f32_igemm_minmax_ukernel_4x8__neon_dup_ld64()
D4x8-minmax-neonfma-dup-ld64.c102 const float32x4_t va0c0 = vdupq_lane_f32(va0, 0); in xnn_f32_igemm_minmax_ukernel_4x8__neonfma_dup_ld64() local
106 vacc0x0123 = vfmaq_f32(vacc0x0123, va0c0, vb0123c0); in xnn_f32_igemm_minmax_ukernel_4x8__neonfma_dup_ld64()
110 vacc0x4567 = vfmaq_f32(vacc0x4567, va0c0, vb4567c0); in xnn_f32_igemm_minmax_ukernel_4x8__neonfma_dup_ld64()
/external/XNNPACK/src/f16-gemm/gen/
D1x16-minmax-neonfp16arith-ld64.c59 const float16x8_t va0c0 = vdupq_lane_f16(va0, 0); in xnn_f16_gemm_minmax_ukernel_1x16__neonfp16arith_ld64() local
61 vacc0x01234567 = vfmaq_f16(vacc0x01234567, va0c0, vb01234567c0); in xnn_f16_gemm_minmax_ukernel_1x16__neonfp16arith_ld64()
62 vacc0x89ABCDEF = vfmaq_f16(vacc0x89ABCDEF, va0c0, vb89ABCDEFc0); in xnn_f16_gemm_minmax_ukernel_1x16__neonfp16arith_ld64()
D1x8-minmax-neonfp16arith-ld64.c56 const float16x8_t va0c0 = vdupq_lane_f16(va0, 0); in xnn_f16_gemm_minmax_ukernel_1x8__neonfp16arith_ld64() local
58 vacc0x01234567 = vfmaq_f16(vacc0x01234567, va0c0, vb01234567c0); in xnn_f16_gemm_minmax_ukernel_1x8__neonfp16arith_ld64()
/external/XNNPACK/src/f16-gemm/gen-inc/
D1x16inc-minmax-neonfp16arith-ld64.c61 const float16x8_t va0c0 = vdupq_lane_f16(va0, 0); in xnn_f16_gemminc_minmax_ukernel_1x16__neonfp16arith_ld64() local
63 vacc0x01234567 = vfmaq_f16(vacc0x01234567, va0c0, vb01234567c0); in xnn_f16_gemminc_minmax_ukernel_1x16__neonfp16arith_ld64()
64 vacc0x89ABCDEF = vfmaq_f16(vacc0x89ABCDEF, va0c0, vb89ABCDEFc0); in xnn_f16_gemminc_minmax_ukernel_1x16__neonfp16arith_ld64()
D1x8inc-minmax-neonfp16arith-ld64.c58 const float16x8_t va0c0 = vdupq_lane_f16(va0, 0); in xnn_f16_gemminc_minmax_ukernel_1x8__neonfp16arith_ld64() local
60 vacc0x01234567 = vfmaq_f16(vacc0x01234567, va0c0, vb01234567c0); in xnn_f16_gemminc_minmax_ukernel_1x8__neonfp16arith_ld64()
/external/XNNPACK/src/f16-igemm/gen/
D1x16-minmax-neonfp16arith-ld64.c70 const float16x8_t va0c0 = vdupq_lane_f16(va0, 0); in xnn_f16_igemm_minmax_ukernel_1x16__neonfp16arith_ld64() local
72 vacc0x01234567 = vfmaq_f16(vacc0x01234567, va0c0, vb01234567c0); in xnn_f16_igemm_minmax_ukernel_1x16__neonfp16arith_ld64()
73 vacc0x89ABCDEF = vfmaq_f16(vacc0x89ABCDEF, va0c0, vb89ABCDEFc0); in xnn_f16_igemm_minmax_ukernel_1x16__neonfp16arith_ld64()
D1x8-minmax-neonfp16arith-ld64.c67 const float16x8_t va0c0 = vdupq_lane_f16(va0, 0); in xnn_f16_igemm_minmax_ukernel_1x8__neonfp16arith_ld64() local
69 vacc0x01234567 = vfmaq_f16(vacc0x01234567, va0c0, vb01234567c0); in xnn_f16_igemm_minmax_ukernel_1x8__neonfp16arith_ld64()

1234