Home
last modified time | relevance | path

Searched refs:vi6x4567 (Results 1 – 25 of 146) sorted by relevance

123456

/external/XNNPACK/src/f16-dwconv2d-chw/gen/
D5x5p2-minmax-neonfp16arith-3x4.c 88 float16x4_t vi6x4567 = vld1_f16(i6); i6 += 4; in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4() local
122 vo2p0 = vfma_laneq_f16(vo2p0, vi6x4567, vwGHIJKLMN, 7); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4()
130 const float16x4_t vi6x3456 = vext_f16(vi6x0123, vi6x4567, 3); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4()
164 const float16x4_t vi6x2345 = vext_f16(vi6x0123, vi6x4567, 2); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4()
165 vi6x0123 = vi6x4567; in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4()
193 const float16x4_t vi6x5678 = vext_f16(vi6x4567, vi6x89AB, 1); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4()
227 const float16x4_t vi6x6789 = vext_f16(vi6x4567, vi6x89AB, 2); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4()
228 vi6x4567 = vi6x89AB; in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4()
303 vo2p0 = vfma_laneq_f16(vo2p0, vi6x4567, vwGHIJKLMN, 7); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4()
311 const float16x4_t vi6x3456 = vext_f16(vi6x0123, vi6x4567, 3); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4()
[all …]
D5x5p2-minmax-neonfp16arith-3x4-acc2.c 88 float16x4_t vi6x4567 = vld1_f16(i6); i6 += 4; in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4_acc2() local
122 vo2p0 = vfma_laneq_f16(vo2p0, vi6x4567, vwGHIJKLMN, 7); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4_acc2()
130 const float16x4_t vi6x3456 = vext_f16(vi6x0123, vi6x4567, 3); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4_acc2()
164 const float16x4_t vi6x2345 = vext_f16(vi6x0123, vi6x4567, 2); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4_acc2()
165 vi6x0123 = vi6x4567; in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4_acc2()
193 const float16x4_t vi6x5678 = vext_f16(vi6x4567, vi6x89AB, 1); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4_acc2()
227 const float16x4_t vi6x6789 = vext_f16(vi6x4567, vi6x89AB, 2); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4_acc2()
228 vi6x4567 = vi6x89AB; in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4_acc2()
306 vo2p0 = vfma_laneq_f16(vo2p0, vi6x4567, vwGHIJKLMN, 7); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4_acc2()
314 const float16x4_t vi6x3456 = vext_f16(vi6x0123, vi6x4567, 3); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_3x4_acc2()
[all …]
D5x5p2-minmax-neonfp16arith-4x4.c 95 float16x4_t vi6x4567 = vld1_f16(i6); i6 += 4; in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4() local
132 vo3p0 = vfma_laneq_f16(vo3p0, vi6x4567, vwGHIJKLMN, 2); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4()
136 vo2p0 = vfma_laneq_f16(vo2p0, vi6x4567, vwGHIJKLMN, 7); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4()
145 const float16x4_t vi6x3456 = vext_f16(vi6x0123, vi6x4567, 3); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4()
185 const float16x4_t vi6x2345 = vext_f16(vi6x0123, vi6x4567, 2); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4()
186 vi6x0123 = vi6x4567; in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4()
221 const float16x4_t vi6x5678 = vext_f16(vi6x4567, vi6x89AB, 1); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4()
261 const float16x4_t vi6x6789 = vext_f16(vi6x4567, vi6x89AB, 2); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4()
262 vi6x4567 = vi6x89AB; in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4()
350 vo3p0 = vfma_laneq_f16(vo3p0, vi6x4567, vwGHIJKLMN, 2); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4()
[all …]
D5x5p2-minmax-neonfp16arith-4x4-acc2.c 95 float16x4_t vi6x4567 = vld1_f16(i6); i6 += 4; in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4_acc2() local
132 vo3p1 = vfma_laneq_f16(vo3p1, vi6x4567, vwGHIJKLMN, 2); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4_acc2()
136 vo2p0 = vfma_laneq_f16(vo2p0, vi6x4567, vwGHIJKLMN, 7); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4_acc2()
145 const float16x4_t vi6x3456 = vext_f16(vi6x0123, vi6x4567, 3); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4_acc2()
185 const float16x4_t vi6x2345 = vext_f16(vi6x0123, vi6x4567, 2); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4_acc2()
186 vi6x0123 = vi6x4567; in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4_acc2()
221 const float16x4_t vi6x5678 = vext_f16(vi6x4567, vi6x89AB, 1); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4_acc2()
261 const float16x4_t vi6x6789 = vext_f16(vi6x4567, vi6x89AB, 2); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4_acc2()
262 vi6x4567 = vi6x89AB; in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4_acc2()
354 vo3p1 = vfma_laneq_f16(vo3p1, vi6x4567, vwGHIJKLMN, 2); in xnn_f16_dwconv2d_chw_ukernel_5x5p2__neonfp16arith_4x4_acc2()
[all …]
/external/XNNPACK/src/f32-dwconv2d-chw/gen/
D5x5p2-minmax-wasmsimd-x86-loadsplat-3x4-acc2.c 117 v128_t vi6x4567 = wasm_v128_load(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2() local
151 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
159 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
193 const v128_t vi6x2345 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
194 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
222 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
256 const v128_t vi6x6789 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
257 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
334 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
342 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
[all …]
D5x5p2-minmax-neon-3x4.c 90 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4() local
124 vo2p0 = vmlaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
132 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
166 const float32x4_t vi6x2345 = vextq_f32(vi6x0123, vi6x4567, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
167 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
195 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
229 const float32x4_t vi6x6789 = vextq_f32(vi6x4567, vi6x89AB, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
230 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
305 vo2p0 = vmlaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
313 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
[all …]
D5x5p2-minmax-wasmsimd-arm-loadsplat-3x4.c 117 v128_t vi6x4567 = wasm_v128_load(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4() local
151 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
159 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
193 const v128_t vi6x2345 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
194 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
222 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
256 const v128_t vi6x6789 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
257 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
331 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
339 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
[all …]
D5x5p2-minmax-wasmsimd-arm-loadsplat-3x4-acc2.c 117 v128_t vi6x4567 = wasm_v128_load(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2() local
151 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
159 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
193 const v128_t vi6x2345 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
194 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
222 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
256 const v128_t vi6x6789 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
257 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
334 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
342 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
[all …]
D5x5p2-minmax-wasmsimd-x86-loadsplat-3x4.c 117 v128_t vi6x4567 = wasm_v128_load(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4() local
151 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
159 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
193 const v128_t vi6x2345 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
194 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
222 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
256 const v128_t vi6x6789 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
257 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
331 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
339 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
[all …]
D5x5p2-minmax-neonfma-3x4-acc2.c 90 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2() local
124 vo2p0 = vfmaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
132 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
166 const float32x4_t vi6x2345 = vextq_f32(vi6x0123, vi6x4567, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
167 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
195 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
229 const float32x4_t vi6x6789 = vextq_f32(vi6x4567, vi6x89AB, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
230 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
308 vo2p0 = vfmaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
316 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
[all …]
D5x5p2-minmax-neon-3x4-acc2.c 90 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2() local
124 vo2p0 = vmlaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
132 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
166 const float32x4_t vi6x2345 = vextq_f32(vi6x0123, vi6x4567, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
167 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
195 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
229 const float32x4_t vi6x6789 = vextq_f32(vi6x4567, vi6x89AB, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
230 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
308 vo2p0 = vmlaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
316 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
[all …]
D5x5p2-minmax-neonfma-3x4.c 90 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4() local
124 vo2p0 = vfmaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
132 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
166 const float32x4_t vi6x2345 = vextq_f32(vi6x0123, vi6x4567, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
167 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
195 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
229 const float32x4_t vi6x6789 = vextq_f32(vi6x4567, vi6x89AB, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
230 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
305 vo2p0 = vfmaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
313 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
[all …]
D5x5p2-minmax-neon-4x4.c 97 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4() local
134 vo3p0 = vmlaq_lane_f32(vo3p0, vi6x4567, vget_high_f32(vwGHIJ), 0); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
138 vo2p0 = vmlaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
147 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
187 const float32x4_t vi6x2345 = vextq_f32(vi6x0123, vi6x4567, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
188 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
223 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
263 const float32x4_t vi6x6789 = vextq_f32(vi6x4567, vi6x89AB, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
264 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
352 vo3p0 = vmlaq_lane_f32(vo3p0, vi6x4567, vget_high_f32(vwGHIJ), 0); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
[all …]
D5x5p2-minmax-neon-4x4-acc2.c 97 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2() local
134 vo3p1 = vmlaq_lane_f32(vo3p1, vi6x4567, vget_high_f32(vwGHIJ), 0); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
138 vo2p0 = vmlaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
147 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
187 const float32x4_t vi6x2345 = vextq_f32(vi6x0123, vi6x4567, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
188 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
223 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
263 const float32x4_t vi6x6789 = vextq_f32(vi6x4567, vi6x89AB, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
264 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
356 vo3p1 = vmlaq_lane_f32(vo3p1, vi6x4567, vget_high_f32(vwGHIJ), 0); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
[all …]
D5x5p2-minmax-neonfma-4x4.c 97 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4() local
134 vo3p0 = vfmaq_lane_f32(vo3p0, vi6x4567, vget_high_f32(vwGHIJ), 0); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
138 vo2p0 = vfmaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
147 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
187 const float32x4_t vi6x2345 = vextq_f32(vi6x0123, vi6x4567, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
188 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
223 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
263 const float32x4_t vi6x6789 = vextq_f32(vi6x4567, vi6x89AB, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
264 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
352 vo3p0 = vfmaq_lane_f32(vo3p0, vi6x4567, vget_high_f32(vwGHIJ), 0); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
[all …]
D5x5p2-minmax-neonfma-4x4-acc2.c 97 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2() local
134 vo3p1 = vfmaq_lane_f32(vo3p1, vi6x4567, vget_high_f32(vwGHIJ), 0); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
138 vo2p0 = vfmaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
147 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
187 const float32x4_t vi6x2345 = vextq_f32(vi6x0123, vi6x4567, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
188 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
223 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
263 const float32x4_t vi6x6789 = vextq_f32(vi6x4567, vi6x89AB, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
264 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
356 vo3p1 = vfmaq_lane_f32(vo3p1, vi6x4567, vget_high_f32(vwGHIJ), 0); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
[all …]
D3x3p1-minmax-sse-6x4.c 121 __m128 vi6x4567 = _mm_loadu_ps(i6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4() local
166 const __m128 vi6x7456 = _mm_shuffle_ps(vi6x4567, vi6x4567, _MM_SHUFFLE(2, 1, 0, 3)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
181 vo5p0 = _mm_add_ps(vo5p0, _mm_mul_ps(vi6x4567, vk11)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
186 vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
247 const __m128 vi6x8567 = _mm_move_ss(vi6x4567, vi6x89AB); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
293 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
334 vi6x4567 = _mm_and_ps(vmask, vi6x4567); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
350 const __m128 vi6x7456 = _mm_shuffle_ps(vi6x4567, vi6x4567, _MM_SHUFFLE(2, 1, 0, 3)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
365 vo5p0 = _mm_add_ps(vo5p0, _mm_mul_ps(vi6x4567, vk11)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
370 vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
[all …]
D3x3p1-minmax-ssse3-6x4.c 113 __m128 vi6x4567 = _mm_loadu_ps(i6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4() local
148 vo5p0 = _mm_add_ps(vo5p0, _mm_mul_ps(vi6x4567, vk11)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
153 vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
162 …const __m128 vi6x3456 = _mm_castsi128_ps(_mm_alignr_epi8(_mm_castps_si128(vi6x4567), _mm_castps_si… in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
190 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
199 …678 = _mm_castsi128_ps(_mm_alignr_epi8(_mm_castps_si128(vi6x89AB), _mm_castps_si128(vi6x4567), 4)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
227 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
268 vi6x4567 = _mm_and_ps(vmask, vi6x4567); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
282 vo5p0 = _mm_add_ps(vo5p0, _mm_mul_ps(vi6x4567, vk11)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
287 vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
[all …]
D3x3p1-minmax-neon-6x4.c 100 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4() local
133 vo5p0 = vmlaq_lane_f32(vo5p0, vi6x4567, vget_low_f32(vw4567), 1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
139 vo4p0 = vmlaq_lane_f32(vo4p0, vi6x4567, vw89, 0); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
148 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
178 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
187 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
217 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
259 vi6x4567 = vreinterpretq_f32_u32(vandq_u32(vmask, vreinterpretq_u32_f32(vi6x4567))); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
274 vo5p0 = vmlaq_lane_f32(vo5p0, vi6x4567, vget_low_f32(vw4567), 1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
280 vo4p0 = vmlaq_lane_f32(vo4p0, vi6x4567, vw89, 0); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
[all …]
D3x3p1-minmax-wasmsimd-x86-loadsplat-6x4.c 118 v128_t vi6x4567 = wasm_v128_load(i6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4() local
153 vo5p0 = wasm_f32x4_add(vo5p0, wasm_f32x4_mul(vi6x4567, vk11)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
158 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
167 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
195 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
204 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
232 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
272 vi6x4567 = wasm_v128_and(vmask, vi6x4567); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
286 vo5p0 = wasm_f32x4_add(vo5p0, wasm_f32x4_mul(vi6x4567, vk11)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
291 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
[all …]
D3x3p1-minmax-wasmsimd-arm-loadsplat-6x4.c 118 v128_t vi6x4567 = wasm_v128_load(i6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4() local
153 vo5p0 = wasm_f32x4_add(vo5p0, wasm_f32x4_mul(vi6x4567, vk11)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
158 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
167 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
195 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
204 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
232 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
272 vi6x4567 = wasm_v128_and(vmask, vi6x4567); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
286 vo5p0 = wasm_f32x4_add(vo5p0, wasm_f32x4_mul(vi6x4567, vk11)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
291 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
[all …]
D3x3p1-minmax-neonfma-6x4.c 100 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4() local
133 vo5p0 = vfmaq_lane_f32(vo5p0, vi6x4567, vget_low_f32(vw4567), 1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
139 vo4p0 = vfmaq_lane_f32(vo4p0, vi6x4567, vw89, 0); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
148 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
178 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
187 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
217 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
259 vi6x4567 = vreinterpretq_f32_u32(vandq_u32(vmask, vreinterpretq_u32_f32(vi6x4567))); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
274 vo5p0 = vfmaq_lane_f32(vo5p0, vi6x4567, vget_low_f32(vw4567), 1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
280 vo4p0 = vfmaq_lane_f32(vo4p0, vi6x4567, vw89, 0); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
[all …]
D3x3p1-minmax-sse-5x4.c 113 __m128 vi6x4567 = _mm_loadu_ps(i6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4() local
153 const __m128 vi6x7456 = _mm_shuffle_ps(vi6x4567, vi6x4567, _MM_SHUFFLE(2, 1, 0, 3)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4()
169 vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4()
223 const __m128 vi6x8567 = _mm_move_ss(vi6x4567, vi6x89AB); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4()
262 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4()
298 vi6x4567 = _mm_and_ps(vmask, vi6x4567); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4()
313 const __m128 vi6x7456 = _mm_shuffle_ps(vi6x4567, vi6x4567, _MM_SHUFFLE(2, 1, 0, 3)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4()
329 vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4()
376 const __m128 vi6x8567 = _mm_move_ss(vi6x4567, vzero); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4()
D3x3p1-minmax-wasmsimd-x86-loadsplat-5x4.c 111 v128_t vi6x4567 = wasm_v128_load(i6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_5x4() local
145 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_5x4()
153 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_5x4()
177 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_5x4()
185 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_5x4()
209 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_5x4()
244 vi6x4567 = wasm_v128_and(vmask, vi6x4567); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_5x4()
260 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_5x4()
268 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_5x4()
293 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vzero, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_5x4()
D3x3p1-minmax-wasmsimd-arm-loadsplat-5x4.c 111 v128_t vi6x4567 = wasm_v128_load(i6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4() local
145 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
153 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
177 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
185 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
209 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
244 vi6x4567 = wasm_v128_and(vmask, vi6x4567); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
260 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
268 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
293 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vzero, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()

123456