Home
last modified time | relevance | path

Searched refs:vi6x4567 (Results 1 – 25 of 73) sorted by relevance

123

/external/XNNPACK/src/f32-dwconv2d-chw/gen/
D5x5p2-minmax-neon-3x4.c90 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4() local
124 vo2p0 = vmlaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
132 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
166 const float32x4_t vi6x2345 = vextq_f32(vi6x0123, vi6x4567, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
167 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
195 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
229 const float32x4_t vi6x6789 = vextq_f32(vi6x4567, vi6x89AB, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
230 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
305 vo2p0 = vmlaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
313 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4()
[all …]
D5x5p2-minmax-wasmsimd-arm-loadsplat-3x4-acc2.c119 v128_t vi6x4567 = wasm_v128_load(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2() local
153 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
161 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
195 const v128_t vi6x2345 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
196 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
224 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
258 const v128_t vi6x6789 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
259 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
336 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
344 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2()
[all …]
D5x5p2-minmax-neonfma-3x4-acc2.c90 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2() local
124 vo2p0 = vfmaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
132 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
166 const float32x4_t vi6x2345 = vextq_f32(vi6x0123, vi6x4567, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
167 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
195 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
229 const float32x4_t vi6x6789 = vextq_f32(vi6x4567, vi6x89AB, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
230 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
308 vo2p0 = vfmaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
316 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2()
[all …]
D5x5p2-minmax-wasmsimd-x86-loadsplat-3x4.c119 v128_t vi6x4567 = wasm_v128_load(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4() local
153 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
161 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
195 const v128_t vi6x2345 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
196 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
224 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
258 const v128_t vi6x6789 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
259 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
333 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
341 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4()
[all …]
D5x5p2-minmax-wasmsimd-arm-loadsplat-3x4.c119 v128_t vi6x4567 = wasm_v128_load(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4() local
153 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
161 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
195 const v128_t vi6x2345 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
196 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
224 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
258 const v128_t vi6x6789 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
259 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
333 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
341 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4()
[all …]
D5x5p2-minmax-neonfma-3x4.c90 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4() local
124 vo2p0 = vfmaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
132 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
166 const float32x4_t vi6x2345 = vextq_f32(vi6x0123, vi6x4567, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
167 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
195 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
229 const float32x4_t vi6x6789 = vextq_f32(vi6x4567, vi6x89AB, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
230 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
305 vo2p0 = vfmaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
313 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4()
[all …]
D5x5p2-minmax-neon-3x4-acc2.c90 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2() local
124 vo2p0 = vmlaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
132 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
166 const float32x4_t vi6x2345 = vextq_f32(vi6x0123, vi6x4567, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
167 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
195 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
229 const float32x4_t vi6x6789 = vextq_f32(vi6x4567, vi6x89AB, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
230 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
308 vo2p0 = vmlaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
316 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2()
[all …]
D3x3p1-minmax-wasmsimd-arm-loadsplat-5x4.c113 v128_t vi6x4567 = wasm_v128_load(i6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4() local
147 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
155 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
179 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
187 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
211 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
246 vi6x4567 = wasm_v128_and(vmask, vi6x4567); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
262 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
270 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
294 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vzero, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4()
D3x3p1-minmax-neonfma-5x4.c93 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_5x4() local
127 vo4p0 = vfmaq_lane_f32(vo4p0, vi6x4567, vw89, 0); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_5x4()
135 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_5x4()
161 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_5x4()
169 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_5x4()
195 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_5x4()
232 vi6x4567 = vreinterpretq_f32_u32(vandq_u32(vmask, vreinterpretq_u32_f32(vi6x4567))); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_5x4()
250 vo4p0 = vfmaq_lane_f32(vo4p0, vi6x4567, vw89, 0); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_5x4()
258 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_5x4()
285 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vzero, 1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_5x4()
D3x3p1-minmax-neon-5x4.c93 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_5x4() local
127 vo4p0 = vmlaq_lane_f32(vo4p0, vi6x4567, vw89, 0); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_5x4()
135 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_5x4()
161 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_5x4()
169 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_5x4()
195 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_5x4()
232 vi6x4567 = vreinterpretq_f32_u32(vandq_u32(vmask, vreinterpretq_u32_f32(vi6x4567))); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_5x4()
250 vo4p0 = vmlaq_lane_f32(vo4p0, vi6x4567, vw89, 0); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_5x4()
258 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_5x4()
285 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vzero, 1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_5x4()
D3x3p1-minmax-sse-5x4.c113 __m128 vi6x4567 = _mm_loadu_ps(i6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4() local
153 const __m128 vi6x7456 = _mm_shuffle_ps(vi6x4567, vi6x4567, _MM_SHUFFLE(2, 1, 0, 3)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4()
169 vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4()
223 const __m128 vi6x8567 = _mm_move_ss(vi6x4567, vi6x89AB); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4()
262 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4()
298 vi6x4567 = _mm_and_ps(vmask, vi6x4567); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4()
313 const __m128 vi6x7456 = _mm_shuffle_ps(vi6x4567, vi6x4567, _MM_SHUFFLE(2, 1, 0, 3)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4()
329 vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4()
376 const __m128 vi6x8567 = _mm_move_ss(vi6x4567, vzero); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4()
D3x3p1-minmax-neon-6x4.c100 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4() local
133 vo5p0 = vmlaq_lane_f32(vo5p0, vi6x4567, vget_low_f32(vw4567), 1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
139 vo4p0 = vmlaq_lane_f32(vo4p0, vi6x4567, vw89, 0); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
148 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
178 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
187 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
217 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
259 vi6x4567 = vreinterpretq_f32_u32(vandq_u32(vmask, vreinterpretq_u32_f32(vi6x4567))); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
274 vo5p0 = vmlaq_lane_f32(vo5p0, vi6x4567, vget_low_f32(vw4567), 1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
280 vo4p0 = vmlaq_lane_f32(vo4p0, vi6x4567, vw89, 0); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4()
[all …]
D3x3p1-minmax-ssse3-6x4.c113 __m128 vi6x4567 = _mm_loadu_ps(i6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4() local
148 vo5p0 = _mm_add_ps(vo5p0, _mm_mul_ps(vi6x4567, vk11)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
153 vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
162 const __m128 vi6x3456 = _mm_castsi128_ps(_mm_alignr_epi8(_mm_castps_si128(vi6x4567), _mm_castps_si128(vi6x0123), 12)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
190 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
199 const __m128 vi6x5678 = _mm_castsi128_ps(_mm_alignr_epi8(_mm_castps_si128(vi6x89AB), _mm_castps_si128(vi6x4567), 4)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
227 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
268 vi6x4567 = _mm_and_ps(vmask, vi6x4567); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
282 vo5p0 = _mm_add_ps(vo5p0, _mm_mul_ps(vi6x4567, vk11)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
287 vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4()
[all …]
D3x3p1-minmax-neonfma-6x4.c100 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4() local
133 vo5p0 = vfmaq_lane_f32(vo5p0, vi6x4567, vget_low_f32(vw4567), 1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
139 vo4p0 = vfmaq_lane_f32(vo4p0, vi6x4567, vw89, 0); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
148 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
178 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
187 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
217 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
259 vi6x4567 = vreinterpretq_f32_u32(vandq_u32(vmask, vreinterpretq_u32_f32(vi6x4567))); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
274 vo5p0 = vfmaq_lane_f32(vo5p0, vi6x4567, vget_low_f32(vw4567), 1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
280 vo4p0 = vfmaq_lane_f32(vo4p0, vi6x4567, vw89, 0); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4()
[all …]
D3x3p1-minmax-wasmsimd-x86-loadsplat-6x4.c120 v128_t vi6x4567 = wasm_v128_load(i6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4() local
155 vo5p0 = wasm_f32x4_add(vo5p0, wasm_f32x4_mul(vi6x4567, vk11)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
160 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
169 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
197 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
206 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
234 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
274 vi6x4567 = wasm_v128_and(vmask, vi6x4567); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
288 vo5p0 = wasm_f32x4_add(vo5p0, wasm_f32x4_mul(vi6x4567, vk11)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
293 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4()
[all …]
D3x3p1-minmax-wasmsimd-arm-loadsplat-6x4.c120 v128_t vi6x4567 = wasm_v128_load(i6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4() local
155 vo5p0 = wasm_f32x4_add(vo5p0, wasm_f32x4_mul(vi6x4567, vk11)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
160 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
169 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
197 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
206 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
234 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
274 vi6x4567 = wasm_v128_and(vmask, vi6x4567); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
288 vo5p0 = wasm_f32x4_add(vo5p0, wasm_f32x4_mul(vi6x4567, vk11)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
293 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4()
[all …]
D3x3p1-minmax-sse-6x4.c121 __m128 vi6x4567 = _mm_loadu_ps(i6); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4() local
166 const __m128 vi6x7456 = _mm_shuffle_ps(vi6x4567, vi6x4567, _MM_SHUFFLE(2, 1, 0, 3)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
181 vo5p0 = _mm_add_ps(vo5p0, _mm_mul_ps(vi6x4567, vk11)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
186 vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
247 const __m128 vi6x8567 = _mm_move_ss(vi6x4567, vi6x89AB); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
293 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
334 vi6x4567 = _mm_and_ps(vmask, vi6x4567); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
350 const __m128 vi6x7456 = _mm_shuffle_ps(vi6x4567, vi6x4567, _MM_SHUFFLE(2, 1, 0, 3)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
365 vo5p0 = _mm_add_ps(vo5p0, _mm_mul_ps(vi6x4567, vk11)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
370 vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi6x4567, vk21)); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4()
[all …]
D5x5p2-minmax-neon-4x4-acc2.c97 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2() local
134 vo3p1 = vmlaq_lane_f32(vo3p1, vi6x4567, vget_high_f32(vwGHIJ), 0); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
138 vo2p0 = vmlaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
147 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
187 const float32x4_t vi6x2345 = vextq_f32(vi6x0123, vi6x4567, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
188 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
223 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
263 const float32x4_t vi6x6789 = vextq_f32(vi6x4567, vi6x89AB, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
264 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
356 vo3p1 = vmlaq_lane_f32(vo3p1, vi6x4567, vget_high_f32(vwGHIJ), 0); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2()
[all …]
D5x5p2-minmax-neon-4x4.c97 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4() local
134 vo3p0 = vmlaq_lane_f32(vo3p0, vi6x4567, vget_high_f32(vwGHIJ), 0); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
138 vo2p0 = vmlaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
147 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
187 const float32x4_t vi6x2345 = vextq_f32(vi6x0123, vi6x4567, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
188 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
223 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
263 const float32x4_t vi6x6789 = vextq_f32(vi6x4567, vi6x89AB, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
264 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
352 vo3p0 = vmlaq_lane_f32(vo3p0, vi6x4567, vget_high_f32(vwGHIJ), 0); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4()
[all …]
D5x5p2-minmax-neonfma-4x4.c97 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4() local
134 vo3p0 = vfmaq_lane_f32(vo3p0, vi6x4567, vget_high_f32(vwGHIJ), 0); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
138 vo2p0 = vfmaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
147 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
187 const float32x4_t vi6x2345 = vextq_f32(vi6x0123, vi6x4567, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
188 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
223 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
263 const float32x4_t vi6x6789 = vextq_f32(vi6x4567, vi6x89AB, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
264 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
352 vo3p0 = vfmaq_lane_f32(vo3p0, vi6x4567, vget_high_f32(vwGHIJ), 0); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4()
[all …]
D5x5p2-minmax-neonfma-4x4-acc2.c97 float32x4_t vi6x4567 = vld1q_f32(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2() local
134 vo3p1 = vfmaq_lane_f32(vo3p1, vi6x4567, vget_high_f32(vwGHIJ), 0); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
138 vo2p0 = vfmaq_lane_f32(vo2p0, vi6x4567, vget_high_f32(vwKLMN), 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
147 const float32x4_t vi6x3456 = vextq_f32(vi6x0123, vi6x4567, 3); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
187 const float32x4_t vi6x2345 = vextq_f32(vi6x0123, vi6x4567, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
188 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
223 const float32x4_t vi6x5678 = vextq_f32(vi6x4567, vi6x89AB, 1); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
263 const float32x4_t vi6x6789 = vextq_f32(vi6x4567, vi6x89AB, 2); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
264 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
356 vo3p1 = vfmaq_lane_f32(vo3p1, vi6x4567, vget_high_f32(vwGHIJ), 0); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2()
[all …]
D5x5p2-minmax-wasmsimd-x86-loadsplat-3x4-acc2.c119 v128_t vi6x4567 = wasm_v128_load(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2() local
153 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
161 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
195 const v128_t vi6x2345 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
196 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
224 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
258 const v128_t vi6x6789 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
259 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
336 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
344 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4_acc2()
[all …]
D5x5p2-minmax-wasmsimd-arm-loadsplat-4x4-acc2.c126 v128_t vi6x4567 = wasm_v128_load(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_4x4_acc2() local
163 vo3p1 = wasm_f32x4_add(vo3p1, wasm_f32x4_mul(vi6x4567, vk32)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_4x4_acc2()
167 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_4x4_acc2()
176 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_4x4_acc2()
216 const v128_t vi6x2345 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_4x4_acc2()
217 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_4x4_acc2()
252 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_4x4_acc2()
292 const v128_t vi6x6789 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_4x4_acc2()
293 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_4x4_acc2()
384 vo3p1 = wasm_f32x4_add(vo3p1, wasm_f32x4_mul(vi6x4567, vk32)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_4x4_acc2()
[all …]
D5x5p2-minmax-wasmsimd-x86-loadsplat-4x4.c126 v128_t vi6x4567 = wasm_v128_load(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4() local
163 vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi6x4567, vk32)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4()
167 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4()
176 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4()
216 const v128_t vi6x2345 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4()
217 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4()
252 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4()
292 const v128_t vi6x6789 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4()
293 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4()
380 vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi6x4567, vk32)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4()
[all …]
D5x5p2-minmax-wasmsimd-x86-loadsplat-4x4-acc2.c126 v128_t vi6x4567 = wasm_v128_load(i6); i6 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4_acc2() local
163 vo3p1 = wasm_f32x4_add(vo3p1, wasm_f32x4_mul(vi6x4567, vk32)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4_acc2()
167 vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi6x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4_acc2()
176 const v128_t vi6x3456 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4_acc2()
216 const v128_t vi6x2345 = wasm_v32x4_shuffle(vi6x0123, vi6x4567, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4_acc2()
217 vi6x0123 = vi6x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4_acc2()
252 const v128_t vi6x5678 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4_acc2()
292 const v128_t vi6x6789 = wasm_v32x4_shuffle(vi6x4567, vi6x89AB, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4_acc2()
293 vi6x4567 = vi6x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4_acc2()
384 vo3p1 = wasm_f32x4_add(vo3p1, wasm_f32x4_mul(vi6x4567, vk32)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_4x4_acc2()
[all …]

123