/external/XNNPACK/src/f32-dwconv2d-chw/gen/

D | 3x3p1-minmax-neon-6x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_6x4():
    147  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    156  vo5p0 = vmlaq_lane_f32(vo5p0, vi5x3456, vget_low_f32(vw0123), 1);
    162  vo4p0 = vmlaq_lane_f32(vo4p0, vi5x3456, vget_low_f32(vw4567), 0);
    168  vo3p0 = vmlaq_lane_f32(vo3p0, vi5x3456, vget_high_f32(vw4567), 1);
    288  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    297  vo5p0 = vmlaq_lane_f32(vo5p0, vi5x3456, vget_low_f32(vw0123), 1);
    303  vo4p0 = vmlaq_lane_f32(vo4p0, vi5x3456, vget_low_f32(vw4567), 0);
    309  vo3p0 = vmlaq_lane_f32(vo3p0, vi5x3456, vget_high_f32(vw4567), 1);

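The declaration hits above show how the NEON kernels form the shifted "x3456" column window: vextq_f32(a, b, 3) concatenates two adjacent 4-lane row registers and extracts four lanes starting at lane 3. Below is a minimal standalone sketch of that idiom; the row data and main() harness are invented for illustration and are not part of the kernel file.

  // Standalone sketch (invented data): vextq_f32(a, b, 3) yields
  // {a[3], b[0], b[1], b[2]} -- columns 3..6 from two adjacent 4-column loads.
  #include <arm_neon.h>
  #include <stdio.h>

  int main(void) {
    const float row[8] = {0.f, 1.f, 2.f, 3.f, 4.f, 5.f, 6.f, 7.f};
    const float32x4_t vi5x0123 = vld1q_f32(row);      // columns 0..3
    const float32x4_t vi5x4567 = vld1q_f32(row + 4);  // columns 4..7
    const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  // {3,4,5,6}

    float out[4];
    vst1q_f32(out, vi5x3456);
    printf("%g %g %g %g\n", out[0], out[1], out[2], out[3]);  // prints 3 4 5 6
    return 0;
  }
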
D | 3x3p1-minmax-ssse3-6x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_6x4():
    161  …const __m128 vi5x3456 = _mm_castsi128_ps(_mm_alignr_epi8(_mm_castps_si128(vi5x4567), _mm_castps_si…  (local)
    170  vo5p0 = _mm_add_ps(vo5p0, _mm_mul_ps(vi5x3456, vk00));
    175  vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi5x3456, vk10));
    180  vo3p0 = _mm_add_ps(vo3p0, _mm_mul_ps(vi5x3456, vk20));
    295  …const __m128 vi5x3456 = _mm_castsi128_ps(_mm_alignr_epi8(_mm_castps_si128(vi5x4567), _mm_castps_si…  (local)
    304  vo5p0 = _mm_add_ps(vo5p0, _mm_mul_ps(vi5x3456, vk00));
    309  vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi5x3456, vk10));
    314  vo3p0 = _mm_add_ps(vo3p0, _mm_mul_ps(vi5x3456, vk20));

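The SSSE3 variants build the same window with _mm_alignr_epi8, which byte-aligns two 128-bit registers; a 12-byte offset corresponds to a shift of three float lanes. The declaration line is truncated in this listing, so the sketch below shows the general technique under that assumption, with invented data and harness.

  // Standalone sketch (invented data): alignr over {vi5x4567 : vi5x0123} with a
  // 12-byte offset keeps the last float of vi5x0123 and the first three of
  // vi5x4567, i.e. columns 3..6.
  #include <tmmintrin.h>  // SSSE3
  #include <stdio.h>

  int main(void) {
    const float row[8] = {0.f, 1.f, 2.f, 3.f, 4.f, 5.f, 6.f, 7.f};
    const __m128 vi5x0123 = _mm_loadu_ps(row);      // columns 0..3
    const __m128 vi5x4567 = _mm_loadu_ps(row + 4);  // columns 4..7
    const __m128 vi5x3456 = _mm_castsi128_ps(_mm_alignr_epi8(
        _mm_castps_si128(vi5x4567), _mm_castps_si128(vi5x0123), 12));

    float out[4];
    _mm_storeu_ps(out, vi5x3456);
    printf("%g %g %g %g\n", out[0], out[1], out[2], out[3]);  // prints 3 4 5 6
    return 0;
  }
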
D | 3x3p1-minmax-neonfma-6x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_6x4():
    147  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    156  vo5p0 = vfmaq_lane_f32(vo5p0, vi5x3456, vget_low_f32(vw0123), 1);
    162  vo4p0 = vfmaq_lane_f32(vo4p0, vi5x3456, vget_low_f32(vw4567), 0);
    168  vo3p0 = vfmaq_lane_f32(vo3p0, vi5x3456, vget_high_f32(vw4567), 1);
    288  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    297  vo5p0 = vfmaq_lane_f32(vo5p0, vi5x3456, vget_low_f32(vw0123), 1);
    303  vo4p0 = vfmaq_lane_f32(vo4p0, vi5x3456, vget_low_f32(vw4567), 0);
    309  vo3p0 = vfmaq_lane_f32(vo3p0, vi5x3456, vget_high_f32(vw4567), 1);

D | 3x3p1-minmax-wasmsimd-x86-loadsplat-6x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_6x4():
    168  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    177  vo5p0 = wasm_f32x4_add(vo5p0, wasm_f32x4_mul(vi5x3456, vk00));
    182  vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, vk10));
    187  vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, vk20));
    301  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    310  vo5p0 = wasm_f32x4_add(vo5p0, wasm_f32x4_mul(vi5x3456, vk00));
    315  vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, vk10));
    320  vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, vk20));

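In the WAsm SIMD kernels the window comes from wasm_v32x4_shuffle, whose lane indices 0-3 select from the first operand and 4-7 from the second, so indices 3, 4, 5, 6 yield {a[3], b[0], b[1], b[2]}. A standalone sketch follows; the data and harness are invented, and it assumes compilation for a wasm32 target with -msimd128.

  // Standalone sketch (invented data, wasm32 + -msimd128 assumed): shuffle two
  // adjacent 4-column loads into the columns-3..6 window.
  #include <wasm_simd128.h>
  #include <stdio.h>

  int main(void) {
    const float row[8] = {0.f, 1.f, 2.f, 3.f, 4.f, 5.f, 6.f, 7.f};
    const v128_t vi5x0123 = wasm_v128_load(row);      // columns 0..3
    const v128_t vi5x4567 = wasm_v128_load(row + 4);  // columns 4..7
    const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);

    float out[4];
    wasm_v128_store(out, vi5x3456);
    printf("%g %g %g %g\n", out[0], out[1], out[2], out[3]);  // prints 3 4 5 6
    return 0;
  }
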
D | 3x3p1-minmax-wasmsimd-arm-loadsplat-6x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_6x4():
    168  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    177  vo5p0 = wasm_f32x4_add(vo5p0, wasm_f32x4_mul(vi5x3456, vk00));
    182  vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, vk10));
    187  vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, vk20));
    301  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    310  vo5p0 = wasm_f32x4_add(vo5p0, wasm_f32x4_mul(vi5x3456, vk00));
    315  vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, vk10));
    320  vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, vk20));

D | 3x3p1-minmax-sse-6x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_6x4():
    200  const __m128 vi5x3456 = _mm_move_ss(vi5x7456, vi5x3012);  (local)
    211  vo5p0 = _mm_add_ps(vo5p0, _mm_mul_ps(vi5x3456, vk00));
    216  vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi5x3456, vk10));
    221  vo3p0 = _mm_add_ps(vo3p0, _mm_mul_ps(vi5x3456, vk20));
    384  const __m128 vi5x3456 = _mm_move_ss(vi5x7456, vi5x3012);  (local)
    395  vo5p0 = _mm_add_ps(vo5p0, _mm_mul_ps(vi5x3456, vk00));
    400  vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi5x3456, vk10));
    405  vo3p0 = _mm_add_ps(vo3p0, _mm_mul_ps(vi5x3456, vk20));

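The plain-SSE kernels have no byte-wise alignr, so they splice the window from two rotated registers with _mm_move_ss. Only the final _mm_move_ss appears in the excerpt; the sketch below assumes vi5x7456 and vi5x3012 are one-lane rotations produced with _mm_shuffle_ps, and uses invented data.

  // Standalone sketch (rotations and data assumed, not taken from the file):
  // _mm_move_ss(a, b) replaces lane 0 of a with lane 0 of b, giving {3,4,5,6}.
  #include <xmmintrin.h>  // SSE
  #include <stdio.h>

  int main(void) {
    const float row[8] = {0.f, 1.f, 2.f, 3.f, 4.f, 5.f, 6.f, 7.f};
    const __m128 vi5x0123 = _mm_loadu_ps(row);      // columns 0..3
    const __m128 vi5x4567 = _mm_loadu_ps(row + 4);  // columns 4..7
    // Rotate each register right by one lane: {3,0,1,2} and {7,4,5,6}.
    const __m128 vi5x3012 = _mm_shuffle_ps(vi5x0123, vi5x0123, _MM_SHUFFLE(2, 1, 0, 3));
    const __m128 vi5x7456 = _mm_shuffle_ps(vi5x4567, vi5x4567, _MM_SHUFFLE(2, 1, 0, 3));
    const __m128 vi5x3456 = _mm_move_ss(vi5x7456, vi5x3012);  // {3, 4, 5, 6}

    float out[4];
    _mm_storeu_ps(out, vi5x3456);
    printf("%g %g %g %g\n", out[0], out[1], out[2], out[3]);  // prints 3 4 5 6
    return 0;
  }
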
D | 3x3p1-minmax-wasmsimd-arm-loadsplat-5x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_loadsplat_5x4():
    154  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    166  vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, vk10));
    170  vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, vk20));
    269  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    281  vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, vk10));
    285  vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, vk20));

D | 3x3p1-minmax-neonfma-5x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_5x4():
    134  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    147  vo4p0 = vfmaq_lane_f32(vo4p0, vi5x3456, vget_low_f32(vw4567), 0);
    152  vo3p0 = vfmaq_lane_f32(vo3p0, vi5x3456, vget_high_f32(vw4567), 1);
    257  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    270  vo4p0 = vfmaq_lane_f32(vo4p0, vi5x3456, vget_low_f32(vw4567), 0);
    275  vo3p0 = vfmaq_lane_f32(vo3p0, vi5x3456, vget_high_f32(vw4567), 1);

D | 3x3p1-minmax-neon-5x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_5x4():
    134  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    147  vo4p0 = vmlaq_lane_f32(vo4p0, vi5x3456, vget_low_f32(vw4567), 0);
    152  vo3p0 = vmlaq_lane_f32(vo3p0, vi5x3456, vget_high_f32(vw4567), 1);
    257  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    270  vo4p0 = vmlaq_lane_f32(vo4p0, vi5x3456, vget_low_f32(vw4567), 0);
    275  vo3p0 = vmlaq_lane_f32(vo3p0, vi5x3456, vget_high_f32(vw4567), 1);

D | 3x3p1-minmax-wasmsimd-arm-splat-6x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_splat_6x4():
    150  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    159  …vo5p0 = wasm_f32x4_add(vo5p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw0123, vw0123, 1, 1, 1,…
    165  …vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw4567, vw4567, 0, 0, 0,…
    171  …vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw4567, vw4567, 3, 3, 3,…
    290  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    299  …vo5p0 = wasm_f32x4_add(vo5p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw0123, vw0123, 1, 1, 1,…
    305  …vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw4567, vw4567, 0, 0, 0,…
    311  …vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw4567, vw4567, 3, 3, 3,…

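The -splat- variants differ from the -loadsplat- ones in how the weight scalar is broadcast: instead of a preloaded vkXY register, a lane of the packed weight vector is splatted inline with wasm_v32x4_shuffle(vw, vw, n, n, n, n), as in the truncated multiply-add lines above. A standalone sketch of that broadcast-and-accumulate step; the weight and input values, the zero-initialized accumulator, and the harness are invented.

  // Standalone sketch (invented data): broadcast lane 1 of a packed weight
  // vector and accumulate acc += x * w[1].
  #include <wasm_simd128.h>
  #include <stdio.h>

  int main(void) {
    const float w[4] = {0.25f, 2.0f, -1.0f, 0.5f};  // packed weights, e.g. vw0123
    const float x[4] = {3.f, 4.f, 5.f, 6.f};        // e.g. vi5x3456
    const v128_t vw0123 = wasm_v128_load(w);
    const v128_t vi5x3456 = wasm_v128_load(x);
    v128_t vo5p0 = wasm_f32x4_splat(0.0f);          // accumulator (assumed start)

    vo5p0 = wasm_f32x4_add(vo5p0,
        wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw0123, vw0123, 1, 1, 1, 1)));

    float out[4];
    wasm_v128_store(out, vo5p0);
    printf("%g %g %g %g\n", out[0], out[1], out[2], out[3]);  // prints 6 8 10 12
    return 0;
  }
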
D | 3x3p1-minmax-wasmsimd-x86-splat-6x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_splat_6x4():
    150  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    159  …vo5p0 = wasm_f32x4_add(vo5p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw0123, vw0123, 1, 1, 1,…
    165  …vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw4567, vw4567, 0, 0, 0,…
    171  …vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw4567, vw4567, 3, 3, 3,…
    290  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    299  …vo5p0 = wasm_f32x4_add(vo5p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw0123, vw0123, 1, 1, 1,…
    305  …vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw4567, vw4567, 0, 0, 0,…
    311  …vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw4567, vw4567, 3, 3, 3,…

D | 3x3p1-minmax-wasmsimd-x86-loadsplat-5x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_loadsplat_5x4():
    154  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    166  vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, vk10));
    170  vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, vk20));
    269  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    281  vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, vk10));
    285  vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, vk20));

D | 3x3p1-minmax-ssse3-5x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_3x3p1__ssse3_5x4():
    147  …const __m128 vi5x3456 = _mm_castsi128_ps(_mm_alignr_epi8(_mm_castps_si128(vi5x4567), _mm_castps_si…  (local)
    159  vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi5x3456, vk10));
    163  vo3p0 = _mm_add_ps(vo3p0, _mm_mul_ps(vi5x3456, vk20));
    263  …const __m128 vi5x3456 = _mm_castsi128_ps(_mm_alignr_epi8(_mm_castps_si128(vi5x4567), _mm_castps_si…  (local)
    275  vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi5x3456, vk10));
    279  vo3p0 = _mm_add_ps(vo3p0, _mm_mul_ps(vi5x3456, vk20));

D | 3x3p1-minmax-sse-5x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_3x3p1__sse_5x4():
    182  const __m128 vi5x3456 = _mm_move_ss(vi5x7456, vi5x3012);  (local)
    195  vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi5x3456, vk10));
    199  vo3p0 = _mm_add_ps(vo3p0, _mm_mul_ps(vi5x3456, vk20));
    342  const __m128 vi5x3456 = _mm_move_ss(vi5x7456, vi5x3012);  (local)
    355  vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi5x3456, vk10));
    359  vo3p0 = _mm_add_ps(vo3p0, _mm_mul_ps(vi5x3456, vk20));

D | 5x5p2-minmax-neon-4x4-acc2.c | all hits in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2():
    146  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    163  vo3p1 = vmlaq_lane_f32(vo3p1, vi5x3456, vget_low_f32(vwCDEF), 0);
    167  vo2p0 = vmlaq_lane_f32(vo2p0, vi5x3456, vget_low_f32(vwGHIJ), 1);
    171  vo1p1 = vmlaq_lane_f32(vo1p1, vi5x3456, vget_high_f32(vwKLMN), 0);
    368  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    385  vo3p1 = vmlaq_lane_f32(vo3p1, vi5x3456, vget_low_f32(vwCDEF), 0);
    389  vo2p0 = vmlaq_lane_f32(vo2p0, vi5x3456, vget_low_f32(vwGHIJ), 1);
    393  vo1p1 = vmlaq_lane_f32(vo1p1, vi5x3456, vget_high_f32(vwKLMN), 0);
    584  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    601  vo3p1 = vmlaq_lane_f32(vo3p1, vi5x3456, vget_low_f32(vwCDEF), 0);
    [all …]

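The 5x5 kernels above multiply the same input window by scalar lanes of packed weight registers (vwCDEF, vwGHIJ, vwKLMN) via vmlaq_lane_f32 / vfmaq_lane_f32, and the -acc2 suffix suggests two partial accumulators per output row that are summed once at the end. The sketch below illustrates that pattern; the weight and input values are invented, and the final vaddq_f32 is an assumption since it is not visible in the excerpt.

  // Standalone sketch (invented data; final combine assumed): lane-indexed
  // multiply-accumulate into two partial sums, then combine.
  #include <arm_neon.h>
  #include <stdio.h>

  int main(void) {
    const float w[8] = {0.5f, 2.0f, -1.0f, 3.0f, 1.0f, 0.25f, -0.5f, 4.0f};
    const float x[4] = {3.f, 4.f, 5.f, 6.f};
    const float32x4_t vwCDEF = vld1q_f32(w);
    const float32x4_t vwGHIJ = vld1q_f32(w + 4);
    const float32x4_t vi5x3456 = vld1q_f32(x);

    float32x4_t vo3p0 = vdupq_n_f32(0.0f);  // first partial accumulator
    float32x4_t vo3p1 = vdupq_n_f32(0.0f);  // second partial accumulator

    // Each tap scales the window by one lane of a packed weight register.
    vo3p1 = vmlaq_lane_f32(vo3p1, vi5x3456, vget_low_f32(vwCDEF), 0);  // * w[0]
    vo3p0 = vmlaq_lane_f32(vo3p0, vi5x3456, vget_low_f32(vwGHIJ), 1);  // * w[5]

    const float32x4_t vo3 = vaddq_f32(vo3p0, vo3p1);  // combine partial sums

    float out[4];
    vst1q_f32(out, vo3);
    printf("%g %g %g %g\n", out[0], out[1], out[2], out[3]);  // 2.25 3 3.75 4.5
    return 0;
  }
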
D | 5x5p2-minmax-neon-4x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4():
    146  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    163  vo3p0 = vmlaq_lane_f32(vo3p0, vi5x3456, vget_low_f32(vwCDEF), 0);
    167  vo2p0 = vmlaq_lane_f32(vo2p0, vi5x3456, vget_low_f32(vwGHIJ), 1);
    171  vo1p0 = vmlaq_lane_f32(vo1p0, vi5x3456, vget_high_f32(vwKLMN), 0);
    364  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    381  vo3p0 = vmlaq_lane_f32(vo3p0, vi5x3456, vget_low_f32(vwCDEF), 0);
    385  vo2p0 = vmlaq_lane_f32(vo2p0, vi5x3456, vget_low_f32(vwGHIJ), 1);
    389  vo1p0 = vmlaq_lane_f32(vo1p0, vi5x3456, vget_high_f32(vwKLMN), 0);
    576  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    593  vo3p0 = vmlaq_lane_f32(vo3p0, vi5x3456, vget_low_f32(vwCDEF), 0);
    [all …]

D | 5x5p2-minmax-neonfma-4x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4():
    146  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    163  vo3p0 = vfmaq_lane_f32(vo3p0, vi5x3456, vget_low_f32(vwCDEF), 0);
    167  vo2p0 = vfmaq_lane_f32(vo2p0, vi5x3456, vget_low_f32(vwGHIJ), 1);
    171  vo1p0 = vfmaq_lane_f32(vo1p0, vi5x3456, vget_high_f32(vwKLMN), 0);
    364  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    381  vo3p0 = vfmaq_lane_f32(vo3p0, vi5x3456, vget_low_f32(vwCDEF), 0);
    385  vo2p0 = vfmaq_lane_f32(vo2p0, vi5x3456, vget_low_f32(vwGHIJ), 1);
    389  vo1p0 = vfmaq_lane_f32(vo1p0, vi5x3456, vget_high_f32(vwKLMN), 0);
    576  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    593  vo3p0 = vfmaq_lane_f32(vo3p0, vi5x3456, vget_low_f32(vwCDEF), 0);
    [all …]

D | 5x5p2-minmax-neonfma-4x4-acc2.c | all hits in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2():
    146  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    163  vo3p1 = vfmaq_lane_f32(vo3p1, vi5x3456, vget_low_f32(vwCDEF), 0);
    167  vo2p0 = vfmaq_lane_f32(vo2p0, vi5x3456, vget_low_f32(vwGHIJ), 1);
    171  vo1p1 = vfmaq_lane_f32(vo1p1, vi5x3456, vget_high_f32(vwKLMN), 0);
    368  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    385  vo3p1 = vfmaq_lane_f32(vo3p1, vi5x3456, vget_low_f32(vwCDEF), 0);
    389  vo2p0 = vfmaq_lane_f32(vo2p0, vi5x3456, vget_low_f32(vwGHIJ), 1);
    393  vo1p1 = vfmaq_lane_f32(vo1p1, vi5x3456, vget_high_f32(vwKLMN), 0);
    584  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    601  vo3p1 = vfmaq_lane_f32(vo3p1, vi5x3456, vget_low_f32(vwCDEF), 0);
    [all …]

D | 5x5p2-minmax-neonfma-5x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_5x4():
    161  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    176  vo4p0 = vfmaq_lane_f32(vo4p0, vi5x3456, vget_high_f32(vw4567), 1);
    181  vo3p0 = vfmaq_lane_f32(vo3p0, vi5x3456, vget_low_f32(vwCDEF), 0);
    186  vo2p0 = vfmaq_lane_f32(vo2p0, vi5x3456, vget_low_f32(vwGHIJ), 1);
    191  vo1p0 = vfmaq_lane_f32(vo1p0, vi5x3456, vget_high_f32(vwKLMN), 0);
    416  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    431  vo4p0 = vfmaq_lane_f32(vo4p0, vi5x3456, vget_high_f32(vw4567), 1);
    436  vo3p0 = vfmaq_lane_f32(vo3p0, vi5x3456, vget_low_f32(vwCDEF), 0);
    441  vo2p0 = vfmaq_lane_f32(vo2p0, vi5x3456, vget_low_f32(vwGHIJ), 1);
    446  vo1p0 = vfmaq_lane_f32(vo1p0, vi5x3456, vget_high_f32(vwKLMN), 0);
    [all …]

D | 5x5p2-minmax-neon-5x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_5x4():
    161  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    176  vo4p0 = vmlaq_lane_f32(vo4p0, vi5x3456, vget_high_f32(vw4567), 1);
    181  vo3p0 = vmlaq_lane_f32(vo3p0, vi5x3456, vget_low_f32(vwCDEF), 0);
    186  vo2p0 = vmlaq_lane_f32(vo2p0, vi5x3456, vget_low_f32(vwGHIJ), 1);
    191  vo1p0 = vmlaq_lane_f32(vo1p0, vi5x3456, vget_high_f32(vwKLMN), 0);
    416  const float32x4_t vi5x3456 = vextq_f32(vi5x0123, vi5x4567, 3);  (local)
    431  vo4p0 = vmlaq_lane_f32(vo4p0, vi5x3456, vget_high_f32(vw4567), 1);
    436  vo3p0 = vmlaq_lane_f32(vo3p0, vi5x3456, vget_low_f32(vwCDEF), 0);
    441  vo2p0 = vmlaq_lane_f32(vo2p0, vi5x3456, vget_low_f32(vwGHIJ), 1);
    446  vo1p0 = vmlaq_lane_f32(vo1p0, vi5x3456, vget_high_f32(vwKLMN), 0);
    [all …]

D | 5x5p2-minmax-wasmsimd-arm-loadsplat-5x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4():
    190  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    205  vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, vk11));
    210  vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, vk21));
    215  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi5x3456, vk31));
    220  vo1p0 = wasm_f32x4_add(vo1p0, wasm_f32x4_mul(vi5x3456, vk41));
    444  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    459  vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, vk11));
    464  vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, vk21));
    469  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi5x3456, vk31));
    474  vo1p0 = wasm_f32x4_add(vo1p0, wasm_f32x4_mul(vi5x3456, vk41));
    [all …]

D | 5x5p2-minmax-wasmsimd-x86-loadsplat-5x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_5x4():
    190  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    205  vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, vk11));
    210  vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, vk21));
    215  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi5x3456, vk31));
    220  vo1p0 = wasm_f32x4_add(vo1p0, wasm_f32x4_mul(vi5x3456, vk41));
    444  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    459  vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, vk11));
    464  vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, vk21));
    469  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi5x3456, vk31));
    474  vo1p0 = wasm_f32x4_add(vo1p0, wasm_f32x4_mul(vi5x3456, vk41));
    [all …]

D | 5x5p2-minmax-sse-5x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_5x5p2__sse_5x4():
    198  const __m128 vi5x3456 = _mm_move_ss(vi5x7456, vi5x3012);  (local)
    212  vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi5x3456, vk11));
    216  vo3p0 = _mm_add_ps(vo3p0, _mm_mul_ps(vi5x3456, vk21));
    220  vo2p0 = _mm_add_ps(vo2p0, _mm_mul_ps(vi5x3456, vk31));
    224  vo1p0 = _mm_add_ps(vo1p0, _mm_mul_ps(vi5x3456, vk41));
    451  const __m128 vi5x3456 = _mm_move_ss(vi5x7456, vi5x3012);  (local)
    465  vo4p0 = _mm_add_ps(vo4p0, _mm_mul_ps(vi5x3456, vk11));
    469  vo3p0 = _mm_add_ps(vo3p0, _mm_mul_ps(vi5x3456, vk21));
    473  vo2p0 = _mm_add_ps(vo2p0, _mm_mul_ps(vi5x3456, vk31));
    477  vo1p0 = _mm_add_ps(vo1p0, _mm_mul_ps(vi5x3456, vk41));
    [all …]

D | 3x3p1-minmax-wasmsimd-arm-splat-5x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_arm_splat_5x4():
    137  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    150  …vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw4567, vw4567, 0, 0, 0,…
    155  …vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw4567, vw4567, 3, 3, 3,…
    259  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    272  …vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw4567, vw4567, 0, 0, 0,…
    277  …vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw4567, vw4567, 3, 3, 3,…

D | 3x3p1-minmax-wasmsimd-x86-splat-5x4.c | all hits in xnn_f32_dwconv2d_chw_ukernel_3x3p1__wasmsimd_x86_splat_5x4():
    137  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    150  …vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw4567, vw4567, 0, 0, 0,…
    155  …vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw4567, vw4567, 3, 3, 3,…
    259  const v128_t vi5x3456 = wasm_v32x4_shuffle(vi5x0123, vi5x4567, 3, 4, 5, 6);  (local)
    272  …vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw4567, vw4567, 0, 0, 0,…
    277  …vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi5x3456, wasm_v32x4_shuffle(vw4567, vw4567, 3, 3, 3,…
