Lines Matching refs:vi8x4567
135 v128_t vi8x4567 = wasm_v128_load(i8); i8 += 4; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4() local
183 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi8x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
193 const v128_t vi8x3456 = wasm_v32x4_shuffle(vi8x0123, vi8x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
241 const v128_t vi8x2345 = wasm_v32x4_shuffle(vi8x0123, vi8x4567, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
242 vi8x0123 = vi8x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
282 const v128_t vi8x5678 = wasm_v32x4_shuffle(vi8x4567, vi8x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
330 const v128_t vi8x6789 = wasm_v32x4_shuffle(vi8x4567, vi8x89AB, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
331 vi8x4567 = vi8x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
437 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi8x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
447 const v128_t vi8x3456 = wasm_v32x4_shuffle(vi8x0123, vi8x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
495 const v128_t vi8x2345 = wasm_v32x4_shuffle(vi8x0123, vi8x4567, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
496 vi8x0123 = vi8x4567; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
536 const v128_t vi8x5678 = wasm_v32x4_shuffle(vi8x4567, vi8x89AB, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
584 const v128_t vi8x6789 = wasm_v32x4_shuffle(vi8x4567, vi8x89AB, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
585 vi8x4567 = vi8x89AB; in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
654 vi8x4567 = wasm_v128_and(vmask, vi8x4567); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
684 vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi8x4567, vk42)); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
694 const v128_t vi8x3456 = wasm_v32x4_shuffle(vi8x0123, vi8x4567, 3, 4, 5, 6); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
734 const v128_t vi8x2345 = wasm_v32x4_shuffle(vi8x0123, vi8x4567, 2, 3, 4, 5); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
774 const v128_t vi8x5678 = wasm_v32x4_shuffle(vi8x4567, vzero, 1, 2, 3, 4); in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4()
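Taken together, these references trace the rotating four-lane window that the 5x5p2 kernel keeps for input row 8 (the row feeding the fifth output row of the 5x4 block): load the next four pixels, multiply the current block by the center weight vk42, build the shifted views vi8x3456/vi8x2345/vi8x5678/vi8x6789 with shuffles, then rotate vi8x0123 <- vi8x4567 <- vi8x89AB. The sketch below is a minimal illustration of that pattern only, not the kernel itself; the companion weights vk40, vk41, vk43, vk44 and the accumulator name vacc are assumed here (only vk42, vi8x0123, vi8x89AB, vmask and vzero are visible in the listing above), and the real kernel interleaves these steps across all nine input rows.

    #include <wasm_simd128.h>

    // Sketch of one window step for input row 8 of the 5x5 depthwise kernel.
    // Assumed names: vk40..vk44 are the five splatted weights of kernel row 4,
    // vacc is the running accumulator (vo4p0 in the kernel).
    static v128_t accumulate_row8_step(
        const float** i8_ptr,   // pointer into input row 8, advanced by 4 floats
        v128_t* vi8x0123,       // previous 4 pixels of row 8
        v128_t* vi8x4567,       // current 4 pixels of row 8
        v128_t vacc,
        v128_t vk40, v128_t vk41, v128_t vk42, v128_t vk43, v128_t vk44)
    {
      // Load the next 4 pixels of row 8 and advance the pointer.
      const v128_t vi8x89AB = wasm_v128_load(*i8_ptr);
      *i8_ptr += 4;

      // Center tap: current pixels times kernel weight (row 4, column 2).
      vacc = wasm_f32x4_add(vacc, wasm_f32x4_mul(*vi8x4567, vk42));

      // Left taps: window shifted left by one and by two pixels.
      const v128_t vi8x3456 = wasm_v32x4_shuffle(*vi8x0123, *vi8x4567, 3, 4, 5, 6);
      const v128_t vi8x2345 = wasm_v32x4_shuffle(*vi8x0123, *vi8x4567, 2, 3, 4, 5);
      vacc = wasm_f32x4_add(vacc, wasm_f32x4_mul(vi8x3456, vk41));
      vacc = wasm_f32x4_add(vacc, wasm_f32x4_mul(vi8x2345, vk40));

      // Right taps: window shifted right by one and by two pixels.
      const v128_t vi8x5678 = wasm_v32x4_shuffle(*vi8x4567, vi8x89AB, 1, 2, 3, 4);
      const v128_t vi8x6789 = wasm_v32x4_shuffle(*vi8x4567, vi8x89AB, 2, 3, 4, 5);
      vacc = wasm_f32x4_add(vacc, wasm_f32x4_mul(vi8x5678, vk43));
      vacc = wasm_f32x4_add(vacc, wasm_f32x4_mul(vi8x6789, vk44));

      // Rotate the window: the current block becomes the previous block and the
      // freshly loaded block becomes the current one for the next 4 outputs.
      *vi8x0123 = *vi8x4567;
      *vi8x4567 = vi8x89AB;

      return vacc;
    }

In the tail path visible at lines 654 and 774, the same window is first masked with vmask and the right-hand neighbour is taken from vzero instead of a fresh load, so the shuffle pattern itself stays unchanged at the row's edge.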