• Home
  • Raw
  • Download

Lines Matching refs:wasm_i32x4_add

98 vacc0x0 = wasm_i32x4_add(vacc0x0, wasm_i32x4_widen_low_i16x8(vprod0x0)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
99 vacc0x0 = wasm_i32x4_add(vacc0x0, wasm_i32x4_widen_high_i16x8(vprod0x0)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
101 vacc1x0 = wasm_i32x4_add(vacc1x0, wasm_i32x4_widen_low_i16x8(vprod1x0)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
102 vacc1x0 = wasm_i32x4_add(vacc1x0, wasm_i32x4_widen_high_i16x8(vprod1x0)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
104 vacc2x0 = wasm_i32x4_add(vacc2x0, wasm_i32x4_widen_low_i16x8(vprod2x0)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
105 vacc2x0 = wasm_i32x4_add(vacc2x0, wasm_i32x4_widen_high_i16x8(vprod2x0)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
109 vacc0x1 = wasm_i32x4_add(vacc0x1, wasm_i32x4_widen_low_i16x8(vprod0x1)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
110 vacc0x1 = wasm_i32x4_add(vacc0x1, wasm_i32x4_widen_high_i16x8(vprod0x1)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
112 vacc1x1 = wasm_i32x4_add(vacc1x1, wasm_i32x4_widen_low_i16x8(vprod1x1)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
113 vacc1x1 = wasm_i32x4_add(vacc1x1, wasm_i32x4_widen_high_i16x8(vprod1x1)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
115 vacc2x1 = wasm_i32x4_add(vacc2x1, wasm_i32x4_widen_low_i16x8(vprod2x1)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
116 vacc2x1 = wasm_i32x4_add(vacc2x1, wasm_i32x4_widen_high_i16x8(vprod2x1)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
120 vacc0x2 = wasm_i32x4_add(vacc0x2, wasm_i32x4_widen_low_i16x8(vprod0x2)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
121 vacc0x2 = wasm_i32x4_add(vacc0x2, wasm_i32x4_widen_high_i16x8(vprod0x2)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
123 vacc1x2 = wasm_i32x4_add(vacc1x2, wasm_i32x4_widen_low_i16x8(vprod1x2)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
124 vacc1x2 = wasm_i32x4_add(vacc1x2, wasm_i32x4_widen_high_i16x8(vprod1x2)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
126 vacc2x2 = wasm_i32x4_add(vacc2x2, wasm_i32x4_widen_low_i16x8(vprod2x2)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
127 vacc2x2 = wasm_i32x4_add(vacc2x2, wasm_i32x4_widen_high_i16x8(vprod2x2)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
131 vacc0x3 = wasm_i32x4_add(vacc0x3, wasm_i32x4_widen_low_i16x8(vprod0x3)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
132 vacc0x3 = wasm_i32x4_add(vacc0x3, wasm_i32x4_widen_high_i16x8(vprod0x3)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
134 vacc1x3 = wasm_i32x4_add(vacc1x3, wasm_i32x4_widen_low_i16x8(vprod1x3)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
135 vacc1x3 = wasm_i32x4_add(vacc1x3, wasm_i32x4_widen_high_i16x8(vprod1x3)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
137 vacc2x3 = wasm_i32x4_add(vacc2x3, wasm_i32x4_widen_low_i16x8(vprod2x3)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
138 vacc2x3 = wasm_i32x4_add(vacc2x3, wasm_i32x4_widen_high_i16x8(vprod2x3)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
146 …const v128_t vacc0x02 = wasm_i32x4_add(wasm_v32x4_shuffle(vacc0x0, vacc0x2, 0, 4, 1, 5), wasm_v32x… in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
147 …const v128_t vacc0x13 = wasm_i32x4_add(wasm_v32x4_shuffle(vacc0x1, vacc0x3, 0, 4, 1, 5), wasm_v32x… in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
148 …const v128_t vacc1x02 = wasm_i32x4_add(wasm_v32x4_shuffle(vacc1x0, vacc1x2, 0, 4, 1, 5), wasm_v32x… in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
149 …const v128_t vacc1x13 = wasm_i32x4_add(wasm_v32x4_shuffle(vacc1x1, vacc1x3, 0, 4, 1, 5), wasm_v32x… in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
150 …const v128_t vacc2x02 = wasm_i32x4_add(wasm_v32x4_shuffle(vacc2x0, vacc2x2, 0, 4, 1, 5), wasm_v32x… in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
151 …const v128_t vacc2x13 = wasm_i32x4_add(wasm_v32x4_shuffle(vacc2x1, vacc2x3, 0, 4, 1, 5), wasm_v32x… in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
153 …v128_t vacc0x0123 = wasm_i32x4_add(wasm_v32x4_shuffle(vacc0x02, vacc0x13, 0, 4, 1, 5), wasm_v32x4_… in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
154 …v128_t vacc1x0123 = wasm_i32x4_add(wasm_v32x4_shuffle(vacc1x02, vacc1x13, 0, 4, 1, 5), wasm_v32x4_… in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
155 …v128_t vacc2x0123 = wasm_i32x4_add(wasm_v32x4_shuffle(vacc2x02, vacc2x13, 0, 4, 1, 5), wasm_v32x4_… in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
183 …const v128_t vrem0x0123 = wasm_i32x4_add(wasm_v128_and(vq31prod0x0123, vremainder_mask), wasm_i32x… in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
184 …const v128_t vrem1x0123 = wasm_i32x4_add(wasm_v128_and(vq31prod1x0123, vremainder_mask), wasm_i32x… in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()
185 …const v128_t vrem2x0123 = wasm_i32x4_add(wasm_v128_and(vq31prod2x0123, vremainder_mask), wasm_i32x… in xnn_qs8_igemm_minmax_ukernel_3x4c8__wasmsimd_ld64()