Lines Matching refs:wasm_f32x4_sub

All matches are inside xnn_f32_sigmoid_ukernel__wasmsimd_lut64_p2_div_x16():

 98   vn0123 = wasm_f32x4_sub(vn0123, vmagic_bias);
 99   vn4567 = wasm_f32x4_sub(vn4567, vmagic_bias);
100   vn89AB = wasm_f32x4_sub(vn89AB, vmagic_bias);
101   vnCDEF = wasm_f32x4_sub(vnCDEF, vmagic_bias);
118   vp0123 = wasm_f32x4_sub(vt0123, wasm_f32x4_mul(vp0123, vt0123));
119   vp4567 = wasm_f32x4_sub(vt4567, wasm_f32x4_mul(vp4567, vt4567));
120   vp89AB = wasm_f32x4_sub(vt89AB, wasm_f32x4_mul(vp89AB, vt89AB));
121   vpCDEF = wasm_f32x4_sub(vtCDEF, wasm_f32x4_mul(vpCDEF, vtCDEF));
123   const v128_t vy0123 = wasm_f32x4_sub(vs0123, wasm_f32x4_mul(vs0123, vp0123));
124   const v128_t vy4567 = wasm_f32x4_sub(vs4567, wasm_f32x4_mul(vs4567, vp4567));
125   const v128_t vy89AB = wasm_f32x4_sub(vs89AB, wasm_f32x4_mul(vs89AB, vp89AB));
126   const v128_t vyCDEF = wasm_f32x4_sub(vsCDEF, wasm_f32x4_mul(vsCDEF, vpCDEF));
143   vf0123 = wasm_v128_bitselect(vf0123, wasm_f32x4_sub(vone, vf0123), wasm_i32x4_shr(vx0123, 31));
144   vf4567 = wasm_v128_bitselect(vf4567, wasm_f32x4_sub(vone, vf4567), wasm_i32x4_shr(vx4567, 31));
145   vf89AB = wasm_v128_bitselect(vf89AB, wasm_f32x4_sub(vone, vf89AB), wasm_i32x4_shr(vx89AB, 31));
146   vfCDEF = wasm_v128_bitselect(vfCDEF, wasm_f32x4_sub(vone, vfCDEF), wasm_i32x4_shr(vxCDEF, 31));
173   vn = wasm_f32x4_sub(vn, vmagic_bias);
179   vp = wasm_f32x4_sub(vt, wasm_f32x4_mul(vp, vt));
181   const v128_t vy = wasm_f32x4_sub(vs, wasm_f32x4_mul(vs, vp));
186   vf = wasm_v128_bitselect(vf, wasm_f32x4_sub(vone, vf), wasm_i32x4_shr(vx, 31));
209   vn = wasm_f32x4_sub(vn, vmagic_bias);
215   vp = wasm_f32x4_sub(vt, wasm_f32x4_mul(vp, vt));
217   const v128_t vy = wasm_f32x4_sub(vs, wasm_f32x4_mul(vs, vp));
222   vf = wasm_v128_bitselect(vf, wasm_f32x4_sub(vone, vf), wasm_i32x4_shr(vx, 31));
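Context for the matches above: wasm_f32x4_sub is the WebAssembly SIMD128 intrinsic (from <wasm_simd128.h>) for lane-wise single-precision subtraction. In this kernel it appears in three recurring shapes: removing the magic bias from the rounded exponent term (lines 98-101, 173, 209), evaluating the short polynomial as t - p*t (lines 118-121, 179, 215) and reconstructing the result as s - s*p (lines 123-126, 181, 217), and forming 1 - f for the sign-symmetric half of the inputs before the bitselect on the sign bit (lines 143-146, 186, 222). The baseline wasm SIMD instruction set has no fused negated multiply-add, so each of these is spelled as a wasm_f32x4_mul followed by a wasm_f32x4_sub. The sketch below is a minimal, standalone illustration of that a - b*c pattern using the same intrinsics; the lane values are made up for the example and are not taken from the kernel.

// Minimal sketch of the "a - b*c" shape used throughout the kernel, written
// with the same wasm SIMD intrinsics. Build for WebAssembly with SIMD enabled,
// e.g. `emcc -msimd128 example.c`. Values are illustrative, not from XNNPACK.
#include <stdio.h>
#include <wasm_simd128.h>

int main(void) {
  // Four arbitrary lanes standing in for vt (the reduced argument).
  const v128_t vt = wasm_f32x4_make(0.10f, -0.25f, 0.50f, -0.75f);
  // Four arbitrary lanes standing in for vp (the polynomial accumulator).
  v128_t vp = wasm_f32x4_make(0.30f, 0.30f, 0.30f, 0.30f);

  // The pattern from lines 118-121 / 179 / 215: p <- t - p*t.
  // No fused negated multiply-add in baseline wasm SIMD, so it is a
  // lane-wise multiply followed by a lane-wise subtract.
  vp = wasm_f32x4_sub(vt, wasm_f32x4_mul(vp, vt));

  // Store and print the four lanes to observe the result.
  float out[4];
  wasm_v128_store(out, vp);
  printf("%f %f %f %f\n", out[0], out[1], out[2], out[3]);
  return 0;
}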