• Home
  • Raw
  • Download

Lines Matching refs:vmlaq_f32

63 float32x4_t vn0123 = vmlaq_f32(vmagic_bias, vx0123, vlog2e); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
64 float32x4_t vn4567 = vmlaq_f32(vmagic_bias, vx4567, vlog2e); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
65 float32x4_t vn89AB = vmlaq_f32(vmagic_bias, vx89AB, vlog2e); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
66 float32x4_t vnCDEF = vmlaq_f32(vmagic_bias, vxCDEF, vlog2e); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
83 float32x4_t vt0123 = vmlaq_f32(vx0123, vn0123, vminus_ln2_hi); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
84 float32x4_t vt4567 = vmlaq_f32(vx4567, vn4567, vminus_ln2_hi); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
85 float32x4_t vt89AB = vmlaq_f32(vx89AB, vn89AB, vminus_ln2_hi); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
86 float32x4_t vtCDEF = vmlaq_f32(vxCDEF, vnCDEF, vminus_ln2_hi); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
88 vt0123 = vmlaq_f32(vt0123, vn0123, vminus_ln2_lo); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
89 vt4567 = vmlaq_f32(vt4567, vn4567, vminus_ln2_lo); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
90 vt89AB = vmlaq_f32(vt89AB, vn89AB, vminus_ln2_lo); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
91 vtCDEF = vmlaq_f32(vtCDEF, vnCDEF, vminus_ln2_lo); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
94 float32x4_t vp0123 = vmlaq_f32(vc4, vc5, vt0123); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
95 float32x4_t vp4567 = vmlaq_f32(vc4, vc5, vt4567); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
96 float32x4_t vp89AB = vmlaq_f32(vc4, vc5, vt89AB); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
97 float32x4_t vpCDEF = vmlaq_f32(vc4, vc5, vtCDEF); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
99 vp0123 = vmlaq_f32(vc3, vp0123, vt0123); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
100 vp4567 = vmlaq_f32(vc3, vp4567, vt4567); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
101 vp89AB = vmlaq_f32(vc3, vp89AB, vt89AB); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
102 vpCDEF = vmlaq_f32(vc3, vpCDEF, vtCDEF); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
104 vp0123 = vmlaq_f32(vc2, vp0123, vt0123); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
105 vp4567 = vmlaq_f32(vc2, vp4567, vt4567); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
106 vp89AB = vmlaq_f32(vc2, vp89AB, vt89AB); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
107 vpCDEF = vmlaq_f32(vc2, vpCDEF, vtCDEF); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
109 vp0123 = vmlaq_f32(vc1, vp0123, vt0123); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
110 vp4567 = vmlaq_f32(vc1, vp4567, vt4567); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
111 vp89AB = vmlaq_f32(vc1, vp89AB, vt89AB); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
112 vpCDEF = vmlaq_f32(vc1, vpCDEF, vtCDEF); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
123 float32x4_t vf0123 = vmlaq_f32(vs0123, vp0123, vt0123); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
124 float32x4_t vf4567 = vmlaq_f32(vs4567, vp4567, vt4567); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
125 float32x4_t vf89AB = vmlaq_f32(vs89AB, vp89AB, vt89AB); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
126 float32x4_t vfCDEF = vmlaq_f32(vsCDEF, vpCDEF, vtCDEF); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
162 float32x4_t vn = vmlaq_f32(vmagic_bias, vx, vlog2e); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
173 float32x4_t vt = vmlaq_f32(vx, vn, vminus_ln2_hi); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
174 vt = vmlaq_f32(vt, vn, vminus_ln2_lo); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
177 float32x4_t vp = vmlaq_f32(vc4, vc5, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
178 vp = vmlaq_f32(vc3, vp, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
179 vp = vmlaq_f32(vc2, vp, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
180 vp = vmlaq_f32(vc1, vp, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
187 float32x4_t vf = vmlaq_f32(vs, vp, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
219 float32x4_t vn = vmlaq_f32(vmagic_bias, vx, vlog2e); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
230 float32x4_t vt = vmlaq_f32(vx, vn, vminus_ln2_hi); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
231 vt = vmlaq_f32(vt, vn, vminus_ln2_lo); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
234 float32x4_t vp = vmlaq_f32(vc4, vc5, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
235 vp = vmlaq_f32(vc3, vp, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
236 vp = vmlaq_f32(vc2, vp, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
237 vp = vmlaq_f32(vc1, vp, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()
244 float32x4_t vf = vmlaq_f32(vs, vp, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16()