Lines Matching refs:vmlaq_f32
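
All matches below are in the NEON P5 variant of the xnn_f32_raddstoreexpminusmax microkernel; from the source line numbers they appear to cover an 8-element main loop (lines 59-100) and two single-vector tail paths (lines 130-155 and 187-212). vmlaq_f32 is the ARM NEON multiply-accumulate intrinsic: vmlaq_f32(a, b, c) returns a + b * c lane-wise on four floats. A minimal illustration of that semantics (not taken from the kernel, the function name is hypothetical):

#include <arm_neon.h>

// acc[i] + x[i] * y[i] for each of the four lanes.
float32x4_t multiply_accumulate(float32x4_t acc, float32x4_t x, float32x4_t y) {
  return vmlaq_f32(acc, x, y);
}
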

59 float32x4_t vn0123 = vmlaq_f32(vmagic_bias, vx0123, vlog2e); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
60 float32x4_t vn4567 = vmlaq_f32(vmagic_bias, vx4567, vlog2e); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
73 float32x4_t vt0123 = vmlaq_f32(vx0123, vn0123, vminus_ln2_hi); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
74 float32x4_t vt4567 = vmlaq_f32(vx4567, vn4567, vminus_ln2_hi); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
76 vt0123 = vmlaq_f32(vt0123, vn0123, vminus_ln2_lo); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
77 vt4567 = vmlaq_f32(vt4567, vn4567, vminus_ln2_lo); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
80 float32x4_t vp0123 = vmlaq_f32(vc4, vc5, vt0123); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
81 float32x4_t vp4567 = vmlaq_f32(vc4, vc5, vt4567); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
83 vp0123 = vmlaq_f32(vc3, vp0123, vt0123); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
84 vp4567 = vmlaq_f32(vc3, vp4567, vt4567); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
86 vp0123 = vmlaq_f32(vc2, vp0123, vt0123); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
87 vp4567 = vmlaq_f32(vc2, vp4567, vt4567); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
89 vp0123 = vmlaq_f32(vc1, vp0123, vt0123); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
90 vp4567 = vmlaq_f32(vc1, vp4567, vt4567); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
99 float32x4_t vf0123 = vmlaq_f32(vs0123, vp0123, vt0123); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
100 float32x4_t vf4567 = vmlaq_f32(vs4567, vp4567, vt4567); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
130 float32x4_t vn = vmlaq_f32(vmagic_bias, vx, vlog2e); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
141 float32x4_t vt = vmlaq_f32(vx, vn, vminus_ln2_hi); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
142 vt = vmlaq_f32(vt, vn, vminus_ln2_lo); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
145 float32x4_t vp = vmlaq_f32(vc4, vc5, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
146 vp = vmlaq_f32(vc3, vp, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
147 vp = vmlaq_f32(vc2, vp, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
148 vp = vmlaq_f32(vc1, vp, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
155 float32x4_t vf = vmlaq_f32(vs, vp, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
187 float32x4_t vn = vmlaq_f32(vmagic_bias, vx, vlog2e); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
198 float32x4_t vt = vmlaq_f32(vx, vn, vminus_ln2_hi); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
199 vt = vmlaq_f32(vt, vn, vminus_ln2_lo); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
202 float32x4_t vp = vmlaq_f32(vc4, vc5, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
203 vp = vmlaq_f32(vc3, vp, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
204 vp = vmlaq_f32(vc2, vp, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
205 vp = vmlaq_f32(vc1, vp, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
212 float32x4_t vf = vmlaq_f32(vs, vp, vt); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
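
Read in order, the matches outline the usual expf evaluation in this family of kernels: round x*log2(e) to an integer n via a magic-bias add (lines 59-60, 130, 187), reduce t = x - n*ln2 with a split Cody-Waite hi/lo ln2 (73-77, 141-142, 198-199), evaluate a degree-5 polynomial approximation of exp(t) by Horner's rule (80-90, 145-148, 202-205), and reconstruct s * (1 + t*p(t)) with s = 2**n (99-100, 155, 212). The sketch below assembles those steps for one vector. The numeric constants and the steps that do not reference vmlaq_f32 (the exponent shift that builds s, the bias subtraction, and the t *= s multiply) are not visible in this listing, so they are filled in as representative values and assumed glue, not quoted from the kernel.

#include <arm_neon.h>

// Sketch of exp(x) for one float32x4_t, following the structure of the
// matches above.  Constant values are representative of a p5 expf
// approximation, NOT read from the kernel (only the names appear here).
static float32x4_t exp_p5_sketch(float32x4_t vx) {
  const float32x4_t vmagic_bias   = vdupq_n_f32(0x1.8000FEp23f);
  const float32x4_t vlog2e        = vdupq_n_f32(0x1.715476p+0f);
  const float32x4_t vminus_ln2_hi = vdupq_n_f32(-0x1.62E43p-1f);
  const float32x4_t vminus_ln2_lo = vdupq_n_f32(0x1.05C61p-29f);
  const float32x4_t vc5 = vdupq_n_f32(0x1.0F9F9Cp-7f);
  const float32x4_t vc4 = vdupq_n_f32(0x1.573A1Ap-5f);
  const float32x4_t vc3 = vdupq_n_f32(0x1.555A80p-3f);
  const float32x4_t vc2 = vdupq_n_f32(0x1.FFFDC6p-2f);
  const float32x4_t vc1 = vdupq_n_f32(0x1.FFFFF6p-1f);

  // n = round(x * log2(e)): the magic bias pushes the rounded integer into
  // the low mantissa bits (cf. lines 59-60, 130, 187).
  float32x4_t vn = vmlaq_f32(vmagic_bias, vx, vlog2e);

  // s = 2**n: shift those bits into the exponent field, then strip the bias
  // from n.  These steps use no vmlaq_f32, so they do not appear above and
  // are assumed here.
  const float32x4_t vs =
      vreinterpretq_f32_s32(vshlq_n_s32(vreinterpretq_s32_f32(vn), 23));
  vn = vsubq_f32(vn, vmagic_bias);

  // Cody-Waite range reduction t = x - n*ln2, with ln2 split into hi and lo
  // parts for extra precision (lines 73-77, 141-142, 198-199).
  float32x4_t vt = vmlaq_f32(vx, vn, vminus_ln2_hi);
  vt = vmlaq_f32(vt, vn, vminus_ln2_lo);

  // Inner factor of the degree-5 approximation
  // exp(t) ~= 1 + t*(c1 + t*(c2 + t*(c3 + t*(c4 + t*c5)))),
  // evaluated by Horner's rule (lines 80-90, 145-148, 202-205).
  float32x4_t vp = vmlaq_f32(vc4, vc5, vt);
  vp = vmlaq_f32(vc3, vp, vt);
  vp = vmlaq_f32(vc2, vp, vt);
  vp = vmlaq_f32(vc1, vp, vt);

  // Reconstruction exp(x) ~= s + (t*s)*p(t) (lines 99-100, 155, 212); the
  // vt *= vs multiply is assumed, it is not part of the matches.
  vt = vmulq_f32(vt, vs);
  return vmlaq_f32(vs, vp, vt);
}

vmlaq_f32 lowers to the non-fused NEON VMLA, which is why it is the intrinsic of interest on plain ARMv7 NEON; FMA-capable targets are generally served by a separate neonfma variant of the same kernel built on vfmaq_f32.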