
Searched refs: vaddq_f32 (results 1 – 25 of 334), sorted by relevance
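vaddq_f32 is the ARM NEON intrinsic for lane-wise addition of two float32x4_t vectors (four single-precision floats each). As a minimal sketch of what every hit below is doing, assuming only <arm_neon.h> (add4 is a hypothetical helper name, not part of any project listed):

    #include <arm_neon.h>

    // out[i] = a[i] + b[i] for four floats at a time.
    void add4(const float* a, const float* b, float* out) {
      const float32x4_t va = vld1q_f32(a);      // load 4 floats
      const float32x4_t vb = vld1q_f32(b);      // load 4 floats
      vst1q_f32(out, vaddq_f32(va, vb));        // lane-wise add, store 4 results
    }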


/external/XNNPACK/src/f32-gavgpool/
7p7x-minmax-neon-c4.c
47 const float32x4_t vsum01 = vaddq_f32(vi0, vi1); in xnn_f32_gavgpool_minmax_ukernel_7p7x__neon_c4()
48 const float32x4_t vsum23 = vaddq_f32(vi2, vi3); in xnn_f32_gavgpool_minmax_ukernel_7p7x__neon_c4()
49 const float32x4_t vsum45 = vaddq_f32(vi4, vi5); in xnn_f32_gavgpool_minmax_ukernel_7p7x__neon_c4()
51 const float32x4_t vsum016 = vaddq_f32(vsum01, vi6); in xnn_f32_gavgpool_minmax_ukernel_7p7x__neon_c4()
52 const float32x4_t vsum2345 = vaddq_f32(vsum23, vsum45); in xnn_f32_gavgpool_minmax_ukernel_7p7x__neon_c4()
54 const float32x4_t vsum = vaddq_f32(vsum016, vsum2345); in xnn_f32_gavgpool_minmax_ukernel_7p7x__neon_c4()
79 const float32x4_t vsum01 = vaddq_f32(vi0, vi1); in xnn_f32_gavgpool_minmax_ukernel_7p7x__neon_c4()
80 const float32x4_t vsum23 = vaddq_f32(vi2, vi3); in xnn_f32_gavgpool_minmax_ukernel_7p7x__neon_c4()
81 const float32x4_t vsum45 = vaddq_f32(vi4, vi5); in xnn_f32_gavgpool_minmax_ukernel_7p7x__neon_c4()
82 const float32x4_t vsum6a = vaddq_f32(vi6, vacc); in xnn_f32_gavgpool_minmax_ukernel_7p7x__neon_c4()
[all …]
7x-minmax-neon-c4.c
64 const float32x4_t vsum01 = vaddq_f32(vi0, vi1); in xnn_f32_gavgpool_minmax_ukernel_7x__neon_c4()
65 const float32x4_t vsum23 = vaddq_f32(vi2, vi3); in xnn_f32_gavgpool_minmax_ukernel_7x__neon_c4()
66 const float32x4_t vsum45 = vaddq_f32(vi4, vi5); in xnn_f32_gavgpool_minmax_ukernel_7x__neon_c4()
68 const float32x4_t vsum016 = vaddq_f32(vsum01, vi6); in xnn_f32_gavgpool_minmax_ukernel_7x__neon_c4()
69 const float32x4_t vsum2345 = vaddq_f32(vsum23, vsum45); in xnn_f32_gavgpool_minmax_ukernel_7x__neon_c4()
71 const float32x4_t vsum = vaddq_f32(vsum016, vsum2345); in xnn_f32_gavgpool_minmax_ukernel_7x__neon_c4()
90 const float32x4_t vsum01 = vaddq_f32(vi0, vi1); in xnn_f32_gavgpool_minmax_ukernel_7x__neon_c4()
91 const float32x4_t vsum23 = vaddq_f32(vi2, vi3); in xnn_f32_gavgpool_minmax_ukernel_7x__neon_c4()
92 const float32x4_t vsum45 = vaddq_f32(vi4, vi5); in xnn_f32_gavgpool_minmax_ukernel_7x__neon_c4()
94 const float32x4_t vsum016 = vaddq_f32(vsum01, vi6); in xnn_f32_gavgpool_minmax_ukernel_7x__neon_c4()
[all …]
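The f32-gavgpool kernels above sum seven input rows per pass. Rather than a serial chain of six dependent adds, they pair the inputs into a balanced tree, which keeps the vaddq_f32 dependency chain at three. A sketch of that pattern, with sum7 as a hypothetical helper name (the kernels inline it):

    #include <arm_neon.h>

    // Sum seven row vectors with a balanced tree of lane-wise adds.
    static inline float32x4_t sum7(float32x4_t vi0, float32x4_t vi1, float32x4_t vi2,
                                   float32x4_t vi3, float32x4_t vi4, float32x4_t vi5,
                                   float32x4_t vi6) {
      const float32x4_t vsum01   = vaddq_f32(vi0, vi1);
      const float32x4_t vsum23   = vaddq_f32(vi2, vi3);
      const float32x4_t vsum45   = vaddq_f32(vi4, vi5);
      const float32x4_t vsum016  = vaddq_f32(vsum01, vi6);    // fold the odd row in early
      const float32x4_t vsum2345 = vaddq_f32(vsum23, vsum45);
      return vaddq_f32(vsum016, vsum2345);
    }

The 9-input avgpool/pavgpool kernels further down use the same idea with one extra pair.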
/external/XNNPACK/src/f32-avgpool/
9p8x-minmax-neon-c4.c
94 const float32x4_t vsum01 = vaddq_f32(vi0, vi1); in xnn_f32_avgpool_minmax_ukernel_9p8x__neon_c4()
95 const float32x4_t vsum23 = vaddq_f32(vi2, vi3); in xnn_f32_avgpool_minmax_ukernel_9p8x__neon_c4()
96 const float32x4_t vsum45 = vaddq_f32(vi4, vi5); in xnn_f32_avgpool_minmax_ukernel_9p8x__neon_c4()
97 const float32x4_t vsum67 = vaddq_f32(vi6, vi7); in xnn_f32_avgpool_minmax_ukernel_9p8x__neon_c4()
98 const float32x4_t vsum018 = vaddq_f32(vsum01, vi8); in xnn_f32_avgpool_minmax_ukernel_9p8x__neon_c4()
99 const float32x4_t vsum2345 = vaddq_f32(vsum23, vsum45); in xnn_f32_avgpool_minmax_ukernel_9p8x__neon_c4()
100 const float32x4_t vsum01678 = vaddq_f32(vsum018, vsum67); in xnn_f32_avgpool_minmax_ukernel_9p8x__neon_c4()
101 const float32x4_t vsum = vaddq_f32(vsum2345, vsum01678); in xnn_f32_avgpool_minmax_ukernel_9p8x__neon_c4()
162 const float32x4_t vsum01 = vaddq_f32(vi0, vi1); in xnn_f32_avgpool_minmax_ukernel_9p8x__neon_c4()
163 const float32x4_t vsum23 = vaddq_f32(vi2, vi3); in xnn_f32_avgpool_minmax_ukernel_9p8x__neon_c4()
[all …]
9x-minmax-neon-c4.c
118 const float32x4_t vsum01 = vaddq_f32(vi0, vi1); in xnn_f32_avgpool_minmax_ukernel_9x__neon_c4()
119 const float32x4_t vsum23 = vaddq_f32(vi2, vi3); in xnn_f32_avgpool_minmax_ukernel_9x__neon_c4()
120 const float32x4_t vsum45 = vaddq_f32(vi4, vi5); in xnn_f32_avgpool_minmax_ukernel_9x__neon_c4()
121 const float32x4_t vsum67 = vaddq_f32(vi6, vi7); in xnn_f32_avgpool_minmax_ukernel_9x__neon_c4()
122 const float32x4_t vsum018 = vaddq_f32(vsum01, vi8); in xnn_f32_avgpool_minmax_ukernel_9x__neon_c4()
123 const float32x4_t vsum2345 = vaddq_f32(vsum23, vsum45); in xnn_f32_avgpool_minmax_ukernel_9x__neon_c4()
124 const float32x4_t vsum01678 = vaddq_f32(vsum018, vsum67); in xnn_f32_avgpool_minmax_ukernel_9x__neon_c4()
125 const float32x4_t vsum = vaddq_f32(vsum2345, vsum01678); in xnn_f32_avgpool_minmax_ukernel_9x__neon_c4()
146 const float32x4_t vsum01 = vaddq_f32(vi0, vi1); in xnn_f32_avgpool_minmax_ukernel_9x__neon_c4()
147 const float32x4_t vsum23 = vaddq_f32(vi2, vi3); in xnn_f32_avgpool_minmax_ukernel_9x__neon_c4()
[all …]
/external/XNNPACK/src/f32-pavgpool/
9p8x-minmax-neon-c4.c
94 const float32x4_t vsum01 = vaddq_f32(vi0, vi1); in xnn_f32_pavgpool_minmax_ukernel_9p8x__neon_c4()
95 const float32x4_t vsum23 = vaddq_f32(vi2, vi3); in xnn_f32_pavgpool_minmax_ukernel_9p8x__neon_c4()
96 const float32x4_t vsum45 = vaddq_f32(vi4, vi5); in xnn_f32_pavgpool_minmax_ukernel_9p8x__neon_c4()
97 const float32x4_t vsum67 = vaddq_f32(vi6, vi7); in xnn_f32_pavgpool_minmax_ukernel_9p8x__neon_c4()
98 const float32x4_t vsum018 = vaddq_f32(vsum01, vi8); in xnn_f32_pavgpool_minmax_ukernel_9p8x__neon_c4()
99 const float32x4_t vsum2345 = vaddq_f32(vsum23, vsum45); in xnn_f32_pavgpool_minmax_ukernel_9p8x__neon_c4()
100 const float32x4_t vsum01678 = vaddq_f32(vsum018, vsum67); in xnn_f32_pavgpool_minmax_ukernel_9p8x__neon_c4()
101 const float32x4_t vsum = vaddq_f32(vsum2345, vsum01678); in xnn_f32_pavgpool_minmax_ukernel_9p8x__neon_c4()
162 const float32x4_t vsum01 = vaddq_f32(vi0, vi1); in xnn_f32_pavgpool_minmax_ukernel_9p8x__neon_c4()
163 const float32x4_t vsum23 = vaddq_f32(vi2, vi3); in xnn_f32_pavgpool_minmax_ukernel_9p8x__neon_c4()
[all …]
9x-minmax-neon-c4.c
120 const float32x4_t vsum01 = vaddq_f32(vi0, vi1); in xnn_f32_pavgpool_minmax_ukernel_9x__neon_c4()
121 const float32x4_t vsum23 = vaddq_f32(vi2, vi3); in xnn_f32_pavgpool_minmax_ukernel_9x__neon_c4()
122 const float32x4_t vsum45 = vaddq_f32(vi4, vi5); in xnn_f32_pavgpool_minmax_ukernel_9x__neon_c4()
123 const float32x4_t vsum67 = vaddq_f32(vi6, vi7); in xnn_f32_pavgpool_minmax_ukernel_9x__neon_c4()
124 const float32x4_t vsum018 = vaddq_f32(vsum01, vi8); in xnn_f32_pavgpool_minmax_ukernel_9x__neon_c4()
125 const float32x4_t vsum2345 = vaddq_f32(vsum23, vsum45); in xnn_f32_pavgpool_minmax_ukernel_9x__neon_c4()
126 const float32x4_t vsum01678 = vaddq_f32(vsum018, vsum67); in xnn_f32_pavgpool_minmax_ukernel_9x__neon_c4()
127 const float32x4_t vsum = vaddq_f32(vsum2345, vsum01678); in xnn_f32_pavgpool_minmax_ukernel_9x__neon_c4()
148 const float32x4_t vsum01 = vaddq_f32(vi0, vi1); in xnn_f32_pavgpool_minmax_ukernel_9x__neon_c4()
149 const float32x4_t vsum23 = vaddq_f32(vi2, vi3); in xnn_f32_pavgpool_minmax_ukernel_9x__neon_c4()
[all …]
/external/XNNPACK/src/f32-gavgpool-cw/
neon-x4.c
47 vsum0 = vaddq_f32(vsum0, vi0); in xnn_f32_gavgpool_cw_ukernel__neon_x4()
48 vsum1 = vaddq_f32(vsum1, vi1); in xnn_f32_gavgpool_cw_ukernel__neon_x4()
49 vsum2 = vaddq_f32(vsum2, vi2); in xnn_f32_gavgpool_cw_ukernel__neon_x4()
50 vsum3 = vaddq_f32(vsum3, vi3); in xnn_f32_gavgpool_cw_ukernel__neon_x4()
65 vsum0 = vaddq_f32(vsum0, vi0); in xnn_f32_gavgpool_cw_ukernel__neon_x4()
66 vsum1 = vaddq_f32(vsum1, vi1); in xnn_f32_gavgpool_cw_ukernel__neon_x4()
67 vsum2 = vaddq_f32(vsum2, vi2); in xnn_f32_gavgpool_cw_ukernel__neon_x4()
68 vsum3 = vaddq_f32(vsum3, vi3); in xnn_f32_gavgpool_cw_ukernel__neon_x4()
104 vsum0 = vaddq_f32(vsum0, vi0); in xnn_f32_gavgpool_cw_ukernel__neon_x4()
111 vsum0 = vaddq_f32(vsum0, vi0); in xnn_f32_gavgpool_cw_ukernel__neon_x4()
/external/eigen/Eigen/src/Core/arch/NEON/
MathFunctions.h
62 y = vaddq_f32(y, p4f_cephes_exp_p1);
64 y = vaddq_f32(y, p4f_cephes_exp_p2);
66 y = vaddq_f32(y, p4f_cephes_exp_p3);
68 y = vaddq_f32(y, p4f_cephes_exp_p4);
70 y = vaddq_f32(y, p4f_cephes_exp_p5);
73 y = vaddq_f32(y, x);
74 y = vaddq_f32(y, p4f_1);
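These Eigen hits come from the NEON pexp path (a vectorized expf): the polynomial part is evaluated Horner-style, each step a multiply by x followed by a vaddq_f32 of the next coefficient (p4f_cephes_exp_p1 … p5). A sketch of one such step; the coefficient here is a placeholder argument, not one of Eigen's constants:

    #include <arm_neon.h>

    // One Horner step: y = y * x + c, spelled the way the code above does
    // (separate multiply and add; vmlaq_f32(vdupq_n_f32(c), y, x) would fuse them).
    static inline float32x4_t horner_step(float32x4_t y, float32x4_t x, float c) {
      y = vmulq_f32(y, x);
      y = vaddq_f32(y, vdupq_n_f32(c));
      return y;
    }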
/external/XNNPACK/src/f32-vmulcaddc/gen/
c8-minmax-neon-2x.c
66 vacc0x0123 = vaddq_f32(vacc0x0123, vbias0123); in xnn_f32_vmulcaddc_minmax_ukernel_c8__neon_2x()
67 vacc0x4567 = vaddq_f32(vacc0x4567, vbias4567); in xnn_f32_vmulcaddc_minmax_ukernel_c8__neon_2x()
68 vacc1x0123 = vaddq_f32(vacc1x0123, vbias0123); in xnn_f32_vmulcaddc_minmax_ukernel_c8__neon_2x()
69 vacc1x4567 = vaddq_f32(vacc1x4567, vbias4567); in xnn_f32_vmulcaddc_minmax_ukernel_c8__neon_2x()
97 vacc0x0123 = vaddq_f32(vacc0x0123, vbias0123); in xnn_f32_vmulcaddc_minmax_ukernel_c8__neon_2x()
98 vacc1x0123 = vaddq_f32(vacc1x0123, vbias0123); in xnn_f32_vmulcaddc_minmax_ukernel_c8__neon_2x()
120 vacc0x0123 = vaddq_f32(vacc0x0123, vbias0123); in xnn_f32_vmulcaddc_minmax_ukernel_c8__neon_2x()
121 vacc1x0123 = vaddq_f32(vacc1x0123, vbias0123); in xnn_f32_vmulcaddc_minmax_ukernel_c8__neon_2x()
/external/XNNPACK/src/f32-hswish/gen/
hswish-neon-x16.c
38 float32x4_t vacc0123 = vaddq_f32(vx0123, vthree); in xnn_f32_hswish_ukernel__neon_x16()
40 float32x4_t vacc4567 = vaddq_f32(vx4567, vthree); in xnn_f32_hswish_ukernel__neon_x16()
42 float32x4_t vacc89AB = vaddq_f32(vx89AB, vthree); in xnn_f32_hswish_ukernel__neon_x16()
44 float32x4_t vaccCDEF = vaddq_f32(vxCDEF, vthree); in xnn_f32_hswish_ukernel__neon_x16()
69 float32x4_t vacc = vaddq_f32(vx, vthree); in xnn_f32_hswish_ukernel__neon_x16()
78 float32x4_t vacc = vaddq_f32(vx, vthree); in xnn_f32_hswish_ukernel__neon_x16()
hswish-neon-x8.c
36 float32x4_t vacc0123 = vaddq_f32(vx0123, vthree); in xnn_f32_hswish_ukernel__neon_x8()
38 float32x4_t vacc4567 = vaddq_f32(vx4567, vthree); in xnn_f32_hswish_ukernel__neon_x8()
55 float32x4_t vacc = vaddq_f32(vx, vthree); in xnn_f32_hswish_ukernel__neon_x8()
64 float32x4_t vacc = vaddq_f32(vx, vthree); in xnn_f32_hswish_ukernel__neon_x8()
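The f32-hswish kernels use vaddq_f32 only for the x + 3 term of hardswish(x) = x * min(max(x + 3, 0), 6) / 6. A per-vector sketch of the whole function; the ordering is simplified relative to the generated XNNPACK kernels:

    #include <arm_neon.h>

    // hardswish(x) = x * min(max(x + 3, 0), 6) / 6, four lanes at a time.
    float32x4_t hswish_f32x4(float32x4_t vx) {
      const float32x4_t vthree = vdupq_n_f32(3.0f);
      const float32x4_t vsix   = vdupq_n_f32(6.0f);
      const float32x4_t vzero  = vdupq_n_f32(0.0f);
      const float32x4_t vsixth = vdupq_n_f32(0x1.555556p-3f);  // ~1/6
      float32x4_t vacc = vaddq_f32(vx, vthree);   // x + 3
      vacc = vmaxq_f32(vacc, vzero);              // clamp below at 0
      vacc = vminq_f32(vacc, vsix);               // clamp above at 6
      vacc = vmulq_f32(vacc, vsixth);             // ... / 6
      return vmulq_f32(vacc, vx);                 // ... * x
    }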
/external/webrtc/common_audio/third_party/ooura/fft_size_128/
ooura_fft_neon.cc
45 float32x4_t x0v = vaddq_f32(a01v, a23v); in cft1st_128_neon()
47 const float32x4_t x2v = vaddq_f32(a45v, a67v); in cft1st_128_neon()
51 a01v = vaddq_f32(x0v, x2v); in cft1st_128_neon()
88 const float32x4_t x0r0_0i0_0r1_x0i1 = vaddq_f32(a_00_32, a_08_40); in cftmdl_128_neon()
96 const float32x4_t x2r0_2i0_2r1_x2i1 = vaddq_f32(a_16_48, a_24_56); in cftmdl_128_neon()
98 const float32x4_t xx0 = vaddq_f32(x0r0_0i0_0r1_x0i1, x2r0_2i0_2r1_x2i1); in cftmdl_128_neon()
146 const float32x4_t x0r0_0i0_0r1_x0i1 = vaddq_f32(a_00_32, a_08_40); in cftmdl_128_neon()
154 const float32x4_t x2r0_2i0_2r1_x2i1 = vaddq_f32(a_16_48, a_24_56); in cftmdl_128_neon()
156 const float32x4_t xx = vaddq_f32(x0r0_0i0_0r1_x0i1, x2r0_2i0_2r1_x2i1); in cftmdl_128_neon()
215 const float32x4_t xi_ = vaddq_f32(a_j2_p.val[1], a_k2_p1); in rftfsub_128_neon()
[all …]
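In the WebRTC ooura FFT, the vaddq_f32 calls are the sum half of radix-2 butterflies; each is paired with a vsubq_f32 for the difference. A sketch of the building block, with butterfly as a hypothetical helper name:

    #include <arm_neon.h>

    // Radix-2 butterfly: out0 = a + b, out1 = a - b, lane by lane.
    static inline void butterfly(float32x4_t a, float32x4_t b,
                                 float32x4_t* out0, float32x4_t* out1) {
      *out0 = vaddq_f32(a, b);
      *out1 = vsubq_f32(a, b);
    }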
/external/XNNPACK/src/f32-raddstoreexpminusmax/gen/
neon-p5-x20-acc5.c
161 vacc0 = vaddq_f32(vacc0, vf0123); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x20_acc5()
162 vacc4 = vaddq_f32(vacc4, vf4567); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x20_acc5()
163 vacc3 = vaddq_f32(vacc3, vf89AB); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x20_acc5()
164 vacc2 = vaddq_f32(vacc2, vfCDEF); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x20_acc5()
165 vacc1 = vaddq_f32(vacc1, vfGHIJ); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x20_acc5()
168 vacc0 = vaddq_f32(vacc0, vacc1); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x20_acc5()
169 vacc2 = vaddq_f32(vacc2, vacc3); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x20_acc5()
170 vacc0 = vaddq_f32(vacc0, vacc2); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x20_acc5()
171 vacc0 = vaddq_f32(vacc0, vacc4); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x20_acc5()
222 vacc = vaddq_f32(vacc, vf); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x20_acc5()
neonfma-p5-x20-acc5.c
160 vacc0 = vaddq_f32(vacc0, vf0123); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x20_acc5()
161 vacc4 = vaddq_f32(vacc4, vf4567); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x20_acc5()
162 vacc3 = vaddq_f32(vacc3, vf89AB); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x20_acc5()
163 vacc2 = vaddq_f32(vacc2, vfCDEF); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x20_acc5()
164 vacc1 = vaddq_f32(vacc1, vfGHIJ); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x20_acc5()
167 vacc0 = vaddq_f32(vacc0, vacc1); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x20_acc5()
168 vacc2 = vaddq_f32(vacc2, vacc3); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x20_acc5()
169 vacc0 = vaddq_f32(vacc0, vacc2); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x20_acc5()
170 vacc0 = vaddq_f32(vacc0, vacc4); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x20_acc5()
221 vacc = vaddq_f32(vacc, vf); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x20_acc5()
neonfma-p5-x16-acc4.c
144 vacc0 = vaddq_f32(vacc0, vf0123); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x16_acc4()
145 vacc0 = vaddq_f32(vacc0, vf4567); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x16_acc4()
146 vacc0 = vaddq_f32(vacc0, vf89AB); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x16_acc4()
147 vacc0 = vaddq_f32(vacc0, vfCDEF); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x16_acc4()
150 vacc0 = vaddq_f32(vacc0, vacc1); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x16_acc4()
151 vacc2 = vaddq_f32(vacc2, vacc3); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x16_acc4()
152 vacc0 = vaddq_f32(vacc0, vacc2); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x16_acc4()
203 vacc = vaddq_f32(vacc, vf); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x16_acc4()
neon-p5-x16-acc4.c
145 vacc0 = vaddq_f32(vacc0, vf0123); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16_acc4()
146 vacc0 = vaddq_f32(vacc0, vf4567); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16_acc4()
147 vacc0 = vaddq_f32(vacc0, vf89AB); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16_acc4()
148 vacc0 = vaddq_f32(vacc0, vfCDEF); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16_acc4()
151 vacc0 = vaddq_f32(vacc0, vacc1); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16_acc4()
152 vacc2 = vaddq_f32(vacc2, vacc3); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16_acc4()
153 vacc0 = vaddq_f32(vacc0, vacc2); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16_acc4()
204 vacc = vaddq_f32(vacc, vf); in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16_acc4()
neonfma-lut64-p2-x20-acc5.c
203 vacc0 = vaddq_f32(vacc0, vf0123); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_lut64_p2_x20_acc5()
204 vacc4 = vaddq_f32(vacc4, vf4567); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_lut64_p2_x20_acc5()
205 vacc3 = vaddq_f32(vacc3, vf89AB); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_lut64_p2_x20_acc5()
206 vacc2 = vaddq_f32(vacc2, vfCDEF); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_lut64_p2_x20_acc5()
207 vacc1 = vaddq_f32(vacc1, vfGHIJ); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_lut64_p2_x20_acc5()
210 vacc0 = vaddq_f32(vacc0, vacc1); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_lut64_p2_x20_acc5()
211 vacc2 = vaddq_f32(vacc2, vacc3); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_lut64_p2_x20_acc5()
212 vacc0 = vaddq_f32(vacc0, vacc2); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_lut64_p2_x20_acc5()
213 vacc0 = vaddq_f32(vacc0, vacc4); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_lut64_p2_x20_acc5()
283 vacc = vaddq_f32(vacc, vf); in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_lut64_p2_x20_acc5()
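The f32-raddstoreexpminusmax kernels keep several vector accumulators in flight (vacc0 … vacc4) and only merge them with vaddq_f32 at the end, before a horizontal sum. A sketch of that merge for four accumulators; the horizontal reduction shown is the plain NEON form rather than AArch64's vaddvq_f32:

    #include <arm_neon.h>

    // Merge four partial accumulators, then reduce the four lanes to one float.
    float reduce_acc4(float32x4_t vacc0, float32x4_t vacc1,
                      float32x4_t vacc2, float32x4_t vacc3) {
      vacc0 = vaddq_f32(vacc0, vacc1);
      vacc2 = vaddq_f32(vacc2, vacc3);
      vacc0 = vaddq_f32(vacc0, vacc2);
      float32x2_t vsum = vadd_f32(vget_low_f32(vacc0), vget_high_f32(vacc0));
      vsum = vpadd_f32(vsum, vsum);      // pairwise add: both lanes hold the total
      return vget_lane_f32(vsum, 0);
    }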
/external/XNNPACK/src/f32-vbinary/gen/
vaddc-minmax-neon-x8.c
39 float32x4_t vy0123 = vaddq_f32(va0123, vb); in xnn_f32_vaddc_minmax_ukernel__neon_x8()
40 float32x4_t vy4567 = vaddq_f32(va4567, vb); in xnn_f32_vaddc_minmax_ukernel__neon_x8()
55 float32x4_t vy0123 = vaddq_f32(va0123, vb); in xnn_f32_vaddc_minmax_ukernel__neon_x8()
63 float32x4_t vy0123 = vaddq_f32(va0123, vb); in xnn_f32_vaddc_minmax_ukernel__neon_x8()
vadd-minmax-neon-x8.c
40 float32x4_t vy0123 = vaddq_f32(va0123, vb0123); in xnn_f32_vadd_minmax_ukernel__neon_x8()
41 float32x4_t vy4567 = vaddq_f32(va4567, vb4567); in xnn_f32_vadd_minmax_ukernel__neon_x8()
57 float32x4_t vy0123 = vaddq_f32(va0123, vb0123); in xnn_f32_vadd_minmax_ukernel__neon_x8()
66 float32x4_t vy0123 = vaddq_f32(va0123, vb0123); in xnn_f32_vadd_minmax_ukernel__neon_x8()
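The f32-vbinary vadd/vaddc kernels are plain elementwise additions: full four-lane vectors in the main loop and a tail for the leftover elements, plus a min/max clamp that is omitted here. A minimal sketch; vadd_f32_array is a hypothetical name and n is the element count:

    #include <arm_neon.h>
    #include <stddef.h>

    // out[i] = a[i] + b[i] for i in [0, n).
    void vadd_f32_array(const float* a, const float* b, float* out, size_t n) {
      size_t i = 0;
      for (; i + 4 <= n; i += 4) {
        const float32x4_t va = vld1q_f32(a + i);
        const float32x4_t vb = vld1q_f32(b + i);
        vst1q_f32(out + i, vaddq_f32(va, vb));
      }
      for (; i < n; i++) {               // scalar tail for the last 0..3 elements
        out[i] = a[i] + b[i];
      }
    }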
/external/XNNPACK/src/f32-vrnd/gen/
vrndne-neon-x8.c
38 float32x4_t vrndabsx0123 = vaddq_f32(vabsx0123, vmagic_number); in xnn_f32_vrndne_ukernel__neon_x8()
39 float32x4_t vrndabsx4567 = vaddq_f32(vabsx4567, vmagic_number); in xnn_f32_vrndne_ukernel__neon_x8()
57 float32x4_t vrndabsx = vaddq_f32(vabsx, vmagic_number); in xnn_f32_vrndne_ukernel__neon_x8()
67 float32x4_t vrndabsx = vaddq_f32(vabsx, vmagic_number); in xnn_f32_vrndne_ukernel__neon_x8()
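The f32-vrnd kernels round to nearest-even without ARMv8's vrndnq_f32 by using a magic constant: for |x| < 2^23, (|x| + 2^23) - 2^23 lands on the nearest even integer under the default rounding mode, and lanes with |x| >= 2^23 are already integral. A sketch of the idea; the sign and select handling is simplified relative to the generated kernels:

    #include <arm_neon.h>

    // Round each lane to the nearest integer, ties to even (works on ARMv7 NEON).
    float32x4_t round_ne_f32x4(float32x4_t vx) {
      const float32x4_t vmagic = vdupq_n_f32(0x1.0p+23f);       // 2^23
      const float32x4_t vabsx  = vabsq_f32(vx);
      // |x| + 2^23 - 2^23 rounds |x| to an integer when |x| < 2^23.
      const float32x4_t vrndabsx = vsubq_f32(vaddq_f32(vabsx, vmagic), vmagic);
      // Keep lanes with |x| >= 2^23 unchanged; the trick does not apply to them.
      const uint32x4_t vsmall = vcltq_f32(vabsx, vmagic);
      const float32x4_t vrnd = vbslq_f32(vsmall, vrndabsx, vabsx);
      // Re-apply the original sign bit so e.g. -0.25 rounds to -0.0.
      const uint32x4_t vsign = vandq_u32(vreinterpretq_u32_f32(vx), vdupq_n_u32(0x80000000u));
      return vreinterpretq_f32_u32(vorrq_u32(vreinterpretq_u32_f32(vrnd), vsign));
    }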
/external/tensorflow/tensorflow/core/kernels/
quantized_instance_norm.cc
102 vaddq_f32(vmulq_n_f32(xA[i], nA), vmulq_n_f32(xB, nB)), 1.0f / nX); in ColMeanAndVariance()
113 M2A[i] = vaddq_f32(vaddq_f32(M2A[i], M2B), last_term); in ColMeanAndVariance()
155 vrsqrteq_f32(vaddq_f32(variance[0], eps)), in MinAndMax()
156 vrsqrteq_f32(vaddq_f32(variance[1], eps)), in MinAndMax()
157 vrsqrteq_f32(vaddq_f32(variance[2], eps)), in MinAndMax()
158 vrsqrteq_f32(vaddq_f32(variance[3], eps))}; in MinAndMax()
212 vrsqrteq_f32(vaddq_f32(variance[0], eps)), in InstanceNorm()
213 vrsqrteq_f32(vaddq_f32(variance[1], eps)), in InstanceNorm()
214 vrsqrteq_f32(vaddq_f32(variance[2], eps)), in InstanceNorm()
215 vrsqrteq_f32(vaddq_f32(variance[3], eps))}; in InstanceNorm()
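In the TensorFlow quantized instance-norm kernel, vaddq_f32(variance, eps) feeds vrsqrteq_f32 to get 1/sqrt(var + eps). vrsqrteq_f32 is only an estimate (roughly 8 bits); a Newton-Raphson step with vrsqrtsq_f32 tightens it. The hits above show just the estimate call; the refinement in this sketch is an optional extra:

    #include <arm_neon.h>

    // Approximate 1/sqrt(var + eps) per lane, with one Newton-Raphson refinement.
    float32x4_t rsqrt_var_eps(float32x4_t var, float32x4_t eps) {
      const float32x4_t x = vaddq_f32(var, eps);
      float32x4_t e = vrsqrteq_f32(x);                      // initial estimate
      e = vmulq_f32(e, vrsqrtsq_f32(vmulq_f32(x, e), e));   // e *= (3 - x*e*e) / 2
      return e;
    }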
/external/XNNPACK/src/f32-dwconv2d-chw/gen/
3x3p1-minmax-neon-1x4-acc4.c
105 vo0p0 = vaddq_f32(vo0p0, vo0p1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_1x4_acc4()
106 vo0p2 = vaddq_f32(vo0p2, vo0p3); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_1x4_acc4()
107 vo0p0 = vaddq_f32(vo0p0, vo0p2); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_1x4_acc4()
152 vo0p0 = vaddq_f32(vo0p0, vo0p1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_1x4_acc4()
153 vo0p2 = vaddq_f32(vo0p2, vo0p3); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_1x4_acc4()
154 vo0p0 = vaddq_f32(vo0p0, vo0p2); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neon_1x4_acc4()
3x3p1-minmax-neonfma-1x4-acc4.c
105 vo0p0 = vaddq_f32(vo0p0, vo0p1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_1x4_acc4()
106 vo0p2 = vaddq_f32(vo0p2, vo0p3); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_1x4_acc4()
107 vo0p0 = vaddq_f32(vo0p0, vo0p2); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_1x4_acc4()
152 vo0p0 = vaddq_f32(vo0p0, vo0p1); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_1x4_acc4()
153 vo0p2 = vaddq_f32(vo0p2, vo0p3); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_1x4_acc4()
154 vo0p0 = vaddq_f32(vo0p0, vo0p2); in xnn_f32_dwconv2d_chw_ukernel_3x3p1__neonfma_1x4_acc4()
3x3s2p1-minmax-neon-1x4-acc4.c
98 vo0p0 = vaddq_f32(vo0p0, vo0p1); in xnn_f32_dwconv2d_chw_ukernel_3x3s2p1__neon_1x4_acc4()
99 vo0p2 = vaddq_f32(vo0p2, vo0p3); in xnn_f32_dwconv2d_chw_ukernel_3x3s2p1__neon_1x4_acc4()
100 vo0p0 = vaddq_f32(vo0p0, vo0p2); in xnn_f32_dwconv2d_chw_ukernel_3x3s2p1__neon_1x4_acc4()
146 vo0p0 = vaddq_f32(vo0p0, vo0p1); in xnn_f32_dwconv2d_chw_ukernel_3x3s2p1__neon_1x4_acc4()
147 vo0p2 = vaddq_f32(vo0p2, vo0p3); in xnn_f32_dwconv2d_chw_ukernel_3x3s2p1__neon_1x4_acc4()
148 vo0p0 = vaddq_f32(vo0p0, vo0p2); in xnn_f32_dwconv2d_chw_ukernel_3x3s2p1__neon_1x4_acc4()
3x3s2p1-minmax-neonfma-1x4-acc4.c
98 vo0p0 = vaddq_f32(vo0p0, vo0p1); in xnn_f32_dwconv2d_chw_ukernel_3x3s2p1__neonfma_1x4_acc4()
99 vo0p2 = vaddq_f32(vo0p2, vo0p3); in xnn_f32_dwconv2d_chw_ukernel_3x3s2p1__neonfma_1x4_acc4()
100 vo0p0 = vaddq_f32(vo0p0, vo0p2); in xnn_f32_dwconv2d_chw_ukernel_3x3s2p1__neonfma_1x4_acc4()
146 vo0p0 = vaddq_f32(vo0p0, vo0p1); in xnn_f32_dwconv2d_chw_ukernel_3x3s2p1__neonfma_1x4_acc4()
147 vo0p2 = vaddq_f32(vo0p2, vo0p3); in xnn_f32_dwconv2d_chw_ukernel_3x3s2p1__neonfma_1x4_acc4()
148 vo0p0 = vaddq_f32(vo0p0, vo0p2); in xnn_f32_dwconv2d_chw_ukernel_3x3s2p1__neonfma_1x4_acc4()
