/external/XNNPACK/src/f32-ibilinear/gen/

neon-c8.c, in xnn_f32_ibilinear_ukernel__neon_c8():
    51  const float32x4_t vtd0123 = vsubq_f32(vtr0123, vtl0123);
    52  const float32x4_t vbd0123 = vsubq_f32(vbr0123, vbl0123);
    53  const float32x4_t vtd4567 = vsubq_f32(vtr4567, vtl4567);
    54  const float32x4_t vbd4567 = vsubq_f32(vbr4567, vbl4567);
    61  const float32x4_t vd0123 = vsubq_f32(vb0123, vt0123);
    62  const float32x4_t vd4567 = vsubq_f32(vb4567, vt4567);
    76  const float32x4_t vtd0123 = vsubq_f32(vtr0123, vtl0123);
    77  const float32x4_t vbd0123 = vsubq_f32(vbr0123, vbl0123);
    82  const float32x4_t vd0123 = vsubq_f32(vb0123, vt0123);
    95  const float32x4_t vtd0123 = vsubq_f32(vtr0123, vtl0123);
    [all …]

neonfma-c8.c, in xnn_f32_ibilinear_ukernel__neonfma_c8():
    55  const float32x4_t vtd0123 = vsubq_f32(vtr0123, vtl0123);
    56  const float32x4_t vbd0123 = vsubq_f32(vbr0123, vbl0123);
    57  const float32x4_t vtd4567 = vsubq_f32(vtr4567, vtl4567);
    58  const float32x4_t vbd4567 = vsubq_f32(vbr4567, vbl4567);
    72  const float32x4_t vd0123 = vsubq_f32(vb0123, vt0123);
    73  const float32x4_t vd4567 = vsubq_f32(vb4567, vt4567);
    92  const float32x4_t vtd0123 = vsubq_f32(vtr0123, vtl0123);
    93  const float32x4_t vbd0123 = vsubq_f32(vbr0123, vbl0123);
   103  const float32x4_t vd0123 = vsubq_f32(vb0123, vt0123);
   120  const float32x4_t vtd0123 = vsubq_f32(vtr0123, vtl0123);
    [all …]

neon-c4.c, in xnn_f32_ibilinear_ukernel__neon_c4():
    47  const float32x4_t vtd0123 = vsubq_f32(vtr0123, vtl0123);
    48  const float32x4_t vbd0123 = vsubq_f32(vbr0123, vbl0123);
    53  const float32x4_t vd0123 = vsubq_f32(vb0123, vt0123);
    65  const float32x4_t vtd0123 = vsubq_f32(vtr0123, vtl0123);
    66  const float32x4_t vbd0123 = vsubq_f32(vbr0123, vbl0123);
    71  const float32x4_t vd0123 = vsubq_f32(vb0123, vt0123);
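The pattern these ibilinear kernels repeat is the standard two-step bilinear blend: vsubq_f32 forms the horizontal deltas (right minus left) for the top and bottom rows, a multiply-add folds in the horizontal weight, and a final vsubq_f32 plus multiply-add interpolates vertically. The sketch below is a minimal single-vector (4-channel) illustration of that formulation for a NEON-capable ARM build, not the generated kernel itself; the helper name bilinear4, the scalar alphah/alphav weights, and the sample values are assumptions for illustration (the real c8 variant processes 8 channels per step with packed weights).

#include <arm_neon.h>
#include <stdio.h>

// Bilinear blend of one 4-channel pixel quad:
//   top    = tl + alphah * (tr - tl)
//   bottom = bl + alphah * (br - bl)
//   out    = top + alphav * (bottom - top)
static float32x4_t bilinear4(float32x4_t vtl, float32x4_t vtr,
                             float32x4_t vbl, float32x4_t vbr,
                             float alphah, float alphav) {
  const float32x4_t vtd = vsubq_f32(vtr, vtl);           // top-row horizontal delta
  const float32x4_t vbd = vsubq_f32(vbr, vbl);           // bottom-row horizontal delta
  const float32x4_t vt = vmlaq_n_f32(vtl, vtd, alphah);  // top interpolant
  const float32x4_t vb = vmlaq_n_f32(vbl, vbd, alphah);  // bottom interpolant
  const float32x4_t vd = vsubq_f32(vb, vt);              // vertical delta
  return vmlaq_n_f32(vt, vd, alphav);                    // final blend
}

int main(void) {
  const float tl[4] = {0.f, 0.f, 0.f, 0.f};
  const float tr[4] = {1.f, 1.f, 1.f, 1.f};
  const float bl[4] = {2.f, 2.f, 2.f, 2.f};
  const float br[4] = {3.f, 3.f, 3.f, 3.f};
  float out[4];
  vst1q_f32(out, bilinear4(vld1q_f32(tl), vld1q_f32(tr),
                           vld1q_f32(bl), vld1q_f32(br), 0.5f, 0.5f));
  printf("%g %g %g %g\n", out[0], out[1], out[2], out[3]);  // expect 1.5 per lane
  return 0;
}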
/external/webrtc/common_audio/third_party/ooura/fft_size_128/

ooura_fft_neon.cc:
  in cft1st_128_neon():
    46  const float32x4_t x1v = vsubq_f32(a01v, a23v);
    48  const float32x4_t x3v = vsubq_f32(a45v, a67v);
    52  x0v = vsubq_f32(x0v, x2v);
  in cftmdl_128_neon():
    89  const float32x4_t x1r0_1i0_1r1_x1i1 = vsubq_f32(a_00_32, a_08_40);
    97  const float32x4_t x3r0_3i0_3r1_x3i1 = vsubq_f32(a_16_48, a_24_56);
    99  const float32x4_t xx1 = vsubq_f32(x0r0_0i0_0r1_x0i1, x2r0_2i0_2r1_x2i1);
   147  const float32x4_t x1r0_1i0_1r1_x1i1 = vsubq_f32(a_00_32, a_08_40);
   155  const float32x4_t x3r0_3i0_3r1_x3i1 = vsubq_f32(a_16_48, a_24_56);
   157  const float32x4_t xx1 = vsubq_f32(x0r0_0i0_0r1_x0i1, x2r0_2i0_2r1_x2i1);
  in rftfsub_128_neon():
   200  const float32x4_t wkrt = vsubq_f32(mm_half, c_k1);  // 28, 29, 30, 31,
  [all …]
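In the Ooura FFT kernels the vaddq_f32/vsubq_f32 pairs form the sum and difference halves of butterflies on packed complex data (the a +/- b stage that produces x0…x3 from a01v…a67v). Below is a hedged, simplified sketch of a single radix-2 complex butterfly on {re, im, re, im} vectors; the helper name butterfly2 and the data layout are illustrative only and do not match the 128-point kernel's interleaving.

#include <arm_neon.h>
#include <stdio.h>

// One radix-2 butterfly on two complex pairs stored as {re0, im0, re1, im1}:
//   out_top    = a + b
//   out_bottom = a - b
static void butterfly2(const float* a, const float* b,
                       float* out_top, float* out_bottom) {
  const float32x4_t va = vld1q_f32(a);
  const float32x4_t vb = vld1q_f32(b);
  vst1q_f32(out_top, vaddq_f32(va, vb));     // lane-wise sums (re and im)
  vst1q_f32(out_bottom, vsubq_f32(va, vb));  // lane-wise differences
}

int main(void) {
  const float a[4] = {1.f, 2.f, 3.f, 4.f};
  const float b[4] = {0.5f, 0.5f, 0.5f, 0.5f};
  float top[4], bottom[4];
  butterfly2(a, b, top, bottom);
  printf("top:    %g %g %g %g\n", top[0], top[1], top[2], top[3]);
  printf("bottom: %g %g %g %g\n", bottom[0], bottom[1], bottom[2], bottom[3]);
  return 0;
}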
/external/XNNPACK/src/f32-raddstoreexpminusmax/gen/

neonfma-p5-x20.c, in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x20():
    52  const float32x4_t vx0123 = vsubq_f32(vi0123, vi_max);
    53  const float32x4_t vx4567 = vsubq_f32(vi4567, vi_max);
    54  const float32x4_t vx89AB = vsubq_f32(vi89AB, vi_max);
    55  const float32x4_t vxCDEF = vsubq_f32(viCDEF, vi_max);
    56  const float32x4_t vxGHIJ = vsubq_f32(viGHIJ, vi_max);
    79  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    80  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    81  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    82  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
    83  vnGHIJ = vsubq_f32(vnGHIJ, vmagic_bias);
    [all …]

neon-p5-x20-acc5.c, in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x20_acc5():
    57  const float32x4_t vx0123 = vsubq_f32(vi0123, vi_max);
    58  const float32x4_t vx4567 = vsubq_f32(vi4567, vi_max);
    59  const float32x4_t vx89AB = vsubq_f32(vi89AB, vi_max);
    60  const float32x4_t vxCDEF = vsubq_f32(viCDEF, vi_max);
    61  const float32x4_t vxGHIJ = vsubq_f32(viGHIJ, vi_max);
    84  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    85  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    86  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    87  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
    88  vnGHIJ = vsubq_f32(vnGHIJ, vmagic_bias);
    [all …]

neonfma-p5-x20-acc5.c, in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x20_acc5():
    56  const float32x4_t vx0123 = vsubq_f32(vi0123, vi_max);
    57  const float32x4_t vx4567 = vsubq_f32(vi4567, vi_max);
    58  const float32x4_t vx89AB = vsubq_f32(vi89AB, vi_max);
    59  const float32x4_t vxCDEF = vsubq_f32(viCDEF, vi_max);
    60  const float32x4_t vxGHIJ = vsubq_f32(viGHIJ, vi_max);
    83  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    84  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    85  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    86  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
    87  vnGHIJ = vsubq_f32(vnGHIJ, vmagic_bias);
    [all …]

neon-p5-x20.c, in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x20():
    53  const float32x4_t vx0123 = vsubq_f32(vi0123, vi_max);
    54  const float32x4_t vx4567 = vsubq_f32(vi4567, vi_max);
    55  const float32x4_t vx89AB = vsubq_f32(vi89AB, vi_max);
    56  const float32x4_t vxCDEF = vsubq_f32(viCDEF, vi_max);
    57  const float32x4_t vxGHIJ = vsubq_f32(viGHIJ, vi_max);
    80  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    81  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    82  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    83  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
    84  vnGHIJ = vsubq_f32(vnGHIJ, vmagic_bias);
    [all …]

neon-p5-x20-acc2.c, in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x20_acc2():
    54  const float32x4_t vx0123 = vsubq_f32(vi0123, vi_max);
    55  const float32x4_t vx4567 = vsubq_f32(vi4567, vi_max);
    56  const float32x4_t vx89AB = vsubq_f32(vi89AB, vi_max);
    57  const float32x4_t vxCDEF = vsubq_f32(viCDEF, vi_max);
    58  const float32x4_t vxGHIJ = vsubq_f32(viGHIJ, vi_max);
    81  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    82  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    83  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    84  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
    85  vnGHIJ = vsubq_f32(vnGHIJ, vmagic_bias);
    [all …]

neonfma-p5-x20-acc2.c, in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x20_acc2():
    53  const float32x4_t vx0123 = vsubq_f32(vi0123, vi_max);
    54  const float32x4_t vx4567 = vsubq_f32(vi4567, vi_max);
    55  const float32x4_t vx89AB = vsubq_f32(vi89AB, vi_max);
    56  const float32x4_t vxCDEF = vsubq_f32(viCDEF, vi_max);
    57  const float32x4_t vxGHIJ = vsubq_f32(viGHIJ, vi_max);
    80  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    81  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    82  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    83  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
    84  vnGHIJ = vsubq_f32(vnGHIJ, vmagic_bias);
    [all …]

neon-p5-x16.c, in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16():
    52  const float32x4_t vx0123 = vsubq_f32(vi0123, vi_max);
    53  const float32x4_t vx4567 = vsubq_f32(vi4567, vi_max);
    54  const float32x4_t vx89AB = vsubq_f32(vi89AB, vi_max);
    55  const float32x4_t vxCDEF = vsubq_f32(viCDEF, vi_max);
    76  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    77  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    78  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    79  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
   154  const float32x4_t vx = vsubq_f32(vi, vi_max);
   169  vn = vsubq_f32(vn, vmagic_bias);
    [all …]

neonfma-p5-x16.c, in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x16():
    51  const float32x4_t vx0123 = vsubq_f32(vi0123, vi_max);
    52  const float32x4_t vx4567 = vsubq_f32(vi4567, vi_max);
    53  const float32x4_t vx89AB = vsubq_f32(vi89AB, vi_max);
    54  const float32x4_t vxCDEF = vsubq_f32(viCDEF, vi_max);
    75  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    76  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    77  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    78  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
   153  const float32x4_t vx = vsubq_f32(vi, vi_max);
   168  vn = vsubq_f32(vn, vmagic_bias);
    [all …]

neonfma-p5-x16-acc2.c, in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x16_acc2():
    52  const float32x4_t vx0123 = vsubq_f32(vi0123, vi_max);
    53  const float32x4_t vx4567 = vsubq_f32(vi4567, vi_max);
    54  const float32x4_t vx89AB = vsubq_f32(vi89AB, vi_max);
    55  const float32x4_t vxCDEF = vsubq_f32(viCDEF, vi_max);
    76  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    77  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    78  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    79  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
   156  const float32x4_t vx = vsubq_f32(vi, vi_max);
   171  vn = vsubq_f32(vn, vmagic_bias);
    [all …]

neon-p5-x16-acc2.c, in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16_acc2():
    53  const float32x4_t vx0123 = vsubq_f32(vi0123, vi_max);
    54  const float32x4_t vx4567 = vsubq_f32(vi4567, vi_max);
    55  const float32x4_t vx89AB = vsubq_f32(vi89AB, vi_max);
    56  const float32x4_t vxCDEF = vsubq_f32(viCDEF, vi_max);
    77  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    78  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    79  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    80  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
   157  const float32x4_t vx = vsubq_f32(vi, vi_max);
   172  vn = vsubq_f32(vn, vmagic_bias);
    [all …]

neonfma-p5-x16-acc4.c, in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x16_acc4():
    54  const float32x4_t vx0123 = vsubq_f32(vi0123, vi_max);
    55  const float32x4_t vx4567 = vsubq_f32(vi4567, vi_max);
    56  const float32x4_t vx89AB = vsubq_f32(vi89AB, vi_max);
    57  const float32x4_t vxCDEF = vsubq_f32(viCDEF, vi_max);
    78  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    79  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    80  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    81  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
   160  const float32x4_t vx = vsubq_f32(vi, vi_max);
   175  vn = vsubq_f32(vn, vmagic_bias);
    [all …]

neon-p5-x16-acc4.c, in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x16_acc4():
    55  const float32x4_t vx0123 = vsubq_f32(vi0123, vi_max);
    56  const float32x4_t vx4567 = vsubq_f32(vi4567, vi_max);
    57  const float32x4_t vx89AB = vsubq_f32(vi89AB, vi_max);
    58  const float32x4_t vxCDEF = vsubq_f32(viCDEF, vi_max);
    79  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    80  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    81  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    82  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
   161  const float32x4_t vx = vsubq_f32(vi, vi_max);
   176  vn = vsubq_f32(vn, vmagic_bias);
    [all …]
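All of these raddstoreexpminusmax variants use vsubq_f32 in the same two roles: vx = vi - vi_max shifts every input so the largest element maps to exp(0) = 1 (the usual softmax overflow guard), and vn = vn - vmagic_bias undoes the "magic bias" addition that rounded x * log2(e) to the nearest integer inside the float's mantissa. The sketch below demonstrates only that rounding-and-scaling step on a NEON target; the bias constant 0x1.8000FEp23f and log2(e) follow the usual single-precision scheme these generated kernels appear to use, the sample inputs are made up, and the degree-5 polynomial that completes exp() is omitted.

#include <arm_neon.h>
#include <math.h>
#include <stdio.h>

// Round-to-nearest via the magic bias: adding 0x1.8000FEp23f pushes the
// integer part of x*log2(e) into the low mantissa bits (the constant also
// embeds the IEEE exponent bias), so shifting the biased value's bits left
// by 23 yields 2^n directly, and subtracting the bias back recovers n.
int main(void) {
  const float32x4_t vmagic_bias = vdupq_n_f32(0x1.8000FEp23f);
  const float32x4_t vlog2e = vdupq_n_f32(0x1.715476p+0f);

  const float in[4] = {-10.0f, -2.5f, 0.0f, 1.75f};
  const float32x4_t vi = vld1q_f32(in);
  const float32x4_t vi_max = vdupq_n_f32(1.75f);        // max of this batch
  const float32x4_t vx = vsubq_f32(vi, vi_max);         // x <= 0 everywhere

  float32x4_t vn = vmlaq_f32(vmagic_bias, vx, vlog2e);  // n + magic_bias
  const float32x4_t vs =                                // vs = 2^n
      vreinterpretq_f32_s32(vshlq_n_s32(vreinterpretq_s32_f32(vn), 23));
  vn = vsubq_f32(vn, vmagic_bias);                      // n as a float

  float n[4], s[4], x[4];
  vst1q_f32(n, vn);
  vst1q_f32(s, vs);
  vst1q_f32(x, vx);
  for (int i = 0; i < 4; i++) {
    printf("x=%g  n=%g (expect %g)  2^n=%g (expect %g)\n",
           x[i], n[i], rintf(x[i] * 0x1.715476p+0f), s[i], exp2f(n[i]));
  }
  return 0;
}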
/external/XNNPACK/src/f32-sigmoid/gen/

neonfma-rr1-p5-div-x24.c, in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_div_x24():
    66  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    67  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    68  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    69  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
    70  vnGHIJ = vsubq_f32(vnGHIJ, vmagic_bias);
    71  vnKLMN = vsubq_f32(vnKLMN, vmagic_bias);
   150  vf0123 = vbslq_f32(vm0123, vf0123, vsubq_f32(vone, vf0123));
   151  vf4567 = vbslq_f32(vm4567, vf4567, vsubq_f32(vone, vf4567));
   152  vf89AB = vbslq_f32(vm89AB, vf89AB, vsubq_f32(vone, vf89AB));
   153  vfCDEF = vbslq_f32(vmCDEF, vfCDEF, vsubq_f32(vone, vfCDEF));
    [all …]

neonfma-rr1-p5-div-x20.c, in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_div_x20():
    62  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    63  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    64  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    65  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
    66  vnGHIJ = vsubq_f32(vnGHIJ, vmagic_bias);
   134  vf0123 = vbslq_f32(vm0123, vf0123, vsubq_f32(vone, vf0123));
   135  vf4567 = vbslq_f32(vm4567, vf4567, vsubq_f32(vone, vf4567));
   136  vf89AB = vbslq_f32(vm89AB, vf89AB, vsubq_f32(vone, vf89AB));
   137  vfCDEF = vbslq_f32(vmCDEF, vfCDEF, vsubq_f32(vone, vfCDEF));
   138  vfGHIJ = vbslq_f32(vmGHIJ, vfGHIJ, vsubq_f32(vone, vfGHIJ));
    [all …]

neonfma-rr1-p5-div-x16.c, in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_div_x16():
    58  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    59  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    60  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    61  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
   118  vf0123 = vbslq_f32(vm0123, vf0123, vsubq_f32(vone, vf0123));
   119  vf4567 = vbslq_f32(vm4567, vf4567, vsubq_f32(vone, vf4567));
   120  vf89AB = vbslq_f32(vm89AB, vf89AB, vsubq_f32(vone, vf89AB));
   121  vfCDEF = vbslq_f32(vmCDEF, vfCDEF, vsubq_f32(vone, vfCDEF));
   135  vn = vsubq_f32(vn, vmagic_bias);
   150  vf = vbslq_f32(vm, vf, vsubq_f32(vone, vf));
    [all …]

neonfma-rr1-p5-nr2recps-x24.c, in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_nr2recps_x24():
    66  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    67  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    68  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    69  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
    70  vnGHIJ = vsubq_f32(vnGHIJ, vmagic_bias);
    71  vnKLMN = vsubq_f32(vnKLMN, vmagic_bias);
   171  vf0123 = vbslq_f32(vm0123, vf0123, vsubq_f32(vone, vf0123));
   172  vf4567 = vbslq_f32(vm4567, vf4567, vsubq_f32(vone, vf4567));
   173  vf89AB = vbslq_f32(vm89AB, vf89AB, vsubq_f32(vone, vf89AB));
   174  vfCDEF = vbslq_f32(vmCDEF, vfCDEF, vsubq_f32(vone, vfCDEF));
    [all …]

neonfma-rr1-p5-nr1recps1fma-x24.c, in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_nr1recps1fma_x24():
    66  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    67  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    68  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    69  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
    70  vnGHIJ = vsubq_f32(vnGHIJ, vmagic_bias);
    71  vnKLMN = vsubq_f32(vnKLMN, vmagic_bias);
   171  vf0123 = vbslq_f32(vm0123, vf0123, vsubq_f32(vone, vf0123));
   172  vf4567 = vbslq_f32(vm4567, vf4567, vsubq_f32(vone, vf4567));
   173  vf89AB = vbslq_f32(vm89AB, vf89AB, vsubq_f32(vone, vf89AB));
   174  vfCDEF = vbslq_f32(vmCDEF, vfCDEF, vsubq_f32(vone, vfCDEF));
    [all …]
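Besides the same magic-bias subtraction, the sigmoid kernels use vsubq_f32(vone, vf) together with vbslq_f32 to exploit the symmetry sigmoid(x) = 1 - sigmoid(-x): the exponential is evaluated only for a non-positive argument, and a per-lane mask selects either f or 1 - f. A hedged sketch of just that reconstruction step follows; libm's expf stands in for the kernel's p5 polynomial, the sample inputs are made up, and vdivq_f32 (used by the *_div_* variants) requires AArch64.

#include <arm_neon.h>
#include <math.h>
#include <stdio.h>

// Lane-wise sigmoid using only exp of a non-positive argument:
//   z = -|x|, f = exp(z) / (exp(z) + 1), then flip to 1 - f where x > 0.
int main(void) {
  const float in[4] = {-4.0f, -0.5f, 0.5f, 4.0f};
  const float32x4_t vx = vld1q_f32(in);
  const float32x4_t vone = vdupq_n_f32(1.0f);

  const float32x4_t vz = vnegq_f32(vabsq_f32(vx));      // z = -|x| <= 0
  float z[4], e[4];
  vst1q_f32(z, vz);
  for (int i = 0; i < 4; i++) e[i] = expf(z[i]);        // stand-in for the p5 poly
  const float32x4_t ve = vld1q_f32(e);

  // f = e / (e + 1); the *_nr2recps_* variants approximate this reciprocal
  // with Newton-Raphson steps instead of a true division.
  float32x4_t vf = vdivq_f32(ve, vaddq_f32(ve, vone));

  // Where x > 0, sigmoid(x) = 1 - sigmoid(-x): select via mask + vsubq_f32.
  const uint32x4_t vm = vcltq_f32(vx, vdupq_n_f32(0.0f));  // lanes with x < 0
  vf = vbslq_f32(vm, vf, vsubq_f32(vone, vf));

  float out[4];
  vst1q_f32(out, vf);
  for (int i = 0; i < 4; i++) {
    printf("sigmoid(%g) ~ %g (libm: %g)\n",
           in[i], out[i], 1.0f / (1.0f + expf(-in[i])));
  }
  return 0;
}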
/external/XNNPACK/src/f32-velu/gen/

velu-neon-rr2-p6-x24.c, in xnn_f32_velu_ukernel__neon_rr2_p6_x24():
    68  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    70  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    72  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    74  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
    76  vnGHIJ = vsubq_f32(vnGHIJ, vmagic_bias);
    78  vnKLMN = vsubq_f32(vnKLMN, vmagic_bias);
   130  vs0123 = vsubq_f32(vs0123, vone);
   132  vs4567 = vsubq_f32(vs4567, vone);
   134  vs89AB = vsubq_f32(vs89AB, vone);
   136  vsCDEF = vsubq_f32(vsCDEF, vone);
    [all …]

velu-neonfma-rr1-p6-x24.c, in xnn_f32_velu_ukernel__neonfma_rr1_p6_x24():
    67  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    69  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    71  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    73  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
    75  vnGHIJ = vsubq_f32(vnGHIJ, vmagic_bias);
    77  vnKLMN = vsubq_f32(vnKLMN, vmagic_bias);
   122  vs0123 = vsubq_f32(vs0123, vone);
   124  vs4567 = vsubq_f32(vs4567, vone);
   126  vs89AB = vsubq_f32(vs89AB, vone);
   128  vsCDEF = vsubq_f32(vsCDEF, vone);
    [all …]

velu-neonfma-rr1-p6-x20.c, in xnn_f32_velu_ukernel__neonfma_rr1_p6_x20():
    64  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    66  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    68  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    70  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
    72  vnGHIJ = vsubq_f32(vnGHIJ, vmagic_bias);
   111  vs0123 = vsubq_f32(vs0123, vone);
   113  vs4567 = vsubq_f32(vs4567, vone);
   115  vs89AB = vsubq_f32(vs89AB, vone);
   117  vsCDEF = vsubq_f32(vsCDEF, vone);
   119  vsGHIJ = vsubq_f32(vsGHIJ, vone);
    [all …]

velu-neon-rr2-p6-x20.c, in xnn_f32_velu_ukernel__neon_rr2_p6_x20():
    65  vn0123 = vsubq_f32(vn0123, vmagic_bias);
    67  vn4567 = vsubq_f32(vn4567, vmagic_bias);
    69  vn89AB = vsubq_f32(vn89AB, vmagic_bias);
    71  vnCDEF = vsubq_f32(vnCDEF, vmagic_bias);
    73  vnGHIJ = vsubq_f32(vnGHIJ, vmagic_bias);
   118  vs0123 = vsubq_f32(vs0123, vone);
   120  vs4567 = vsubq_f32(vs4567, vone);
   122  vs89AB = vsubq_f32(vs89AB, vone);
   124  vsCDEF = vsubq_f32(vsCDEF, vone);
   126  vsGHIJ = vsubq_f32(vsGHIJ, vone);
    [all …]
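The ELU kernels add a third role for vsubq_f32 beyond the magic-bias removal: vs = vsubq_f32(vs, vone) turns the reconstructed power of two s = 2^n into s - 1, so expm1(x) can be assembled as (s - 1) + s * expm1(t) without catastrophic cancellation for the negative inputs ELU cares about. The sketch below illustrates that identity under simplifying assumptions: a single-constant ln2 reduction (rr1-style), libm's expm1f standing in for the kernel's degree-6 polynomial, made-up inputs, and no handling of the ELU prescale/alpha/beta parameters.

#include <arm_neon.h>
#include <math.h>
#include <stdio.h>

// expm1(x) for x <= 0, assembled the way the velu kernels structure it:
//   n = round(x * log2(e)), s = 2^n, t = x - n*ln2
//   expm1(x) = (s - 1) + s * expm1(t)
int main(void) {
  const float in[4] = {-8.0f, -2.0f, -0.75f, -0.01f};
  const float32x4_t vx = vld1q_f32(in);
  const float32x4_t vone = vdupq_n_f32(1.0f);
  const float32x4_t vln2 = vdupq_n_f32(0x1.62E430p-1f);
  const float32x4_t vlog2e = vdupq_n_f32(0x1.715476p+0f);
  const float32x4_t vmagic_bias = vdupq_n_f32(0x1.8000FEp23f);

  float32x4_t vn = vmlaq_f32(vmagic_bias, vx, vlog2e);  // round(x*log2e) + bias
  float32x4_t vs =                                      // s = 2^n
      vreinterpretq_f32_s32(vshlq_n_s32(vreinterpretq_s32_f32(vn), 23));
  vn = vsubq_f32(vn, vmagic_bias);                      // n as a float
  const float32x4_t vt = vmlsq_f32(vx, vn, vln2);       // t = x - n*ln2

  float t[4], et1[4];
  vst1q_f32(t, vt);
  for (int i = 0; i < 4; i++) et1[i] = expm1f(t[i]);    // stand-in for the p6 poly
  vs = vsubq_f32(vs, vone);                             // s - 1
  const float32x4_t ve =                                // (s - 1) + s*expm1(t)
      vmlaq_f32(vs, vaddq_f32(vs, vone), vld1q_f32(et1));

  float out[4];
  vst1q_f32(out, ve);
  for (int i = 0; i < 4; i++)
    printf("expm1(%g) ~ %g (libm: %g)\n", in[i], out[i], expm1f(in[i]));
  return 0;
}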