/external/XNNPACK/src/f32-raddstoreexpminusmax/gen/

D | psimd-p5-x4.c
     57  const psimd_f32 vs0123 = (psimd_f32) ((psimd_u32) vn0123 << 23);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x4() local
     81  vt0123 = psimd_mul_f32(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x4()
     83  psimd_f32 vf0123 = psimd_qfma_f32(vs0123, vt0123, vp0123);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x4()

D | sse2-p5-x4.c
     57  const __m128 vs0123 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(vn0123), 23));   in xnn_f32_raddstoreexpminusmax_ukernel__sse2_p5_x4() local
     81  vt0123 = _mm_mul_ps(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__sse2_p5_x4()
     83  __m128 vf0123 = _mm_add_ps(_mm_mul_ps(vt0123, vp0123), vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__sse2_p5_x4()

D | psimd-p5-x8.c
     60  const psimd_f32 vs0123 = (psimd_f32) ((psimd_u32) vn0123 << 23);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x8() local
     92  vt0123 = psimd_mul_f32(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x8()
     95  psimd_f32 vf0123 = psimd_qfma_f32(vs0123, vt0123, vp0123);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x8()

D | psimd-p5-x8-acc2.c
     61  const psimd_f32 vs0123 = (psimd_f32) ((psimd_u32) vn0123 << 23);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x8_acc2() local
     93  vt0123 = psimd_mul_f32(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x8_acc2()
     96  psimd_f32 vf0123 = psimd_qfma_f32(vs0123, vt0123, vp0123);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x8_acc2()

D | neon-p5-x8-acc2.c
     65  … const float32x4_t vs0123 = vreinterpretq_f32_s32(vshlq_n_s32(vreinterpretq_s32_f32(vn0123), 23));   in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8_acc2() local
     97  vt0123 = vmulq_f32(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8_acc2()
    100  float32x4_t vf0123 = vmlaq_f32(vs0123, vp0123, vt0123);   in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8_acc2()

D | sse2-p5-x8.c
     60  const __m128 vs0123 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(vn0123), 23));   in xnn_f32_raddstoreexpminusmax_ukernel__sse2_p5_x8() local
     92  vt0123 = _mm_mul_ps(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__sse2_p5_x8()
     95  __m128 vf0123 = _mm_add_ps(_mm_mul_ps(vt0123, vp0123), vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__sse2_p5_x8()

D | neonfma-p5-x8-acc2.c
     64  … const float32x4_t vs0123 = vreinterpretq_f32_s32(vshlq_n_s32(vreinterpretq_s32_f32(vn0123), 23));   in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x8_acc2() local
     96  vt0123 = vmulq_f32(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x8_acc2()
     99  float32x4_t vf0123 = vfmaq_f32(vs0123, vp0123, vt0123);   in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x8_acc2()

D | neon-p5-x8.c
     64  … const float32x4_t vs0123 = vreinterpretq_f32_s32(vshlq_n_s32(vreinterpretq_s32_f32(vn0123), 23));   in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8() local
     96  vt0123 = vmulq_f32(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()
     99  float32x4_t vf0123 = vmlaq_f32(vs0123, vp0123, vt0123);   in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x8()

D | sse2-p5-x8-acc2.c
     61  const __m128 vs0123 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(vn0123), 23));   in xnn_f32_raddstoreexpminusmax_ukernel__sse2_p5_x8_acc2() local
     93  vt0123 = _mm_mul_ps(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__sse2_p5_x8_acc2()
     96  __m128 vf0123 = _mm_add_ps(_mm_mul_ps(vt0123, vp0123), vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__sse2_p5_x8_acc2()

D | neonfma-p5-x8.c
     63  … const float32x4_t vs0123 = vreinterpretq_f32_s32(vshlq_n_s32(vreinterpretq_s32_f32(vn0123), 23));   in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x8() local
     95  vt0123 = vmulq_f32(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x8()
     98  float32x4_t vf0123 = vfmaq_f32(vs0123, vp0123, vt0123);   in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x8()

D | psimd-p5-x12.c
     63  const psimd_f32 vs0123 = (psimd_f32) ((psimd_u32) vn0123 << 23);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x12() local
    103  vt0123 = psimd_mul_f32(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x12()
    107  psimd_f32 vf0123 = psimd_qfma_f32(vs0123, vt0123, vp0123);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x12()

D | psimd-p5-x12-acc2.c
     64  const psimd_f32 vs0123 = (psimd_f32) ((psimd_u32) vn0123 << 23);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x12_acc2() local
    104  vt0123 = psimd_mul_f32(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x12_acc2()
    108  psimd_f32 vf0123 = psimd_qfma_f32(vs0123, vt0123, vp0123);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x12_acc2()

D | psimd-p5-x12-acc3.c
     65  const psimd_f32 vs0123 = (psimd_f32) ((psimd_u32) vn0123 << 23);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x12_acc3() local
    105  vt0123 = psimd_mul_f32(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x12_acc3()
    109  psimd_f32 vf0123 = psimd_qfma_f32(vs0123, vt0123, vp0123);   in xnn_f32_raddstoreexpminusmax_ukernel__psimd_p5_x12_acc3()

D | sse2-p5-x12.c
     63  const __m128 vs0123 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(vn0123), 23));   in xnn_f32_raddstoreexpminusmax_ukernel__sse2_p5_x12() local
    103  vt0123 = _mm_mul_ps(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__sse2_p5_x12()
    107  __m128 vf0123 = _mm_add_ps(_mm_mul_ps(vt0123, vp0123), vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__sse2_p5_x12()

D | neon-p5-x12.c
     67  … const float32x4_t vs0123 = vreinterpretq_f32_s32(vshlq_n_s32(vreinterpretq_s32_f32(vn0123), 23));   in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x12() local
    107  vt0123 = vmulq_f32(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x12()
    111  float32x4_t vf0123 = vmlaq_f32(vs0123, vp0123, vt0123);   in xnn_f32_raddstoreexpminusmax_ukernel__neon_p5_x12()

D | neonfma-p5-x12.c
     66  … const float32x4_t vs0123 = vreinterpretq_f32_s32(vshlq_n_s32(vreinterpretq_s32_f32(vn0123), 23));   in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x12() local
    106  vt0123 = vmulq_f32(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x12()
    110  float32x4_t vf0123 = vfmaq_f32(vs0123, vp0123, vt0123);   in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x12()

D | neonfma-p5-x12-acc2.c
     67  … const float32x4_t vs0123 = vreinterpretq_f32_s32(vshlq_n_s32(vreinterpretq_s32_f32(vn0123), 23));   in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x12_acc2() local
    107  vt0123 = vmulq_f32(vt0123, vs0123);   in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x12_acc2()
    111  float32x4_t vf0123 = vfmaq_f32(vs0123, vp0123, vt0123);   in xnn_f32_raddstoreexpminusmax_ukernel__neonfma_p5_x12_acc2()
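Every kernel listed above builds its scale factor the same way: vn0123 holds the integer part n of x*log2(e) plus a magic bias, the left shift by 23 moves n (plus the IEEE exponent bias) into the exponent field so that vs0123 = 2^n, and the final qfma/vfmaq/vmlaq/_mm_add_ps(_mm_mul_ps(...)) match recombines exp(x) ~= vs + (vs*t)*p(t). A minimal scalar sketch of that sequence follows; the magic-bias constant 0x1.8000FEp23f is assumed from the generated kernels, Taylor coefficients stand in for their degree-5 minimax coefficients, and a single ln(2) constant stands in for their hi/lo split.

#include <stdint.h>
#include <string.h>

/* Scalar sketch of the p5 exp pattern above, valid for roughly -87.3 <= x <= 0. */
static float exp_p5_sketch(float x) {
  const float log2e      = 0x1.715476p+0f;  /* log2(e) */
  const float magic_bias = 0x1.8000FEp23f;  /* 1.5*2^23 + 127 (assumed constant) */
  const float ln2        = 0x1.62E430p-1f;  /* ln(2); the kernels use a hi/lo split */

  /* n = round(x*log2(e)); after adding the magic bias, n + 127 sits in the
   * low bits of vn's float representation. */
  float vn = x * log2e + magic_bias;

  /* "vs0123 = (psimd_f32) ((psimd_u32) vn0123 << 23)": shifting those low
   * bits into the exponent field yields vs = 2^n exactly. */
  uint32_t bits;
  memcpy(&bits, &vn, sizeof bits);
  bits <<= 23;
  float vs;
  memcpy(&vs, &bits, sizeof vs);

  vn -= magic_bias;             /* recover n as an ordinary float   */
  float vt = x - vn * ln2;      /* reduced argument, |t| <= ln(2)/2 */

  /* p(t) ~= (e^t - 1)/t; Taylor coefficients here, minimax in the kernels. */
  float vp = 1.0f / 120.0f;
  vp = vp * vt + 1.0f / 24.0f;
  vp = vp * vt + 1.0f / 6.0f;
  vp = vp * vt + 0.5f;
  vp = vp * vt + 1.0f;

  /* The "vt0123 = mul(vt0123, vs0123)" and "vf0123 = fma(vs0123, vt0123, vp0123)"
   * lines from the listing: exp(x) ~= vs + (vs*t)*p(t). */
  vt *= vs;
  return vs + vt * vp;
}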
/external/XNNPACK/src/f32-sigmoid/gen/

D | psimd-p5-div-x8.c
     68  const psimd_f32 vs0123 = (psimd_f32) ((psimd_u32) vn0123 << 23);   in xnn_f32_sigmoid_ukernel__psimd_p5_div_x8() local
    101  vt0123 = psimd_mul_f32(vt0123, vs0123);   in xnn_f32_sigmoid_ukernel__psimd_p5_div_x8()
    104  const psimd_f32 ve0123 = psimd_qfma_f32(vs0123, vt0123, vp0123);   in xnn_f32_sigmoid_ukernel__psimd_p5_div_x8()

D | neonfma-rr1-p5-div-x8.c
     65  … const float32x4_t vs0123 = vreinterpretq_f32_s32(vshlq_n_s32(vreinterpretq_s32_f32(vn0123), 23));   in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_div_x8() local
     93  vt0123 = vmulq_f32(vt0123, vs0123);   in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_div_x8()
     96  float32x4_t ve0123 = vfmaq_f32(vs0123, vp0123, vt0123);   in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_div_x8()

D | sse41-p5-div-x8.c
     68  const __m128 vs0123 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(vn0123), 23));   in xnn_f32_sigmoid_ukernel__sse41_p5_div_x8() local
    100  vt0123 = _mm_mul_ps(vt0123, vs0123);   in xnn_f32_sigmoid_ukernel__sse41_p5_div_x8()
    103  __m128 ve0123 = _mm_add_ps(_mm_mul_ps(vt0123, vp0123), vs0123);   in xnn_f32_sigmoid_ukernel__sse41_p5_div_x8()

D | psimd-p5-div-x12.c
     71  const psimd_f32 vs0123 = (psimd_f32) ((psimd_u32) vn0123 << 23);   in xnn_f32_sigmoid_ukernel__psimd_p5_div_x12() local
    112  vt0123 = psimd_mul_f32(vt0123, vs0123);   in xnn_f32_sigmoid_ukernel__psimd_p5_div_x12()
    116  const psimd_f32 ve0123 = psimd_qfma_f32(vs0123, vt0123, vp0123);   in xnn_f32_sigmoid_ukernel__psimd_p5_div_x12()

D | neonfma-rr1-p5-nr2recps-x8.c
     65  … const float32x4_t vs0123 = vreinterpretq_f32_s32(vshlq_n_s32(vreinterpretq_s32_f32(vn0123), 23));   in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_nr2recps_x8() local
     93  vt0123 = vmulq_f32(vt0123, vs0123);   in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_nr2recps_x8()
     96  float32x4_t ve0123 = vfmaq_f32(vs0123, vp0123, vt0123);   in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_nr2recps_x8()

D | sse2-p5-div-x8.c
     68  const __m128 vs0123 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(vn0123), 23));   in xnn_f32_sigmoid_ukernel__sse2_p5_div_x8() local
    100  vt0123 = _mm_mul_ps(vt0123, vs0123);   in xnn_f32_sigmoid_ukernel__sse2_p5_div_x8()
    103  __m128 ve0123 = _mm_add_ps(_mm_mul_ps(vt0123, vp0123), vs0123);   in xnn_f32_sigmoid_ukernel__sse2_p5_div_x8()

D | neonfma-rr1-p5-nr1recps1fma-x8.c
     65  … const float32x4_t vs0123 = vreinterpretq_f32_s32(vshlq_n_s32(vreinterpretq_s32_f32(vn0123), 23));   in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_nr1recps1fma_x8() local
     93  vt0123 = vmulq_f32(vt0123, vs0123);   in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_nr1recps1fma_x8()
     96  float32x4_t ve0123 = vfmaq_f32(vs0123, vp0123, vt0123);   in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_nr1recps1fma_x8()

D | neonfma-rr1-p5-div-x12.c
     68  … const float32x4_t vs0123 = vreinterpretq_f32_s32(vshlq_n_s32(vreinterpretq_s32_f32(vn0123), 23));   in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_div_x12() local
    103  vt0123 = vmulq_f32(vt0123, vs0123);   in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_div_x12()
    107  float32x4_t ve0123 = vfmaq_f32(vs0123, vp0123, vt0123);   in xnn_f32_sigmoid_ukernel__neonfma_rr1_p5_div_x12()
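The sigmoid kernels above reuse the same exp reconstruction: the ve0123 matches are the exp(z) term for z on the non-positive half of the input, which the remainder of each kernel (not captured by these matches) turns into a sigmoid. A hedged scalar sketch follows, inferred from the kernel names rather than copied from the generated code: the "-div-" variants divide directly, while the "-nr2recps-" and "-nr1recps1fma-" variants replace the division with a NEON reciprocal estimate refined by Newton-Raphson steps.

#include <math.h>

/* Scalar sketch: sigmoid(x) from the exp term above (exp_p5_sketch). */
static float sigmoid_p5_sketch(float x) {
  const float e = exp_p5_sketch(-fabsf(x)); /* ve: exp on the non-positive half */
  const float d = e + 1.0f;                 /* vd = e + 1                       */
  const float f = e / d;                    /* sigmoid(-|x|) = e / (e + 1)      */
  return x > 0.0f ? 1.0f - f : f;           /* mirror for positive inputs       */
}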