/external/clang/test/SemaCXX/

address-space-conversion.cpp
  in test_static_cast():
     50  void test_static_cast(void_ptr vp, void_ptr_1 vp1, void_ptr_2 vp2,   (argument)
     69  (void)static_cast<A_ptr_2>(vp2);
     95  (void)static_cast<A_ptr>(vp2);   // expected-error{{casts away qualifiers}}
     97  (void)static_cast<A_ptr_1>(vp2); // expected-error{{casts away qualifiers}}
  in test_reinterpret_cast():
    131  void test_reinterpret_cast(void_ptr vp, void_ptr_1 vp1, void_ptr_2 vp2,   (argument)
    143  (void)reinterpret_cast<A_ptr>(vp2);
    151  (void)reinterpret_cast<A_ptr_1>(vp2);
  in test_cstyle_cast():
    157  void test_cstyle_cast(void_ptr vp, void_ptr_1 vp1, void_ptr_2 vp2,   (argument)
    169  (void)(A_ptr)(vp2);
    177  (void)(A_ptr_1)(vp2);
  [all …]
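
These hits all exercise Clang's address-space qualifier checking: a static_cast between void and object pointers in the same address space is accepted (line 69), a static_cast that would drop the address-space qualifier is diagnosed (lines 95 and 97), while reinterpret_cast and C-style casts are allowed to convert between spaces (lines 143 through 177). A minimal sketch of the setup, with the typedefs reconstructed from the names above rather than copied from the file:

    // Hypothetical reconstruction of the test's typedefs; Clang's
    // address_space attribute qualifies the pointee type.
    struct A {};
    typedef void *void_ptr;
    typedef __attribute__((address_space(1))) void *void_ptr_1;
    typedef __attribute__((address_space(2))) void *void_ptr_2;
    typedef A *A_ptr;
    typedef __attribute__((address_space(2))) A *A_ptr_2;

    void sketch(void_ptr_2 vp2) {
      (void)static_cast<A_ptr_2>(vp2);     // OK: stays within address space 2
      // (void)static_cast<A_ptr>(vp2);    // error: casts away qualifiers
      (void)reinterpret_cast<A_ptr>(vp2);  // accepted: reinterpret_cast may cross spaces
      (void)(A_ptr)(vp2);                  // accepted: C-style cast behaves likewise
    }
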
/external/XNNPACK/src/f32-vscaleextexp/gen/ |
avx512f-p5-scalef-x48.c  (in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x48()):
   67  __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4);   (local)
   71  vp2 = _mm512_fmadd_ps(vp2, vt2, vc3);
   75  vp2 = _mm512_fmadd_ps(vp2, vt2, vc2);
   79  vp2 = _mm512_fmadd_ps(vp2, vt2, vc1);
   83  vp2 = _mm512_fmadd_ps(vp2, vt2, vc0);
   93  __m512 vf2 = _mm512_mul_ps(vp2, vscalev);

avx512f-p5-scalef-x64.c  (in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x64()):
   71  __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4);   (local)
   76  vp2 = _mm512_fmadd_ps(vp2, vt2, vc3);
   81  vp2 = _mm512_fmadd_ps(vp2, vt2, vc2);
   86  vp2 = _mm512_fmadd_ps(vp2, vt2, vc1);
   91  vp2 = _mm512_fmadd_ps(vp2, vt2, vc0);
  102  __m512 vf2 = _mm512_mul_ps(vp2, vscalev);

avx2-p5-x24.c  (in xnn_f32_vscaleextexp_ukernel__avx2_p5_x24()):
   73  __m256 vp2 = _mm256_fmadd_ps(vc5, vt2, vc4);   (local)
   77  vp2 = _mm256_fmadd_ps(vp2, vt2, vc3);
   81  vp2 = _mm256_fmadd_ps(vp2, vt2, vc2);
   85  vp2 = _mm256_fmadd_ps(vp2, vt2, vc1);
   89  vp2 = _mm256_fmadd_ps(vp2, vt2, vc0);
   99  __m256 vf2 = _mm256_mul_ps(vp2, vscalev);

avx512f-p5-scalef-x80.c  (in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x80()):
   75  __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4);   (local)
   81  vp2 = _mm512_fmadd_ps(vp2, vt2, vc3);
   87  vp2 = _mm512_fmadd_ps(vp2, vt2, vc2);
   93  vp2 = _mm512_fmadd_ps(vp2, vt2, vc1);
   99  vp2 = _mm512_fmadd_ps(vp2, vt2, vc0);
  111  __m512 vf2 = _mm512_mul_ps(vp2, vscalev);

avx2-p5-x32.c  (in xnn_f32_vscaleextexp_ukernel__avx2_p5_x32()):
   77  __m256 vp2 = _mm256_fmadd_ps(vc5, vt2, vc4);   (local)
   82  vp2 = _mm256_fmadd_ps(vp2, vt2, vc3);
   87  vp2 = _mm256_fmadd_ps(vp2, vt2, vc2);
   92  vp2 = _mm256_fmadd_ps(vp2, vt2, vc1);
   97  vp2 = _mm256_fmadd_ps(vp2, vt2, vc0);
  108  __m256 vf2 = _mm256_mul_ps(vp2, vscalev);

avx512f-p5-scalef-x96.c  (in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x96()):
   79  __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4);   (local)
   86  vp2 = _mm512_fmadd_ps(vp2, vt2, vc3);
   93  vp2 = _mm512_fmadd_ps(vp2, vt2, vc2);
  100  vp2 = _mm512_fmadd_ps(vp2, vt2, vc1);
  107  vp2 = _mm512_fmadd_ps(vp2, vt2, vc0);
  120  __m512 vf2 = _mm512_mul_ps(vp2, vscalev);

avx2-p5-x40.c  (in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40()):
   81  __m256 vp2 = _mm256_fmadd_ps(vc5, vt2, vc4);   (local)
   87  vp2 = _mm256_fmadd_ps(vp2, vt2, vc3);
   93  vp2 = _mm256_fmadd_ps(vp2, vt2, vc2);
   99  vp2 = _mm256_fmadd_ps(vp2, vt2, vc1);
  105  vp2 = _mm256_fmadd_ps(vp2, vt2, vc0);
  117  __m256 vf2 = _mm256_mul_ps(vp2, vscalev);

avx512f-p5-scalef-x112.c  (in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x112()):
   83  __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4);   (local)
   91  vp2 = _mm512_fmadd_ps(vp2, vt2, vc3);
   99  vp2 = _mm512_fmadd_ps(vp2, vt2, vc2);
  107  vp2 = _mm512_fmadd_ps(vp2, vt2, vc1);
  115  vp2 = _mm512_fmadd_ps(vp2, vt2, vc0);
  129  __m512 vf2 = _mm512_mul_ps(vp2, vscalev);

avx512f-p5-scalef-x128.c  (in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x128()):
   87  __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4);   (local)
   96  vp2 = _mm512_fmadd_ps(vp2, vt2, vc3);
  105  vp2 = _mm512_fmadd_ps(vp2, vt2, vc2);
  114  vp2 = _mm512_fmadd_ps(vp2, vt2, vc1);
  123  vp2 = _mm512_fmadd_ps(vp2, vt2, vc0);
  138  __m512 vf2 = _mm512_mul_ps(vp2, vscalev);
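
Every vp2 line above is one accumulator of the same unrolled loop: the kernels process several vectors per iteration (x48 is 3×16 floats with AVX512, x24 is 3×8 with AVX2, and so on), and vp2 is the polynomial accumulator for the third vector. The chain evaluates a degree-5 polynomial in the reduced argument vt2 by Horner's rule, one FMA per coefficient, then scales the result. A hedged scalar sketch of one lane, using the coefficient names from the snippets (the argument reduction that produces t is not visible in these matches and is omitted):

    #include <cmath>

    // p(t) = ((((c5*t + c4)*t + c3)*t + c2)*t + c1)*t + c0, one std::fmaf per
    // _mm512_fmadd_ps/_mm256_fmadd_ps step in the kernels above.
    float p5_scaleextexp_lane(float t, float scale, float c5, float c4,
                              float c3, float c2, float c1, float c0) {
      float p = std::fmaf(c5, t, c4);  // __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4)
      p = std::fmaf(p, t, c3);
      p = std::fmaf(p, t, c2);
      p = std::fmaf(p, t, c1);
      p = std::fmaf(p, t, c0);
      return p * scale;                // vf2 = _mm512_mul_ps(vp2, vscalev)
    }
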
/external/XNNPACK/src/f32-vscaleexpminusmax/gen/ |
avx512f-p5-scalef-x48.c  (in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x48()):
   71  __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4);   (local)
   75  vp2 = _mm512_fmadd_ps(vp2, vt2, vc3);
   79  vp2 = _mm512_fmadd_ps(vp2, vt2, vc2);
   83  vp2 = _mm512_fmadd_ps(vp2, vt2, vc1);
   87  vp2 = _mm512_fmadd_ps(vp2, vt2, vc0);
   94  __m512 vf2 = _mm512_scalef_ps(vp2, vn2);

avx512f-p5-scalef-x64.c  (in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x64()):
   76  __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4);   (local)
   81  vp2 = _mm512_fmadd_ps(vp2, vt2, vc3);
   86  vp2 = _mm512_fmadd_ps(vp2, vt2, vc2);
   91  vp2 = _mm512_fmadd_ps(vp2, vt2, vc1);
   96  vp2 = _mm512_fmadd_ps(vp2, vt2, vc0);
  104  __m512 vf2 = _mm512_scalef_ps(vp2, vn2);

avx512f-p5-scalef-x80.c  (in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x80()):
   81  __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4);   (local)
   87  vp2 = _mm512_fmadd_ps(vp2, vt2, vc3);
   93  vp2 = _mm512_fmadd_ps(vp2, vt2, vc2);
   99  vp2 = _mm512_fmadd_ps(vp2, vt2, vc1);
  105  vp2 = _mm512_fmadd_ps(vp2, vt2, vc0);
  114  __m512 vf2 = _mm512_scalef_ps(vp2, vn2);

avx512f-p5-scalef-x96.c  (in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x96()):
   86  __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4);   (local)
   93  vp2 = _mm512_fmadd_ps(vp2, vt2, vc3);
  100  vp2 = _mm512_fmadd_ps(vp2, vt2, vc2);
  107  vp2 = _mm512_fmadd_ps(vp2, vt2, vc1);
  114  vp2 = _mm512_fmadd_ps(vp2, vt2, vc0);
  124  __m512 vf2 = _mm512_scalef_ps(vp2, vn2);

avx512f-p5-scalef-x112.c  (in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x112()):
   91  __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4);   (local)
   99  vp2 = _mm512_fmadd_ps(vp2, vt2, vc3);
  107  vp2 = _mm512_fmadd_ps(vp2, vt2, vc2);
  115  vp2 = _mm512_fmadd_ps(vp2, vt2, vc1);
  123  vp2 = _mm512_fmadd_ps(vp2, vt2, vc0);
  134  __m512 vf2 = _mm512_scalef_ps(vp2, vn2);

avx2-p5-x24.c  (in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x24()):
   86  __m256 vp2 = _mm256_fmadd_ps(vc5, vt2, vc4);   (local)
   90  vp2 = _mm256_fmadd_ps(vp2, vt2, vc3);
   94  vp2 = _mm256_fmadd_ps(vp2, vt2, vc2);
   98  vp2 = _mm256_fmadd_ps(vp2, vt2, vc1);
  110  __m256 vf2 = _mm256_fmadd_ps(vt2, vp2, vs2);

avx512f-p5-scalef-x128.c  (in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x128()):
   96  __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4);   (local)
  105  vp2 = _mm512_fmadd_ps(vp2, vt2, vc3);
  114  vp2 = _mm512_fmadd_ps(vp2, vt2, vc2);
  123  vp2 = _mm512_fmadd_ps(vp2, vt2, vc1);
  132  vp2 = _mm512_fmadd_ps(vp2, vt2, vc0);
  144  __m512 vf2 = _mm512_scalef_ps(vp2, vn2);
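
The vscaleexpminusmax kernels run the same Horner chain but differ in how they rebuild e^(x-max) from the polynomial value: the AVX512 variants fold the 2^n factor in with a single _mm512_scalef_ps, while the AVX2 variant stops the chain at vc1 and reconstructs with vf2 = vt2*vp2 + vs2, where vs2 = 2^n and vt2 has already been multiplied by vs2 earlier in the file. A hedged scalar sketch of the two endings (n, s, and the pre-scaling are inferred from the kernel family, not visible in the matches):

    #include <cmath>

    // AVX512 ending: f = p * 2^n; _mm512_scalef_ps does this in one instruction,
    // std::ldexp is the scalar counterpart for integral n.
    float ending_scalef(float p, float n) {
      return std::ldexp(p, static_cast<int>(n));  // vf2 = _mm512_scalef_ps(vp2, vn2)
    }

    // AVX2 ending: with s = 2^n and ts = t*s computed beforehand,
    // f = ts*p + s, i.e. s * (1 + t*p(t)) with the constant term folded into s.
    float ending_fmadd(float ts, float p, float s) {
      return std::fmaf(ts, p, s);                 // vf2 = _mm256_fmadd_ps(vt2, vp2, vs2)
    }
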
/external/XNNPACK/src/f32-raddstoreexpminusmax/gen/ |
scalar-p5-x4.c  (in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x4()):
   94  float vp2 = vc5 * vt2 + vc4;   (local)
   99  vp2 = vp2 * vt2 + vc3;
  104  vp2 = vp2 * vt2 + vc2;
  109  vp2 = vp2 * vt2 + vc1;
  123  float vf2 = vt2 * vp2 + vs2;

scalar-p5-x4-acc2.c  (in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x4_acc2()):
   95  float vp2 = vc5 * vt2 + vc4;   (local)
  100  vp2 = vp2 * vt2 + vc3;
  105  vp2 = vp2 * vt2 + vc2;
  110  vp2 = vp2 * vt2 + vc1;
  124  float vf2 = vt2 * vp2 + vs2;

scalar-p5-x4-acc4.c  (in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x4_acc4()):
   97  float vp2 = vc5 * vt2 + vc4;   (local)
  102  vp2 = vp2 * vt2 + vc3;
  107  vp2 = vp2 * vt2 + vc2;
  112  vp2 = vp2 * vt2 + vc1;
  126  float vf2 = vt2 * vp2 + vs2;

avx512f-p5-scalef-x128-acc2.c  (in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x128_acc2()):
   97  __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4);   (local)
  106  vp2 = _mm512_fmadd_ps(vp2, vt2, vc3);
  115  vp2 = _mm512_fmadd_ps(vp2, vt2, vc2);
  124  vp2 = _mm512_fmadd_ps(vp2, vt2, vc1);
  133  vp2 = _mm512_fmadd_ps(vp2, vt2, vc0);
  145  const __m512 vf2 = _mm512_scalef_ps(vp2, vn2);
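
The scalar raddstoreexpminusmax kernels spell out the same evaluation with plain multiplies and adds, ending in vf2 = vt2 * vp2 + vs2. A hedged single-element sketch of the whole path; the reduction constants (log2(e), ln(2)) and the rounding step are reconstructed from the kernel family, not from the matches above:

    #include <cmath>

    // exp(x - max) ≈ s + (s*t)*p(t), with n = round((x-max)*log2(e)),
    // t = (x-max) - n*ln(2), s = 2^n, and p the degree-4 tail of the p5 polynomial.
    float exp_p5_sketch(float x, float max, float c5, float c4,
                        float c3, float c2, float c1) {
      const float z = x - max;                        // <= 0 by construction
      const float n = std::nearbyint(z * 1.442695f);  // assumed reduction step
      const float t = z - n * 0.6931472f;
      const float s = std::ldexp(1.0f, static_cast<int>(n));
      float p = c5 * t + c4;                          // float vp2 = vc5 * vt2 + vc4
      p = p * t + c3;
      p = p * t + c2;
      p = p * t + c1;
      return (t * s) * p + s;                         // float vf2 = vt2 * vp2 + vs2
    }
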
/external/XNNPACK/src/f32-sigmoid/gen/ |
scalar-p5-div-x4.c  (in xnn_f32_sigmoid_ukernel__scalar_p5_div_x4()):
  102  float vp2 = vt2 * vc5 + vc4;   (local)
  107  vp2 = vt2 * vp2 + vc3;
  112  vp2 = vt2 * vp2 + vc2;
  117  vp2 = vt2 * vp2 + vc1;
  131  const float ve2 = vt2 * vp2 + vs2;
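
The sigmoid kernel reuses the identical exp-style evaluation (ve2 above) and then, as the div in the file name indicates, finishes with a division. A hedged sketch of that final step; the real kernel evaluates e for the negated magnitude of x and then selects f or 1 - f by the sign of x, which is omitted here:

    // sigmoid via the exp-like term e = ve2 computed above: f = e / (e + 1).
    float sigmoid_finish(float e) {
      return e / (e + 1.0f);
    }
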
/external/XNNPACK/src/f32-raddexpminusmax/gen/ |
avx512f-p5-scalef-x128-acc2.c  (in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x128_acc2()):
   96  __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4);   (local)
  105  vp2 = _mm512_fmadd_ps(vp2, vt2, vc3);
  114  vp2 = _mm512_fmadd_ps(vp2, vt2, vc2);
  123  vp2 = _mm512_fmadd_ps(vp2, vt2, vc1);
  132  vp2 = _mm512_fmadd_ps(vp2, vt2, vc0);
  144  const __m512 vf2 = _mm512_scalef_ps(vp2, vn2);

avx512f-p5-scalef-x128.c  (in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x128()):
   95  __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4);   (local)
  104  vp2 = _mm512_fmadd_ps(vp2, vt2, vc3);
  113  vp2 = _mm512_fmadd_ps(vp2, vt2, vc2);
  122  vp2 = _mm512_fmadd_ps(vp2, vt2, vc1);
  131  vp2 = _mm512_fmadd_ps(vp2, vt2, vc0);
  143  const __m512 vf2 = _mm512_scalef_ps(vp2, vn2);

avx512f-p5-scalef-x128-acc4.c  (in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x128_acc4()):
   98  __m512 vp2 = _mm512_fmadd_ps(vc5, vt2, vc4);   (local)
  107  vp2 = _mm512_fmadd_ps(vp2, vt2, vc3);
  116  vp2 = _mm512_fmadd_ps(vp2, vt2, vc2);
  125  vp2 = _mm512_fmadd_ps(vp2, vt2, vc1);
  134  vp2 = _mm512_fmadd_ps(vp2, vt2, vc0);
  146  const __m512 vf2 = _mm512_scalef_ps(vp2, vn2);
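
All three x128 variants end each vector group with the same one-instruction reconstruction. A short intrinsics sketch of just that step (AVX512F; the surrounding reduction and the accumulator adds are omitted):

    #include <immintrin.h>

    // Lane-wise vf2 = vp2 * 2^floor(vn2): _mm512_scalef_ps fuses the 2^n scaling
    // that scalar code does with ldexp, so no separate bit-trick for 2^n is needed.
    __m512 reconstruct_vf(__m512 vp2, __m512 vn2) {
      return _mm512_scalef_ps(vp2, vn2);
    }
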