/third_party/cmsis/CMSIS/DSP/Source/ComplexMathFunctions/

D | arm_cmplx_dot_prod_q15.c  (hits in arm_cmplx_dot_prod_q15())
      68  q15x8_t vecSrcA, vecSrcB;   (local)
      75  vecSrcA = vld1q(pSrcA);
      82  accReal = vmlsldavaq(accReal, vecSrcA, vecSrcB);
      86  accImag = vmlaldavaxq(accImag, vecSrcA, vecSrcB);
      91  vecSrcA = vld1q(pSrcA);
     104  accReal = vmlsldavaq(accReal, vecSrcA, vecSrcB);
     107  accImag = vmlaldavaxq(accImag, vecSrcA, vecSrcB);
     111  vecSrcA = vld1q(pSrcA);
     126  vecSrcA = vldrhq_z_s16(pSrcA, p);
     129  accReal = vmlsldavaq_p(accReal, vecSrcA, vecSrcB, p);
     [all …]

D | arm_cmplx_dot_prod_q31.c  (hits in arm_cmplx_dot_prod_q31())
      70  q31x4_t vecSrcA, vecSrcB;   (local)
      77  vecSrcA = vld1q(pSrcA);
      84  accReal = vrmlsldavhaq(accReal, vecSrcA, vecSrcB);
      88  accImag = vrmlaldavhaxq(accImag, vecSrcA, vecSrcB);
      93  vecSrcA = vld1q(pSrcA);
     106  accReal = vrmlsldavhaq(accReal, vecSrcA, vecSrcB);
     109  accImag = vrmlaldavhaxq(accImag, vecSrcA, vecSrcB);
     113  vecSrcA = vld1q(pSrcA);
     128  vecSrcA = vldrwq_z_s32(pSrcA, p);
     131  accReal = vrmlsldavhaq_p(accReal, vecSrcA, vecSrcB, p);
     [all …]

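The two entries above are the Helium (MVE) paths of the fixed-point complex dot product: vmlsldavaq accumulates the re*re - im*im pairs into a 64-bit real accumulator, vmlaldavaxq accumulates the exchanged re*im + im*re pairs, and the _p forms plus a predicated load handle the leftover tail. Below is a minimal sketch of that loop shape for the q15 case. It is not the CMSIS-DSP source: the function name and result layout are made up for illustration, the real arm_cmplx_dot_prod_q15 also rescales the accumulators into its documented output Q-format, and the code assumes an Armv8.1-M target with MVE and arm_mve.h.

    #include <stdint.h>
    #include "arm_mve.h"

    /* Sketch only: q15 complex dot product with MVE, raw 64-bit accumulators.
     * pSrcA/pSrcB hold interleaved {re, im} samples; numSamples counts complex
     * samples. The real CMSIS function additionally rescales the accumulators. */
    static void cmplx_dot_prod_q15_sketch(const int16_t *pSrcA,
                                          const int16_t *pSrcB,
                                          uint32_t numSamples,
                                          int64_t *realResult,
                                          int64_t *imagResult)
    {
        int64_t accReal = 0;
        int64_t accImag = 0;
        int32_t blkCnt  = (int32_t)(numSamples * 2U);    /* int16 elements left */

        while (blkCnt > 0)
        {
            /* Tail predication: full iterations simply get an all-true predicate. */
            mve_pred16_t p = vctp16q((uint32_t)blkCnt);
            int16x8_t vecSrcA = vldrhq_z_s16(pSrcA, p);
            int16x8_t vecSrcB = vldrhq_z_s16(pSrcB, p);

            /* accReal += sum(a.re*b.re - a.im*b.im) over the four complex lanes */
            accReal = vmlsldavaq_p(accReal, vecSrcA, vecSrcB, p);
            /* accImag += sum(a.re*b.im + a.im*b.re), the "exchanged" form        */
            accImag = vmlaldavaxq_p(accImag, vecSrcA, vecSrcB, p);

            pSrcA  += 8;
            pSrcB  += 8;
            blkCnt -= 8;
        }

        *realResult = accReal;
        *imagResult = accImag;
    }

The q31 entry follows the same shape on q31x4_t vectors but uses the rounding, high-part accumulating variants (vrmlsldavhaq / vrmlaldavhaxq) so the wider 32x32-bit products stay manageable.
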
D | arm_cmplx_mult_cmplx_f16.c  (hits in arm_cmplx_mult_cmplx_f16())
      82  f16x8_t vecSrcA, vecSrcB;   (local)
      90  vecSrcA = vld1q(pSrcA);
      96  vec_acc = vcmulq(vecSrcA, vecSrcB);
     100  vec_acc = vcmlaq_rot90(vec_acc, vecSrcA, vecSrcB);
     107  vecSrcA = vld1q(pSrcA);
     122  vec_acc = vcmulq(vecSrcA, vecSrcB);
     125  vec_acc = vcmlaq_rot90(vec_acc, vecSrcA, vecSrcB);
     144  vecSrcA = vldrhq_z_f16(pSrcA, p);
     146  vec_acc = vcmulq_m(vuninitializedq_f16(),vecSrcA, vecSrcB, p);
     147  vec_acc = vcmlaq_rot90_m(vec_acc, vecSrcA, vecSrcB, p);
     [all …]

D | arm_cmplx_dot_prod_f16.c  (hits in arm_cmplx_dot_prod_f16())
      93  f16x8_t vecSrcA, vecSrcB;   (local)
     101  vecSrcA = vld1q( pSrcA);
     107  vec_acc = vcmlaq(vec_acc, vecSrcA, vecSrcB);
     111  vec_acc = vcmlaq_rot90(vec_acc, vecSrcA, vecSrcB);
     116  vecSrcA = vld1q(pSrcA);
     129  vec_acc = vcmlaq(vec_acc, vecSrcA, vecSrcB);
     132  vec_acc = vcmlaq_rot90(vec_acc, vecSrcA, vecSrcB);
     147  vecSrcA = vldrhq_z_f16(pSrcA, p);
     149  vec_acc = vcmlaq_m(vec_acc, vecSrcA, vecSrcB, p);
     150  vec_acc = vcmlaq_rot90_m(vec_acc, vecSrcA, vecSrcB, p);
     [all …]

D | arm_cmplx_mult_cmplx_q15.c  (hits in arm_cmplx_mult_cmplx_q15())
      61  q15x8_t vecSrcA, vecSrcB;   (local)
      70  vecSrcA = vld1q(pSrcA);
      79  vecDst = vqdmlsdhq(vuninitializedq_s16(), vecSrcA, vecSrcB);
      84  vecDst = vqdmladhxq(vecDst, vecSrcA, vecSrcB);
      92  vecSrcA = vld1q(pSrcA);
     109  vecDst = vqdmlsdhq(vuninitializedq_s16(), vecSrcA, vecSrcB);
     112  vecDst = vqdmladhxq(vecDst, vecSrcA, vecSrcB);
     135  vecSrcA = vldrhq_z_s16(pSrcA, p);
     138  vecDst = vqdmlsdhq_m(vuninitializedq_s16(), vecSrcA, vecSrcB, p);
     139  vecDst = vqdmladhxq_m(vecDst, vecSrcA, vecSrcB, p);
     [all …]

D | arm_cmplx_mult_cmplx_q31.c  (hits in arm_cmplx_mult_cmplx_q31())
      61  q31x4_t vecSrcA, vecSrcB;   (local)
      69  vecSrcA = vld1q(pSrcA);
      77  vecDst = vqdmlsdhq(vuninitializedq_s32(), vecSrcA, vecSrcB);
      82  vecDst = vqdmladhxq(vecDst, vecSrcA, vecSrcB);
      90  vecSrcA = vld1q(pSrcA);
     107  vecDst = vqdmlsdhq(vuninitializedq_s32(), vecSrcA, vecSrcB);
     110  vecDst = vqdmladhxq(vecDst, vecSrcA, vecSrcB);
     132  vecSrcA = vldrwq_z_s32(pSrcA, p);
     135  vecDst = vqdmlsdhq_m(vuninitializedq_s32(), vecSrcA, vecSrcB, p);
     136  vecDst = vqdmladhxq_m(vecDst, vecSrcA, vecSrcB, p);
     [all …]

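The two fixed-point complex multiplies above (arm_cmplx_mult_cmplx_q15.c / _q31.c) build each output sample lane by lane: vqdmlsdhq fills one set of lanes with re*re - im*im (saturating doubling, high half) while vqdmladhxq fills the remaining lanes with the exchanged cross products re*im + im*re. A hedged sketch of the q15 loop follows; the function name is illustrative, the final shift assumes the 3.13 output format documented for arm_cmplx_mult_cmplx_q15, and an Armv8.1-M target with MVE and arm_mve.h is assumed.

    #include <stdint.h>
    #include "arm_mve.h"

    /* Sketch only: element-wise q15 complex multiply with MVE. Even lanes carry
     * real parts, odd lanes imaginary parts; the final shift assumes the 3.13
     * output format documented for arm_cmplx_mult_cmplx_q15.                   */
    static void cmplx_mult_cmplx_q15_sketch(const int16_t *pSrcA,
                                            const int16_t *pSrcB,
                                            int16_t *pDst,
                                            uint32_t numSamples)
    {
        int32_t blkCnt = (int32_t)(numSamples * 2U);     /* int16 elements left */

        while (blkCnt > 0)
        {
            mve_pred16_t p = vctp16q((uint32_t)blkCnt);
            int16x8_t vecSrcA = vldrhq_z_s16(pSrcA, p);
            int16x8_t vecSrcB = vldrhq_z_s16(pSrcB, p);

            /* even lanes: a.re*b.re - a.im*b.im (saturating doubling, high half) */
            int16x8_t vecDst = vqdmlsdhq_m(vuninitializedq_s16(), vecSrcA, vecSrcB, p);
            /* odd lanes:  a.re*b.im + a.im*b.re (exchanged operands)             */
            vecDst = vqdmladhxq_m(vecDst, vecSrcA, vecSrcB, p);

            /* 1.15 -> 3.13, then store only the valid lanes                      */
            vstrhq_p_s16(pDst, vshrq(vecDst, 2), p);

            pSrcA  += 8;
            pSrcB  += 8;
            pDst   += 8;
            blkCnt -= 8;
        }
    }

The q31 entry is the same pattern on q31x4_t vectors, with vldrwq_z_s32 for the tail load, a word-sized predicated store, and the q31 output scaling.
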
D | arm_cmplx_mult_cmplx_f32.c  (hits in arm_cmplx_mult_cmplx_f32())
      80  f32x4_t vecSrcA, vecSrcB;   (local)
      88  vecSrcA = vld1q(pSrcA);
      94  vec_acc = vcmulq(vecSrcA, vecSrcB);
      98  vec_acc = vcmlaq_rot90(vec_acc, vecSrcA, vecSrcB);
     105  vecSrcA = vld1q(pSrcA);
     120  vec_acc = vcmulq(vecSrcA, vecSrcB);
     123  vec_acc = vcmlaq_rot90(vec_acc, vecSrcA, vecSrcB);
     142  vecSrcA = vldrwq_z_f32(pSrcA, p);
     144  vec_acc = vcmulq_m(vuninitializedq_f32(),vecSrcA, vecSrcB, p);
     145  vec_acc = vcmlaq_rot90_m(vec_acc, vecSrcA, vecSrcB, p);
     [all …]

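arm_cmplx_mult_cmplx_f16.c and arm_cmplx_mult_cmplx_f32.c above use the MVE complex-arithmetic instructions directly: vcmulq produces the first half of the complex product and vcmlaq_rot90 adds the rotated half, so together they form (a.re + j*a.im) * (b.re + j*b.im) per complex lane. A minimal f32 sketch of that pattern follows; the function name is illustrative, and it assumes an Armv8.1-M target with the floating-point MVE extension and arm_mve.h. The f16 path is the same apart from the vector type (f16x8_t, eight half-precision elements) and the halfword load/store intrinsics.

    #include <stdint.h>
    #include "arm_mve.h"

    /* Sketch only: element-wise f32 complex multiply with MVE.
     * vcmulq + vcmlaq_rot90 together compute, per complex lane:
     *   re = a.re*b.re - a.im*b.im
     *   im = a.re*b.im + a.im*b.re                                  */
    static void cmplx_mult_cmplx_f32_sketch(const float *pSrcA,
                                            const float *pSrcB,
                                            float *pDst,
                                            uint32_t numSamples)
    {
        int32_t blkCnt = (int32_t)(numSamples * 2U);   /* float elements left */

        while (blkCnt > 0)
        {
            mve_pred16_t p = vctp32q((uint32_t)blkCnt);
            float32x4_t vecSrcA = vldrwq_z_f32(pSrcA, p);
            float32x4_t vecSrcB = vldrwq_z_f32(pSrcB, p);

            /* first half of the product; inactive lanes stay undefined here   */
            float32x4_t vec_acc = vcmulq_m(vuninitializedq_f32(), vecSrcA, vecSrcB, p);
            /* rotate-90 half completes the complex multiply                   */
            vec_acc = vcmlaq_rot90_m(vec_acc, vecSrcA, vecSrcB, p);

            vstrwq_p_f32(pDst, vec_acc, p);

            pSrcA  += 4;
            pSrcB  += 4;
            pDst   += 4;
            blkCnt -= 4;
        }
    }
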
D | arm_cmplx_dot_prod_f32.c  (hits in arm_cmplx_dot_prod_f32())
      88  f32x4_t vecSrcA, vecSrcB;   (local)
      96  vecSrcA = vld1q(pSrcA);
     102  vec_acc = vcmlaq(vec_acc, vecSrcA, vecSrcB);
     106  vec_acc = vcmlaq_rot90(vec_acc, vecSrcA, vecSrcB);
     111  vecSrcA = vld1q(pSrcA);
     124  vec_acc = vcmlaq(vec_acc, vecSrcA, vecSrcB);
     127  vec_acc = vcmlaq_rot90(vec_acc, vecSrcA, vecSrcB);
     141  vecSrcA = vldrwq_z_f32(pSrcA, p);
     143  vec_acc = vcmlaq_m(vec_acc, vecSrcA, vecSrcB, p);
     144  vec_acc = vcmlaq_rot90_m(vec_acc, vecSrcA, vecSrcB, p);
     [all …]

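The floating-point dot products (arm_cmplx_dot_prod_f16.c / _f32.c) reuse the same vcmlaq / vcmlaq_rot90 pair, but accumulate into a vector register across the whole loop: even lanes gather partial real sums, odd lanes partial imaginary sums, and a short reduction at the end folds them into two scalars. A hedged f32 sketch is below; the function name and the explicit lane reduction are illustrative, and an MVE-FP target with arm_mve.h is assumed.

    #include <stdint.h>
    #include "arm_mve.h"

    /* Sketch only: f32 complex dot product with MVE. The vector accumulator
     * keeps partial real sums in even lanes and partial imaginary sums in odd
     * lanes; a small scalar reduction at the end yields the two results.      */
    static void cmplx_dot_prod_f32_sketch(const float *pSrcA,
                                          const float *pSrcB,
                                          uint32_t numSamples,
                                          float *realResult,
                                          float *imagResult)
    {
        float32x4_t vec_acc = vdupq_n_f32(0.0f);
        int32_t blkCnt = (int32_t)(numSamples * 2U);   /* float elements left */

        while (blkCnt > 0)
        {
            mve_pred16_t p = vctp32q((uint32_t)blkCnt);
            float32x4_t vecSrcA = vldrwq_z_f32(pSrcA, p);
            float32x4_t vecSrcB = vldrwq_z_f32(pSrcB, p);

            /* the two rotations together accumulate the full product a*b      */
            vec_acc = vcmlaq_m(vec_acc, vecSrcA, vecSrcB, p);
            vec_acc = vcmlaq_rot90_m(vec_acc, vecSrcA, vecSrcB, p);

            pSrcA  += 4;
            pSrcB  += 4;
            blkCnt -= 4;
        }

        /* Fold the two complex lanes of the accumulator into scalars. */
        *realResult = vgetq_lane_f32(vec_acc, 0) + vgetq_lane_f32(vec_acc, 2);
        *imagResult = vgetq_lane_f32(vec_acc, 1) + vgetq_lane_f32(vec_acc, 3);
    }
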
/third_party/cmsis/CMSIS/DSP/Source/BasicMathFunctions/

D | arm_or_u8.c  (hits in arm_or_u8())
      58  uint8x16_t vecSrcA, vecSrcB;   (local)
      65  vecSrcA = vld1q(pSrcA);
      68  vst1q(pDst, vorrq_u8(vecSrcA, vecSrcB) );
      84  vecSrcA = vld1q(pSrcA);
      86  vstrbq_p(pDst, vorrq_u8(vecSrcA, vecSrcB), p0);

D | arm_and_u32.c  (hits in arm_and_u32())
      58  uint32x4_t vecSrcA, vecSrcB;   (local)
      65  vecSrcA = vld1q(pSrcA);
      68  vst1q(pDst, vandq_u32(vecSrcA, vecSrcB) );
      84  vecSrcA = vld1q(pSrcA);
      86  vstrwq_p(pDst, vandq_u32(vecSrcA, vecSrcB), p0);

D | arm_xor_u8.c  (hits in arm_xor_u8())
      58  uint8x16_t vecSrcA, vecSrcB;   (local)
      65  vecSrcA = vld1q(pSrcA);
      68  vst1q(pDst, veorq_u8(vecSrcA, vecSrcB) );
      84  vecSrcA = vld1q(pSrcA);
      86  vstrbq_p(pDst, veorq_u8(vecSrcA, vecSrcB), p0);

D | arm_or_u32.c  (hits in arm_or_u32())
      58  uint32x4_t vecSrcA, vecSrcB;   (local)
      65  vecSrcA = vld1q(pSrcA);
      68  vst1q(pDst, vorrq_u32(vecSrcA, vecSrcB) );
      84  vecSrcA = vld1q(pSrcA);
      86  vstrwq_p(pDst, vorrq_u32(vecSrcA, vecSrcB), p0);

D | arm_xor_u32.c  (hits in arm_xor_u32())
      58  uint32x4_t vecSrcA, vecSrcB;   (local)
      65  vecSrcA = vld1q(pSrcA);
      68  vst1q(pDst, veorq_u32(vecSrcA, vecSrcB) );
      84  vecSrcA = vld1q(pSrcA);
      86  vstrwq_p(pDst, veorq_u32(vecSrcA, vecSrcB), p0);

D | arm_or_u16.c  (hits in arm_or_u16())
      66  uint16x8_t vecSrcA, vecSrcB;   (local)
      73  vecSrcA = vld1q(pSrcA);
      76  vst1q(pDst, vorrq_u16(vecSrcA, vecSrcB) );
      92  vecSrcA = vld1q(pSrcA);
      94  vstrhq_p(pDst, vorrq_u16(vecSrcA, vecSrcB), p0);

D | arm_xor_u16.c  (hits in arm_xor_u16())
      66  uint16x8_t vecSrcA, vecSrcB;   (local)
      73  vecSrcA = vld1q(pSrcA);
      76  vst1q(pDst, veorq_u16(vecSrcA, vecSrcB) );
      92  vecSrcA = vld1q(pSrcA);
      94  vstrhq_p(pDst, veorq_u16(vecSrcA, vecSrcB), p0);

D | arm_and_u16.c  (hits in arm_and_u16())
      66  uint16x8_t vecSrcA, vecSrcB;   (local)
      73  vecSrcA = vld1q(pSrcA);
      76  vst1q(pDst, vandq_u16(vecSrcA, vecSrcB) );
      92  vecSrcA = vld1q(pSrcA);
      94  vstrhq_p(pDst, vandq_u16(vecSrcA, vecSrcB), p0);

D | arm_and_u8.c  (hits in arm_and_u8())
      59  uint8x16_t vecSrcA, vecSrcB;   (local)
      66  vecSrcA = vld1q(pSrcA);
      69  vst1q(pDst, vandq_u8(vecSrcA, vecSrcB) );
      85  vecSrcA = vld1q(pSrcA);
      87  vstrbq_p(pDst, vandq_u8(vecSrcA, vecSrcB), p0);

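All of the BasicMathFunctions entries above share one loop shape: load two full vectors, apply the bitwise intrinsic (vandq / vorrq / veorq at the element width of the function), store the result, then finish the leftover elements with a tail predicate (vctp*) and a predicated store (vstrbq_p / vstrhq_p / vstrwq_p). A minimal sketch for the u32 OR case follows; the function name is illustrative, the tail here uses predicated loads as well (the CMSIS fragments above predicate only the store), and an Armv8.1-M target with MVE and arm_mve.h is assumed.

    #include <stdint.h>
    #include "arm_mve.h"

    /* Sketch only: element-wise bitwise OR of two uint32_t buffers with MVE.
     * A full-vector main loop handles blocks of four words; the remaining
     * 0..3 words are handled with a tail predicate.                          */
    static void or_u32_sketch(const uint32_t *pSrcA, const uint32_t *pSrcB,
                              uint32_t *pDst, uint32_t blockSize)
    {
        uint32_t blkCnt = blockSize >> 2U;             /* full 4-word blocks */

        while (blkCnt > 0U)
        {
            uint32x4_t vecSrcA = vld1q(pSrcA);
            uint32x4_t vecSrcB = vld1q(pSrcB);
            vst1q(pDst, vorrq_u32(vecSrcA, vecSrcB));
            pSrcA += 4;
            pSrcB += 4;
            pDst  += 4;
            blkCnt--;
        }

        blkCnt = blockSize & 3U;                       /* leftover elements  */
        if (blkCnt > 0U)
        {
            mve_pred16_t p0 = vctp32q(blkCnt);
            /* predicated (zeroing) loads keep this sketch inside the buffers */
            uint32x4_t vecSrcA = vldrwq_z_u32(pSrcA, p0);
            uint32x4_t vecSrcB = vldrwq_z_u32(pSrcB, p0);
            vstrwq_p(pDst, vorrq_u32(vecSrcA, vecSrcB), p0);
        }
    }

The AND and XOR variants swap vorrq_u32 for vandq_u32 / veorq_u32, and the u8/u16 versions use uint8x16_t / uint16x8_t vectors with vctp8q / vctp16q for the tail predicate and vstrbq_p / vstrhq_p for the tail store.
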