/third_party/cmsis/CMSIS/DSP/Source/ComplexMathFunctions/

D | arm_cmplx_mag_f16.c | in arm_cmplx_mag_f16():
     85  f16x8x2_t vecSrc;   (local)
     95  vecSrc = vld2q(pSrc);
     97  sum = vmulq(vecSrc.val[0], vecSrc.val[0]);
     98  sum = vfmaq(sum, vecSrc.val[1], vecSrc.val[1]);
    142  vecSrc = vld2q((float16_t const *)pSrc);
    143  sum = vmulq(vecSrc.val[0], vecSrc.val[0]);
    144  sum = vfmaq(sum, vecSrc.val[1], vecSrc.val[1]);

D | arm_cmplx_mag_squared_f16.c | in arm_cmplx_mag_squared_f16():
     82  f16x8x2_t vecSrc;   (local)
     89  vecSrc = vld2q(pSrc);
     90  sum = vmulq_m(vuninitializedq_f16(), vecSrc.val[0], vecSrc.val[0], p);
     91  sum = vfmaq_m(sum, vecSrc.val[1], vecSrc.val[1], p);

D | arm_cmplx_mag_squared_q31.c | in arm_cmplx_mag_squared_q31():
     61  q31x4x2_t vecSrc;   (local)
     71  vecSrc = vld2q(pSrc);
     72  vReal = vmulhq(vecSrc.val[0], vecSrc.val[0]);
     73  vImag = vmulhq(vecSrc.val[1], vecSrc.val[1]);

D | arm_cmplx_mag_q31.c | in arm_cmplx_mag_q31():
     64  q31x4x2_t vecSrc;   (local)
     74  vecSrc = vld2q(pSrc);
     76  sum = vqaddq(vmulhq(vecSrc.val[0], vecSrc.val[0]),
     77               vmulhq(vecSrc.val[1], vecSrc.val[1]));

D | arm_cmplx_mag_squared_f32.c | in arm_cmplx_mag_squared_f32():
     81  f32x4x2_t vecSrc;   (local)
     89  vecSrc = vld2q(pSrc);
     90  sum = vmulq(vecSrc.val[0], vecSrc.val[0]);
     91  sum = vfmaq(sum, vecSrc.val[1], vecSrc.val[1]);

D | arm_cmplx_conj_q31.c | in arm_cmplx_conj_q31():
     62  q31x4x2_t vecSrc;   (local)
     75  vecSrc = vld2q(pSrc);
     76  vecSrc.val[1] = vqsubq(zero, vecSrc.val[1]);
     77  vst2q(pDst, vecSrc);
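All of the ComplexMathFunctions hits above share one Helium (MVE) shape: vld2q de-interleaves the packed real/imaginary samples into vecSrc.val[0] and vecSrc.val[1], the real part is squared with vmulq, and the squared imaginary part is folded in with vfmaq. The sketch below illustrates that pattern in the style of arm_cmplx_mag_squared_f32. It is a minimal sketch assuming an MVE target (e.g. armclang for Cortex-M55) and <arm_mve.h>; the function name and loop bookkeeping are made up here and are not the CMSIS-DSP source.

/* Sketch of the vld2q de-interleave pattern used by the kernels above.
 * Illustrative only; not the CMSIS-DSP implementation. */
#include <arm_mve.h>

void cmplx_mag_squared_f32_sketch(const float32_t *pSrc,
                                  float32_t *pDst,
                                  uint32_t numSamples)
{
    /* Main loop: 4 complex samples (8 interleaved floats) per iteration. */
    uint32_t blkCnt = numSamples >> 2;
    while (blkCnt > 0U)
    {
        float32x4x2_t vecSrc = vld2q(pSrc);   /* val[0] = real, val[1] = imag */
        float32x4_t   sum;

        sum = vmulq(vecSrc.val[0], vecSrc.val[0]);        /* re * re   */
        sum = vfmaq(sum, vecSrc.val[1], vecSrc.val[1]);   /* + im * im */
        vst1q(pDst, sum);

        pSrc += 8;   /* 4 complex samples consumed */
        pDst += 4;
        blkCnt--;
    }

    /* Tail: predicate the store to the last 1..3 results instead of a
     * scalar loop. A full de-interleaved pair is still loaded here, as the
     * kernels above also do in their tail path; only the predicated lanes
     * are written back. */
    uint32_t tail = numSamples & 3U;
    if (tail > 0U)
    {
        mve_pred16_t  p      = vctp32q(tail);
        float32x4x2_t vecSrc = vld2q(pSrc);
        float32x4_t   sum;

        sum = vmulq(vecSrc.val[0], vecSrc.val[0]);
        sum = vfmaq(sum, vecSrc.val[1], vecSrc.val[1]);
        vstrwq_p(pDst, sum, p);
    }
}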
/third_party/cmsis/CMSIS/DSP/Source/StatisticsFunctions/

D | arm_absmin_f32.c | in arm_absmin_f32():
     72  f32x4_t vecSrc;   (local)
     89  vecSrc = vldrwq_f32(pSrcVec);
     91  vecSrc = vabsq(vecSrc);
     96  p0 = vcmpleq(vecSrc, curExtremValVec);
     97  curExtremValVec = vpselq(vecSrc, curExtremValVec, p0);
    115  vecSrc = vldrwq_f32(pSrcVec);
    117  vecSrc = vabsq(vecSrc);
    122  p0 = vcmpleq_m(vecSrc, curExtremValVec, p0);
    123  curExtremValVec = vpselq(vecSrc, curExtremValVec, p0);

D | arm_absmin_f16.c | in arm_absmin_f16():
     67  f16x8_t vecSrc;   (local)
     84  vecSrc = vldrhq_f16(pSrcVec);
     86  vecSrc = vabsq(vecSrc);
     91  p0 = vcmpleq(vecSrc, curExtremValVec);
     92  curExtremValVec = vpselq(vecSrc, curExtremValVec, p0);
    110  vecSrc = vldrhq_f16(pSrcVec);
    112  vecSrc = vabsq(vecSrc);
    117  p0 = vcmpleq_m(vecSrc, curExtremValVec, p0);
    118  curExtremValVec = vpselq(vecSrc, curExtremValVec, p0);

D | arm_absmax_f16.c | in arm_absmax_f16():
     66  f16x8_t vecSrc;   (local)
     83  vecSrc = vldrhq_f16(pSrcVec);
     85  vecSrc = vabsq(vecSrc);
     90  p0 = vcmpgeq(vecSrc, curExtremValVec);
     91  curExtremValVec = vpselq(vecSrc, curExtremValVec, p0);
    107  vecSrc = vldrhq_f16(pSrcVec);
    109  vecSrc = vabsq(vecSrc);
    116  p0 = vcmpgeq_m(vecSrc, curExtremValVec, p0);
    117  curExtremValVec = vpselq(vecSrc, curExtremValVec, p0);

D | arm_absmin_q15.c | in arm_absmin_q15():
     60  q15x8_t vecSrc;   (local)
     78  vecSrc = vld1q(pSrcVec);
     80  vecSrc = vabsq(vecSrc);
     85  p0 = vcmpleq(vecSrc, curExtremValVec);
     86  curExtremValVec = vpselq(vecSrc, curExtremValVec, p0);
    102  vecSrc = vld1q(pSrcVec);
    104  vecSrc = vabsq(vecSrc);
    111  p0 = vcmpleq_m(vecSrc, curExtremValVec, p0);
    112  curExtremValVec = vpselq(vecSrc, curExtremValVec, p0);

D | arm_absmin_q31.c | in arm_absmin_q31():
     61  q31x4_t vecSrc;   (local)
     79  vecSrc = vldrwq_s32(pSrcVec);
     81  vecSrc = vabsq(vecSrc);
     86  p0 = vcmpleq(vecSrc, curExtremValVec);
     87  curExtremValVec = vpselq(vecSrc, curExtremValVec, p0);
    103  vecSrc = vldrwq_s32(pSrcVec);
    105  vecSrc = vabsq(vecSrc);
    112  p0 = vcmpleq_m(vecSrc, curExtremValVec, p0);
    113  curExtremValVec = vpselq(vecSrc, curExtremValVec, p0);

D | arm_absmin_q7.c | in arm_small_blk_absmin_q7():
     63  q7x16_t vecSrc;   (local)
     81  vecSrc = vld1q(pSrcVec);
     83  vecSrc = vabsq(vecSrc);
     88  p0 = vcmpleq(vecSrc, curExtremValVec);
     89  curExtremValVec = vpselq(vecSrc, curExtremValVec, p0);
    105  vecSrc = vld1q(pSrcVec);
    107  vecSrc = vabsq(vecSrc);
    114  p0 = vcmpleq_m(vecSrc, curExtremValVec, p0);
    115  curExtremValVec = vpselq(vecSrc, curExtremValVec, p0);
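The arm_absmin_* and arm_absmax_* hits above all use the same compare-and-select search: take vabsq of the loaded vector, compare it against the running extreme vector with vcmpleq (vcmpgeq for absmax), and merge with vpselq; the leftover tail reuses the sequence with a tail predicate and the merging _m compare. Below is a minimal value-only sketch of that pattern for q31 data, assuming an MVE target. The real kernels also track the position of the extreme and exist for q7/q15/f16/f32 with the matching vector widths; the function name, the zero-filling tail load, and the final vminvq reduction are choices made for this sketch, not necessarily what the library does.

/* Sketch of the running |min| search used by the arm_absmin_* kernels above.
 * Value only; illustrative, not the CMSIS-DSP implementation. */
#include <arm_mve.h>
#include <stdint.h>

int32_t absmin_value_q31_sketch(const int32_t *pSrc, uint32_t blockSize)
{
    int32x4_t curExtremValVec = vdupq_n_s32(INT32_MAX);
    uint32_t  blkCnt = blockSize >> 2;

    while (blkCnt > 0U)
    {
        int32x4_t    vecSrc = vld1q(pSrc);
        mve_pred16_t p0;

        vecSrc = vabsq(vecSrc);                                 /* |x| per lane          */
        p0 = vcmpleq(vecSrc, curExtremValVec);                  /* lanes that improve    */
        curExtremValVec = vpselq(vecSrc, curExtremValVec, p0);  /* keep the smaller lane */

        pSrc += 4;
        blkCnt--;
    }

    /* Tail: restrict both the load and the compare to the remaining lanes. */
    uint32_t tail = blockSize & 3U;
    if (tail > 0U)
    {
        mve_pred16_t p      = vctp32q(tail);
        int32x4_t    vecSrc = vldrwq_z_s32(pSrc, p);   /* inactive lanes read as 0, and the
                                                          predicated compare below ignores them */
        mve_pred16_t p0;

        vecSrc = vabsq(vecSrc);
        p0 = vcmpleq_m(vecSrc, curExtremValVec, p);
        curExtremValVec = vpselq(vecSrc, curExtremValVec, p0);
    }

    /* Horizontal reduction of the four per-lane minima. */
    return vminvq(INT32_MAX, curExtremValVec);
}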
D | arm_var_q31.c | in arm_var_q31():
     67  q31x4_t vecSrc;   (local)
     83  vecSrc = vldrwq_s32(pSrc);
     89  vecSrc = vshrq(vecSrc, 8);
     90  sumOfSquares = vmlaldavaq(sumOfSquares, vecSrc, vecSrc);
     91  sum = vaddlvaq(sum, vecSrc);

D | arm_min_f16.c | in arm_min_f16():
     66  f16x8_t vecSrc;   (local)
     82  vecSrc = vldrhq_f16(pSrcVec); pSrcVec += 8;
     87  p0 = vcmpleq(vecSrc, curExtremValVec);
     88  curExtremValVec = vpselq(vecSrc, curExtremValVec, p0);
    104  vecSrc = vldrhq_f16(pSrcVec); pSrcVec += 8;
    110  p0 = vcmpleq_m(vecSrc, curExtremValVec, p0);
    111  curExtremValVec = vpselq(vecSrc, curExtremValVec, p0);

D | arm_var_f16.c | in arm_var_f16():
     63  f16x8_t vecSrc;   (local)
     88  vecSrc = vldrhq_z_f16((float16_t const *) pSrc, p);
     92  vecSrc = vsubq_m(vuninitializedq_f16(), vecSrc, fMean, p);
     93  sumVec = vfmaq_m(sumVec, vecSrc, vecSrc, p);
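The arm_var_q31 hits above show a different accumulation idiom: the loaded vector is pre-shifted right by 8 bits for headroom, then vmlaldavaq folds the per-lane products into a 64-bit sum of squares while vaddlvaq folds the plain 64-bit sum. The sketch below covers only that accumulation loop, under stated assumptions: the zero-filling predicated tail load is a choice made here (the listing shows a plain vldrwq_s32), and the kernel's final fixed-point scaling of the variance is deliberately not reproduced.

/* Sketch of the widening sum / sum-of-squares accumulation in arm_var_q31.
 * Illustrative only; the caller would derive the variance from the two sums. */
#include <arm_mve.h>
#include <stdint.h>

void var_q31_accumulate_sketch(const int32_t *pSrc, uint32_t blockSize,
                               int64_t *pSumOfSquares, int64_t *pSum)
{
    int64_t  sumOfSquares = 0;
    int64_t  sum          = 0;
    uint32_t remaining    = blockSize;
    uint32_t blkCnt       = (blockSize + 3U) >> 2;   /* 4 x q31 per vector */

    while (blkCnt > 0U)
    {
        /* All-true predicate until the final (possibly partial) vector. */
        mve_pred16_t p      = vctp32q(remaining);
        int32x4_t    vecSrc = vldrwq_z_s32(pSrc, p);   /* inactive lanes load as 0 and
                                                          contribute nothing below */

        vecSrc       = vshrq(vecSrc, 8);                          /* headroom for the accumulators */
        sumOfSquares = vmlaldavaq(sumOfSquares, vecSrc, vecSrc);  /* += sum over lanes of x*x */
        sum          = vaddlvaq(sum, vecSrc);                     /* += sum over lanes of x   */

        pSrc     += 4;
        remaining = (remaining > 4U) ? (remaining - 4U) : 0U;
        blkCnt--;
    }

    *pSumOfSquares = sumOfSquares;
    *pSum          = sum;
}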
/third_party/cmsis/CMSIS/DSP/Source/BasicMathFunctions/

D | arm_not_u16.c | in arm_not_u16():
     64  uint16x8_t vecSrc;   (local)
     71  vecSrc = vld1q(pSrc);
     73  vst1q(pDst, vmvnq_u16(vecSrc));
     88  vecSrc = vld1q(pSrc);
     89  vstrhq_p(pDst, vmvnq_u16(vecSrc), p0);

D | arm_not_u32.c | in arm_not_u32():
     56  uint32x4_t vecSrc;   (local)
     63  vecSrc = vld1q(pSrc);
     65  vst1q(pDst, vmvnq_u32(vecSrc));
     80  vecSrc = vld1q(pSrc);
     81  vstrwq_p(pDst, vmvnq_u32(vecSrc), p0);

D | arm_not_u8.c | in arm_not_u8():
     56  uint8x16_t vecSrc;   (local)
     63  vecSrc = vld1q(pSrc);
     65  vst1q(pDst, vmvnq_u8(vecSrc));
     80  vecSrc = vld1q(pSrc);
     81  vstrbq_p(pDst, vmvnq_u8(vecSrc), p0);

D | arm_offset_q31.c | in arm_offset_q31():
     64  q31x4_t vecSrc;   (local)
     74  vecSrc = vld1q(pSrc);
     75  vst1q(pDst, vqaddq(vecSrc, offset));
     93  vecSrc = vld1q(pSrc);
     94  vstrwq_p(pDst, vqaddq(vecSrc, offset), p0);

D | arm_offset_q7.c | in arm_offset_q7():
     63  q7x16_t vecSrc;   (local)
     73  vecSrc = vld1q(pSrc);
     74  vst1q(pDst, vqaddq(vecSrc, offset));
     92  vecSrc = vld1q(pSrc);
     93  vstrbq_p(pDst, vqaddq(vecSrc, offset), p0);

D | arm_negate_q7.c | in arm_negate_q7():
     61  q7x16_t vecSrc;   (local)
     71  vecSrc = vld1q(pSrc);
     72  vst1q(pDst, vqnegq(vecSrc));
     90  vecSrc = vld1q(pSrc);
     91  vstrbq_p(pDst, vqnegq(vecSrc), p0);

D | arm_negate_q15.c | in arm_negate_q15():
     63  q15x8_t vecSrc;   (local)
     73  vecSrc = vld1q(pSrc);
     74  vst1q(pDst, vqnegq(vecSrc));
     92  vecSrc = vld1q(pSrc);
     93  vstrhq_p(pDst, vqnegq(vecSrc), p0);

D | arm_negate_q31.c | in arm_negate_q31():
     62  q31x4_t vecSrc;   (local)
     72  vecSrc = vld1q(pSrc);
     73  vst1q(pDst, vqnegq(vecSrc));
     91  vecSrc = vld1q(pSrc);
     92  vstrwq_p(pDst, vqnegq(vecSrc), p0);

D | arm_offset_q15.c | in arm_offset_q15():
     63  q15x8_t vecSrc;   (local)
     73  vecSrc = vld1q(pSrc);
     74  vst1q(pDst, vqaddq(vecSrc, offset));
     92  vecSrc = vld1q(pSrc);
     93  vstrhq_p(pDst, vqaddq(vecSrc, offset), p0);

D | arm_abs_q15.c | in arm_abs_q15():
     62  q15x8_t vecSrc;   (local)
     72  vecSrc = vld1q(pSrc);
     73  vst1q(pDst, vqabsq(vecSrc));
     91  vecSrc = vld1q(pSrc);
     92  vstrhq_p(pDst, vqabsq(vecSrc), p0);
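The BasicMathFunctions hits are all the same two-part loop: full vectors go through vld1q, a single saturating or bitwise operation (vqaddq, vqnegq, vqabsq, vmvnq_*), and vst1q, while the final partial vector is written with a width-matched predicated store (vstrbq_p / vstrhq_p / vstrwq_p) under a p0 tail predicate, so no scalar tail loop is needed. The sketch below shows that shape in the style of arm_offset_q15; it is a minimal sketch for an MVE target, and the function name and loop bookkeeping are illustrative, not the CMSIS-DSP source.

/* Sketch of the load / saturating-op / store pattern shared by the kernels
 * above (arm_offset_q15 style). Illustrative only. */
#include <arm_mve.h>
#include <stdint.h>

void offset_q15_sketch(const int16_t *pSrc, int16_t offset,
                       int16_t *pDst, uint32_t blockSize)
{
    uint32_t blkCnt = blockSize >> 3;   /* 8 x q15 elements per vector */

    while (blkCnt > 0U)
    {
        int16x8_t vecSrc = vld1q(pSrc);
        vst1q(pDst, vqaddq(vecSrc, offset));   /* saturating add of the scalar offset */
        pSrc += 8;
        pDst += 8;
        blkCnt--;
    }

    /* Tail: the kernels above also load a full vector here; it is the store
     * that is predicated down to the remaining 1..7 elements. */
    uint32_t tail = blockSize & 7U;
    if (tail > 0U)
    {
        mve_pred16_t p0     = vctp16q(tail);
        int16x8_t    vecSrc = vld1q(pSrc);
        vstrhq_p(pDst, vqaddq(vecSrc, offset), p0);
    }
}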