Lines Matching refs:c7fffffff
2742 __m128i c7fffffff, res, res_sat, res_xor_a; in vqd_s32() local
2743 c7fffffff = _mm_set1_epi32(0x7fffffff); in vqd_s32()
2746 res_sat = _mm_add_epi32(res_sat, c7fffffff); in vqd_s32()
3296 __m128i c7fffffff, res, res_sat, res_xor_a, b_xor_a_; in vqaddq_s32() local
3297 c7fffffff = _mm_set1_epi32(0x7fffffff); in vqaddq_s32()
3300 res_sat = _mm_add_epi32(res_sat, c7fffffff); in vqaddq_s32()
4651 __m128i c7fffffff, res, res_sat, res_xor_a, b_xor_a; in vqsubq_s32() local
4652 c7fffffff = _mm_set1_epi32(0x7fffffff); in vqsubq_s32()
4655 res_sat = _mm_add_epi32(res_sat, c7fffffff); in vqsubq_s32()
5541 __m128i c7fffffff; in vcage_f32() local
5543 c7fffffff = _mm_set1_epi32 (0x7fffffff); in vcage_f32()
5544 a0 = _mm_and_ps (_pM128(a), *(__m128*)&c7fffffff); in vcage_f32()
5545 b0 = _mm_and_ps (_pM128(b), *(__m128*)&c7fffffff); in vcage_f32()
5553 __m128i c7fffffff; in vcageq_f32() local
5555 c7fffffff = _mm_set1_epi32 (0x7fffffff); in vcageq_f32()
5556 a0 = _mm_and_ps (a, *(__m128*)&c7fffffff); in vcageq_f32()
5557 b0 = _mm_and_ps (b, *(__m128*)&c7fffffff); in vcageq_f32()
5568 __m128i c7fffffff; in vcale_f32() local
5570 c7fffffff = _mm_set1_epi32 (0x7fffffff); in vcale_f32()
5571 a0 = _mm_and_ps (_pM128(a), *(__m128*)&c7fffffff); in vcale_f32()
5572 b0 = _mm_and_ps (_pM128(b), *(__m128*)&c7fffffff); in vcale_f32()
5580 __m128i c7fffffff; in vcaleq_f32() local
5582 c7fffffff = _mm_set1_epi32 (0x7fffffff); in vcaleq_f32()
5583 a0 = _mm_and_ps (a, *(__m128*)&c7fffffff); in vcaleq_f32()
5584 b0 = _mm_and_ps (b, *(__m128*)&c7fffffff); in vcaleq_f32()
5595 __m128i c7fffffff; in vcagt_f32() local
5597 c7fffffff = _mm_set1_epi32 (0x7fffffff); in vcagt_f32()
5598 a0 = _mm_and_ps (_pM128(a), *(__m128*)&c7fffffff); in vcagt_f32()
5599 b0 = _mm_and_ps (_pM128(b), *(__m128*)&c7fffffff); in vcagt_f32()
5607 __m128i c7fffffff; in vcagtq_f32() local
5609 c7fffffff = _mm_set1_epi32 (0x7fffffff); in vcagtq_f32()
5610 a0 = _mm_and_ps (a, *(__m128*)&c7fffffff); in vcagtq_f32()
5611 b0 = _mm_and_ps (b, *(__m128*)&c7fffffff); in vcagtq_f32()
5622 __m128i c7fffffff; in vcalt_f32() local
5624 c7fffffff = _mm_set1_epi32 (0x7fffffff); in vcalt_f32()
5625 a0 = _mm_and_ps (_pM128(a), *(__m128*)&c7fffffff); in vcalt_f32()
5626 b0 = _mm_and_ps (_pM128(b), *(__m128*)&c7fffffff); in vcalt_f32()
5634 __m128i c7fffffff; in vcaltq_f32() local
5636 c7fffffff = _mm_set1_epi32 (0x7fffffff); in vcaltq_f32()
5637 a0 = _mm_and_ps (a, *(__m128*)&c7fffffff); in vcaltq_f32()
5638 b0 = _mm_and_ps (b, *(__m128*)&c7fffffff); in vcaltq_f32()
13019 __m128i c7fffffff, a_trunc, mask_trunc; in vqmovn_u32() local
13020 c7fffffff = _mm_set1_epi32((uint32_t)0x7fffffff); // bit 31 set to zero in vqmovn_u32()
13021 a_trunc = _mm_and_si128(a, c7fffffff); // a truncated to max signed in vqmovn_u32()
…mask_trunc = _mm_and_si128(mask_trunc, c7fffffff); //zero or c7fffffff if bit 31 had been set in vqmovn_u32()
14608 …_NEON2SSE_ALIGN_16 static const int32_t c7fffffff[4] = {0x7fffffff, 0x7fffffff, 0x7fffffff, 0x7fff… in vabs_f32() local
14609 res = _mm_and_ps (_pM128(a), *(__m128*)c7fffffff); //use 64 low bits only in vabs_f32()
14626 …_NEON2SSE_ALIGN_16 static const int32_t c7fffffff[4] = {0x7fffffff, 0x7fffffff, 0x7fffffff, 0x7fff… in vabsq_f32() local
14627 return _mm_and_ps (a, *(__m128*)c7fffffff); in vabsq_f32()