Searched refs:vrev64_u8 (Results 1 – 8 of 8) sorted by relevance
390 const uint8x8_t L3210 = vrev64_u8(L0123); in vpx_d135_predictor_4x4_neon()
417 const uint8x8_t L76543210 = vrev64_u8(L01234567); in vpx_d135_predictor_8x8_neon()
481 const uint8x8_t L76543210 = vrev64_u8(vget_low_u8(L0123456789abcdef)); in vpx_d135_predictor_16x16_neon()
482 const uint8x8_t Lfedcba98 = vrev64_u8(vget_high_u8(L0123456789abcdef)); in vpx_d135_predictor_16x16_neon()
531 const uint8x8_t LL76543210 = vrev64_u8(vget_low_u8(LL0123456789abcdef)); in vpx_d135_predictor_32x32_neon()
532 const uint8x8_t LU76543210 = vrev64_u8(vget_low_u8(LU0123456789abcdef)); in vpx_d135_predictor_32x32_neon()
533 const uint8x8_t LLfedcba98 = vrev64_u8(vget_high_u8(LL0123456789abcdef)); in vpx_d135_predictor_32x32_neon()
534 const uint8x8_t LUfedcba98 = vrev64_u8(vget_high_u8(LU0123456789abcdef)); in vpx_d135_predictor_32x32_neon()
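Judging by the function names, these hits are libvpx's NEON d135 (down-left diagonal) intra predictors: the left-neighbor pixels are loaded top-to-bottom and vrev64_u8 flips them (L01234567 becomes L76543210) so they can be concatenated with the above row. A minimal sketch of that pattern, using a hypothetical `left` pointer rather than the libvpx code itself:

#include <arm_neon.h>

/* Sketch only: load 8 left-neighbor pixels (L0 at the top) and reverse them
 * so lane 0 holds the bottom-most pixel, matching the L76543210 naming. */
static inline uint8x8_t load_left_reversed(const uint8_t *left) {
  const uint8x8_t L01234567 = vld1_u8(left); /* L0, L1, ..., L7 */
  return vrev64_u8(L01234567);               /* L7, L6, ..., L0 */
}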
738 const_nt_1_col_t = vrev64_u8(const_nt_1_col_t); in ihevc_intra_pred_luma_planar_neonintr()
812 const_nt_1_col_t = vrev64_u8(const_nt_1_col_t); in ihevc_intra_pred_luma_planar_neonintr()
1323 rev_res = vrev64_u8(src_tmp_1); /* Reversing the loaded values */ in ihevc_intra_pred_luma_horz_neonintr()
1745 rev_res = vrev64_u8(vreinterpret_u8_u64(shift_res)); in ihevc_intra_pred_luma_mode2_neonintr()
1787 rev_val_first = vrev64_u8(pu1_src_val2); in ihevc_intra_pred_luma_mode2_neonintr()
1791 rev_val_second = vrev64_u8(pu1_src_val1); in ihevc_intra_pred_luma_mode2_neonintr()
2355 ref_left_t = vrev64_u8(ref_left_t); in ihevc_intra_pred_luma_mode_11_to_17_neonintr()
2373 rev_val = vrev64_u8(vreinterpret_u8_u32(ref_left_tmp)); in ihevc_intra_pred_luma_mode_11_to_17_neonintr()
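These appear to be HEVC intra-prediction intrinsics; the mode-2 and mode-11-to-17 paths reinterpret a 64-bit shift result back to bytes before reversing it. A minimal sketch of that reinterpret-then-reverse step, with illustrative names and a fixed shift amount (not the original code):

#include <arm_neon.h>

/* Sketch only: drop the two lowest reference samples packed in a 64-bit
 * lane, then reverse the byte order of what remains. */
static inline uint8x8_t drop_two_and_reverse(uint64x1_t packed) {
  const uint64x1_t shift_res = vshr_n_u64(packed, 16); /* discard 2 bytes */
  return vrev64_u8(vreinterpret_u8_u64(shift_res));
}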
304 v_src_reverse[i] = vrev64_u8(v_src[i]); in AddPartial()
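This hit reverses each 8-pixel source row inside a loop, presumably so the mirrored direction can reuse the same accumulation code in AddPartial(). A minimal sketch of that loop, assuming a hypothetical 8x8 block of already-loaded rows:

#include <arm_neon.h>

/* Sketch only: byte-reverse every row of an 8x8 block once up front. */
static void reverse_rows(const uint8x8_t v_src[8], uint8x8_t v_src_reverse[8]) {
  for (int i = 0; i < 8; ++i) {
    v_src_reverse[i] = vrev64_u8(v_src[i]);
  }
}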
812 return vrev64_u8(a); in test_vrev64_u8()
13339 return vrev64_u8(a); in test_vrev64_u8()
872 return vrev64_u8(a); in test_vrev64_u8()
15640 return vrev64_u8(a); in test_vrev64_u8()
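The four results above are compiler intrinsics tests; each reduces to the same one-line wrapper, which should lower to a single VREV64.8 (ARM) or REV64 (AArch64) instruction:

#include <arm_neon.h>

uint8x8_t test_vrev64_u8(uint8x8_t a) {
  return vrev64_u8(a);
}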
1985 _NEON2SSESTORAGE uint8x8_t vrev64_u8(uint8x8_t vec); // VREV64.8 d0,d0
14418 _NEON2SSESTORAGE uint8x8_t vrev64_u8(uint8x8_t vec); // VREV64.8 d0,d0
14419 #define vrev64_u8 vrev64_s8 macro
14428 #define vrev64_p8 vrev64_u8
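The last result is a NEON-to-SSE compatibility header, which declares vrev64_u8 and then simply aliases it to vrev64_s8 with a macro. For illustration only (this is not the header's implementation), the same byte reversal within each 64-bit lane can be done on x86 with one SSSE3 shuffle:

#include <tmmintrin.h> /* SSSE3 */

/* Illustration only: reverse the bytes inside each 64-bit lane of a. */
static inline __m128i rev64_u8_sse(__m128i a) {
  const __m128i mask = _mm_setr_epi8(7, 6, 5, 4, 3, 2, 1, 0,
                                     15, 14, 13, 12, 11, 10, 9, 8);
  return _mm_shuffle_epi8(a, mask);
}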