Searched refs:cospi_12_64 (Results 1 – 25 of 30) sorted by relevance

/external/libvpx/libvpx/vpx_dsp/arm/
idct32x32_34_add_neon.c 86 s1[27], cospi_12_64); in vpx_idct32_6_neon()
87 s1[26] = multiply_accumulate_shift_and_narrow_s16(s1[20], cospi_12_64, s1[27], in vpx_idct32_6_neon()
90 s1[22] = multiply_accumulate_shift_and_narrow_s16(s1[23], -cospi_12_64, in vpx_idct32_6_neon()
93 s1[24], cospi_12_64); in vpx_idct32_6_neon()
313 s1[27], cospi_12_64); in vpx_idct32_8_neon()
314 s1[26] = multiply_accumulate_shift_and_narrow_s16(s1[20], cospi_12_64, s1[27], in vpx_idct32_8_neon()
317 s1[22] = multiply_accumulate_shift_and_narrow_s16(s1[23], -cospi_12_64, in vpx_idct32_8_neon()
320 s1[24], cospi_12_64); in vpx_idct32_8_neon()
highbd_idct32x32_34_add_neon.c 93 s1[27], cospi_12_64); in vpx_highbd_idct32_6_neon()
94 s1[26] = multiply_accumulate_shift_and_narrow_s32_dual(s1[20], cospi_12_64, in vpx_highbd_idct32_6_neon()
97 s1[22] = multiply_accumulate_shift_and_narrow_s32_dual(s1[23], -cospi_12_64, in vpx_highbd_idct32_6_neon()
100 s1[24], cospi_12_64); in vpx_highbd_idct32_6_neon()
417 s1[27], cospi_12_64); in vpx_highbd_idct32_8_neon()
418 s1[26] = multiply_accumulate_shift_and_narrow_s32_dual(s1[20], cospi_12_64, in vpx_highbd_idct32_8_neon()
421 s1[22] = multiply_accumulate_shift_and_narrow_s32_dual(s1[23], -cospi_12_64, in vpx_highbd_idct32_8_neon()
424 s1[24], cospi_12_64); in vpx_highbd_idct32_8_neon()
idct32x32_135_add_neon.c 163 s2[26], cospi_12_64); in vpx_idct32_12_neon()
164 s3[26] = multiply_accumulate_shift_and_narrow_s16(s2[21], cospi_12_64, s2[26], in vpx_idct32_12_neon()
167 s3[22] = multiply_accumulate_shift_and_narrow_s16(s1[23], -cospi_12_64, in vpx_idct32_12_neon()
170 s1[24], cospi_12_64); in vpx_idct32_12_neon()
446 s3[6] = multiply_shift_and_narrow_s16(in[12], cospi_12_64); in vpx_idct32_16_neon()
468 s2[26], cospi_12_64); in vpx_idct32_16_neon()
469 s3[26] = multiply_accumulate_shift_and_narrow_s16(s2[21], cospi_12_64, s2[26], in vpx_idct32_16_neon()
472 s3[22] = multiply_accumulate_shift_and_narrow_s16(s2[22], -cospi_12_64, in vpx_idct32_16_neon()
475 s2[25], cospi_12_64); in vpx_idct32_16_neon()
fwd_txfm_neon.c 102 v_t1_lo = vmull_n_s16(vget_low_s16(v_x1), (int16_t)cospi_12_64); in vpx_fdct8x8_neon()
103 v_t1_hi = vmull_n_s16(vget_high_s16(v_x1), (int16_t)cospi_12_64); in vpx_fdct8x8_neon()
106 v_t2_lo = vmull_n_s16(vget_low_s16(v_x2), (int16_t)cospi_12_64); in vpx_fdct8x8_neon()
107 v_t2_hi = vmull_n_s16(vget_high_s16(v_x2), (int16_t)cospi_12_64); in vpx_fdct8x8_neon()
highbd_idct32x32_135_add_neon.c 173 s2[26], cospi_12_64); in vpx_highbd_idct32_12_neon()
174 s3[26] = multiply_accumulate_shift_and_narrow_s32_dual(s2[21], cospi_12_64, in vpx_highbd_idct32_12_neon()
177 s3[22] = multiply_accumulate_shift_and_narrow_s32_dual(s1[23], -cospi_12_64, in vpx_highbd_idct32_12_neon()
180 s1[24], cospi_12_64); in vpx_highbd_idct32_12_neon()
522 s3[6] = multiply_shift_and_narrow_s32_dual(in[12], cospi_12_64); in vpx_highbd_idct32_16_neon()
544 s2[26], cospi_12_64); in vpx_highbd_idct32_16_neon()
545 s3[26] = multiply_accumulate_shift_and_narrow_s32_dual(s2[21], cospi_12_64, in vpx_highbd_idct32_16_neon()
548 s3[22] = multiply_accumulate_shift_and_narrow_s32_dual(s2[22], -cospi_12_64, in vpx_highbd_idct32_16_neon()
551 s2[25], cospi_12_64); in vpx_highbd_idct32_16_neon()
highbd_idct32x32_1024_add_neon.c 468 do_butterfly(q[14], q[13], cospi_12_64, cospi_20_64, &q[1], &q[3]); in vpx_highbd_idct32_32_neon()
482 do_butterfly(q[14], q[13], -cospi_20_64, -cospi_12_64, &q[4], &q[7]); in vpx_highbd_idct32_32_neon()
587 do_butterfly(q[14], q[13], cospi_12_64, cospi_20_64, &q[1], &q[3]); in vpx_highbd_idct32_32_neon()
/external/libvpx/libvpx/vpx_dsp/mips/
itrans16_dspr2.c 260 [cospi_20_64] "r"(cospi_20_64), [cospi_12_64] "r"(cospi_12_64), in idct16_rows_dspr2()
646 [cospi_20_64] "r"(cospi_20_64), [cospi_12_64] "r"(cospi_12_64), in idct16_cols_add_blk_dspr2()
1134 s10 = x10 * cospi_20_64 + x11 * cospi_12_64; in iadst16_dspr2()
1135 s11 = x10 * cospi_12_64 - x11 * cospi_20_64; in iadst16_dspr2()
1138 s14 = -x14 * cospi_12_64 + x15 * cospi_20_64; in iadst16_dspr2()
1139 s15 = x14 * cospi_20_64 + x15 * cospi_12_64; in iadst16_dspr2()
itrans8_dspr2.c 185 [cospi_4_64] "r"(cospi_4_64), [cospi_12_64] "r"(cospi_12_64), in idct8_rows_dspr2()
445 [cospi_4_64] "r"(cospi_4_64), [cospi_12_64] "r"(cospi_12_64), in idct8_columns_add_blk_dspr2()
itrans32_cols_dspr2.c 231 [cospi_12_64] "r"(cospi_12_64), [cospi_20_64] "r"(cospi_20_64)); in vpx_idct32_cols_add_blk_dspr2()
287 [cospi_12_64] "r"(cospi_12_64), [cospi_20_64] "r"(cospi_20_64)); in vpx_idct32_cols_add_blk_dspr2()
682 [cospi_20_64] "r"(cospi_20_64), [cospi_12_64] "r"(cospi_12_64), in vpx_idct32_cols_add_blk_dspr2()
itrans32_dspr2.c 275 [cospi_12_64] "r"(cospi_12_64), [cospi_20_64] "r"(cospi_20_64)); in idct32_rows_dspr2()
331 [cospi_12_64] "r"(cospi_12_64), [cospi_20_64] "r"(cospi_20_64)); in idct32_rows_dspr2()
726 [cospi_20_64] "r"(cospi_20_64), [cospi_12_64] "r"(cospi_12_64), in idct32_rows_dspr2()
idct8x8_msa.c 56 k2 = VP9_SET_COSPI_PAIR(-cospi_20_64, cospi_12_64); in vpx_idct8x8_12_add_msa()
57 k3 = VP9_SET_COSPI_PAIR(cospi_12_64, cospi_20_64); in vpx_idct8x8_12_add_msa()
fwd_dct32x32_msa.c 96 DOTP_CONST_PAIR(vec7, vec4, cospi_12_64, cospi_20_64, temp1, temp0); in fdct8x32_1d_column_even_store()
219 DOTP_CONST_PAIR(in18, in17, cospi_12_64, cospi_20_64, in29, in30); in fdct8x32_1d_column_odd_store()
232 DOTP_CONST_PAIR(-in16, in19, cospi_12_64, cospi_20_64, in28, in31); in fdct8x32_1d_column_odd_store()
354 DOTP_CONST_PAIR(vec7, vec4, cospi_12_64, cospi_20_64, in5, in4); in fdct8x32_1d_row_even_4x()
427 DOTP_CONST_PAIR(vec7, vec4, cospi_12_64, cospi_20_64, temp1, temp0); in fdct8x32_1d_row_even()
565 DOTP_CONST_PAIR(in18, in17, cospi_12_64, cospi_20_64, in29, in30); in fdct8x32_1d_row_odd()
579 DOTP_CONST_PAIR(-in16, in19, cospi_12_64, cospi_20_64, in28, in31); in fdct8x32_1d_row_odd()
740 DOTP_CONST_PAIR(vec7, vec4, cospi_12_64, cospi_20_64, temp1, temp0); in fdct8x32_1d_row_even_rd()
877 DOTP_CONST_PAIR(in18, in17, cospi_12_64, cospi_20_64, in29, in30); in fdct8x32_1d_row_odd_rd()
890 DOTP_CONST_PAIR(-in16, in19, cospi_12_64, cospi_20_64, in28, in31); in fdct8x32_1d_row_odd_rd()
idct16x16_msa.c 28 DOTP_CONST_PAIR(reg10, reg6, cospi_12_64, cospi_20_64, reg10, reg6); in vpx_idct16_1d_rows_msa()
120 DOTP_CONST_PAIR(reg10, reg6, cospi_12_64, cospi_20_64, reg10, reg6); in vpx_idct16_1d_columns_addblk_msa()
394 k0 = VP9_SET_COSPI_PAIR(cospi_12_64, cospi_20_64); in vpx_iadst16_1d_columns_addblk_msa()
395 k1 = VP9_SET_COSPI_PAIR(-cospi_20_64, cospi_12_64); in vpx_iadst16_1d_columns_addblk_msa()
396 k2 = VP9_SET_COSPI_PAIR(cospi_20_64, -cospi_12_64); in vpx_iadst16_1d_columns_addblk_msa()
idct32x32_msa.c 51 DOTP_CONST_PAIR(reg5, reg3, cospi_12_64, cospi_20_64, reg5, reg3); in idct32x8_row_even_process_store()
190 DOTP_CONST_PAIR(vec1, vec0, cospi_12_64, cospi_20_64, loc0, loc1); in idct32x8_row_odd_process_store()
191 DOTP_CONST_PAIR(vec3, vec2, -cospi_20_64, cospi_12_64, loc2, loc3); in idct32x8_row_odd_process_store()
362 DOTP_CONST_PAIR(reg5, reg3, cospi_12_64, cospi_20_64, reg5, reg3); in idct8x32_column_even_process_store()
494 DOTP_CONST_PAIR(vec1, vec0, cospi_12_64, cospi_20_64, loc0, loc1); in idct8x32_column_odd_process_store()
495 DOTP_CONST_PAIR(vec3, vec2, -cospi_20_64, cospi_12_64, loc2, loc3); in idct8x32_column_odd_process_store()
fwd_txfm_msa.h 64 cospi_4_64, cospi_28_64, cospi_12_64, cospi_20_64 }; \
124 cospi_4_64, cospi_28_64, cospi_12_64, cospi_20_64 }; \
188 -cospi_8_64, -cospi_24_64, cospi_12_64, cospi_20_64 }; \
inv_txfm_msa.h 220 v8i16 mask_m = { cospi_28_64, cospi_4_64, cospi_20_64, cospi_12_64, \
375 k0_m = VP9_SET_COSPI_PAIR(cospi_12_64, cospi_20_64); \
376 k1_m = VP9_SET_COSPI_PAIR(-cospi_20_64, cospi_12_64); \
377 k2_m = VP9_SET_COSPI_PAIR(cospi_20_64, -cospi_12_64); \
/external/libvpx/libvpx/vp9/encoder/
vp9_dct.c 85 t1 = x1 * cospi_12_64 + x2 * cospi_20_64; in fdct8()
86 t2 = x2 * cospi_12_64 + x1 * -cospi_20_64; in fdct8()
164 t1 = x1 * cospi_12_64 + x2 * cospi_20_64; in fdct16()
165 t2 = x2 * cospi_12_64 + x1 * -cospi_20_64; in fdct16()
413 s10 = x10 * cospi_20_64 + x11 * cospi_12_64; in fadst16()
414 s11 = x10 * cospi_12_64 - x11 * cospi_20_64; in fadst16()
417 s14 = -x14 * cospi_12_64 + x15 * cospi_20_64; in fadst16()
418 s15 = x14 * cospi_20_64 + x15 * cospi_12_64; in fadst16()
618 t1 = x1 * cospi_12_64 + x2 * cospi_20_64; in vp9_fdct8x8_quant_c()
619 t2 = x2 * cospi_12_64 + x1 * -cospi_20_64; in vp9_fdct8x8_quant_c()
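The fdct8()/fdct16() hits above all share the same rotate-and-round butterfly, with cospi_12_64 paired against cospi_20_64. Below is a minimal standalone sketch of that step, assuming libvpx's 14-bit fixed-point constants and its usual fdct_round_shift() rounding; the constant values and helper names are reproduced here for illustration rather than copied from this listing.

#include <stdint.h>
#include <stdio.h>

typedef int32_t tran_high_t;

/* Assumption: cospi_k_64 constants are 16384 * cos(k*pi/64),
   as suggested by vpx_dsp/txfm_common.h. */
#define DCT_CONST_BITS 14
static const tran_high_t cospi_12_64 = 13623;
static const tran_high_t cospi_20_64 = 9102;

/* Round a fixed-point product back down to coefficient range. */
static tran_high_t fdct_round_shift(tran_high_t x) {
  return (x + (1 << (DCT_CONST_BITS - 1))) >> DCT_CONST_BITS;
}

int main(void) {
  tran_high_t x1 = 100, x2 = -40;
  /* The butterfly seen in fdct8(): a rotation by 12*pi/64, which works
     because cos(20*pi/64) equals sin(12*pi/64). */
  tran_high_t t1 = fdct_round_shift(x1 * cospi_12_64 + x2 * cospi_20_64);
  tran_high_t t2 = fdct_round_shift(x2 * cospi_12_64 + x1 * -cospi_20_64);
  printf("t1=%d t2=%d\n", (int)t1, (int)t2);
  return 0;
}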
/external/libvpx/libvpx/vpx_dsp/
fwd_txfm.c 155 t1 = x1 * cospi_12_64 + x2 * cospi_20_64; in vpx_fdct8x8_c()
156 t2 = x2 * cospi_12_64 + x1 * -cospi_20_64; in vpx_fdct8x8_c()
289 t1 = x1 * cospi_12_64 + x2 * cospi_20_64; in vpx_fdct16x16_c()
290 t2 = x2 * cospi_12_64 + x1 * -cospi_20_64; in vpx_fdct16x16_c()
608 output[5] = dct_32_round(step[5] * cospi_12_64 + step[6] * cospi_20_64); in vpx_fdct32()
609 output[6] = dct_32_round(step[6] * cospi_12_64 + step[5] * -cospi_20_64); in vpx_fdct32()
625 output[21] = dct_32_round(step[21] * -cospi_20_64 + step[26] * cospi_12_64); in vpx_fdct32()
626 output[22] = dct_32_round(step[22] * -cospi_12_64 + step[25] * -cospi_20_64); in vpx_fdct32()
629 output[25] = dct_32_round(step[25] * cospi_12_64 + step[22] * -cospi_20_64); in vpx_fdct32()
630 output[26] = dct_32_round(step[26] * cospi_20_64 + step[21] * cospi_12_64); in vpx_fdct32()
inv_txfm.c 282 temp1 = input[5] * cospi_12_64 - input[3] * cospi_20_64; in idct8_c()
283 temp2 = input[5] * cospi_20_64 + input[3] * cospi_12_64; in idct8_c()
458 s10 = x10 * cospi_20_64 + x11 * cospi_12_64; in iadst16_c()
459 s11 = x10 * cospi_12_64 - x11 * cospi_20_64; in iadst16_c()
462 s14 = -x14 * cospi_12_64 + x15 * cospi_20_64; in iadst16_c()
463 s15 = x14 * cospi_20_64 + x15 * cospi_12_64; in iadst16_c()
616 temp1 = step2[5] * cospi_12_64 - step2[6] * cospi_20_64; in idct16_c()
617 temp2 = step2[5] * cospi_20_64 + step2[6] * cospi_12_64; in idct16_c()
928 temp1 = step2[5] * cospi_12_64 - step2[6] * cospi_20_64; in idct32_c()
929 temp2 = step2[5] * cospi_20_64 + step2[6] * cospi_12_64; in idct32_c()
[all …]
txfm_common.h 39 static const tran_high_t cospi_12_64 = 13623; variable
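The 13623 in the definition above is consistent with the naming scheme implied by cospi_12_64: assuming these constants are cosines scaled to 14-bit fixed point, cos(12*pi/64) = cos(33.75 deg) ≈ 0.83147, and 0.83147 × 2^14 ≈ 13622.9, which rounds to 13623. By the same reasoning cospi_20_64 would be about 0.55557 × 2^14 ≈ 9102.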
/external/libvpx/libvpx/vpx_dsp/x86/
inv_txfm_ssse3.c 24 const __m128i stg1_2 = pair_set_epi16(-cospi_20_64, cospi_12_64); in vpx_idct8x8_64_add_ssse3()
25 const __m128i stg1_3 = pair_set_epi16(cospi_12_64, cospi_20_64); in vpx_idct8x8_64_add_ssse3()
227 const __m128i stg1_3 = pair_set_epi16(2 * cospi_12_64, 2 * cospi_12_64); in vpx_idct8x8_12_add_ssse3()
534 const __m128i stg3_8 = pair_set_epi16(-cospi_20_64, cospi_12_64); in idct32_34_second_half()
535 const __m128i stg3_9 = pair_set_epi16(cospi_12_64, cospi_20_64); in idct32_34_second_half()
536 const __m128i stg3_10 = pair_set_epi16(-cospi_12_64, -cospi_20_64); in idct32_34_second_half()
712 const __m128i stk3_3 = pair_set_epi16(2 * cospi_12_64, 2 * cospi_12_64); in idct32_8x32_135_quarter_1()
881 const __m128i stg3_8 = pair_set_epi16(-cospi_20_64, cospi_12_64); in idct32_8x32_quarter_3_4()
882 const __m128i stg3_9 = pair_set_epi16(cospi_12_64, cospi_20_64); in idct32_8x32_quarter_3_4()
883 const __m128i stg3_10 = pair_set_epi16(-cospi_12_64, -cospi_20_64); in idct32_8x32_quarter_3_4()
[all …]
inv_txfm_sse2.c 255 const __m128i stg1_2 = pair_set_epi16(-cospi_20_64, cospi_12_64); in vpx_idct8x8_64_add_sse2()
256 const __m128i stg1_3 = pair_set_epi16(cospi_12_64, cospi_20_64); in vpx_idct8x8_64_add_sse2()
344 const __m128i stg1_2 = pair_set_epi16(-cospi_20_64, cospi_12_64); in idct8_sse2()
345 const __m128i stg1_3 = pair_set_epi16(cospi_12_64, cospi_20_64); in idct8_sse2()
600 const __m128i stg1_2 = pair_set_epi16(-cospi_20_64, cospi_12_64); in vpx_idct8x8_12_add_sse2()
601 const __m128i stg1_3 = pair_set_epi16(cospi_12_64, cospi_20_64); in vpx_idct8x8_12_add_sse2()
980 const __m128i stg3_2 = pair_set_epi16(cospi_12_64, -cospi_20_64); in vpx_idct16x16_256_add_sse2()
981 const __m128i stg3_3 = pair_set_epi16(cospi_20_64, cospi_12_64); in vpx_idct16x16_256_add_sse2()
1128 const __m128i k__cospi_p20_p12 = pair_set_epi16(cospi_20_64, cospi_12_64); in iadst16_8col()
1129 const __m128i k__cospi_p12_m20 = pair_set_epi16(cospi_12_64, -cospi_20_64); in iadst16_8col()
[all …]
fwd_txfm_impl_sse2.h 270 const __m128i k__cospi_p12_p20 = pair_set_epi16(cospi_12_64, cospi_20_64); in FDCT8x8_2D()
271 const __m128i k__cospi_m20_p12 = pair_set_epi16(-cospi_20_64, cospi_12_64); in FDCT8x8_2D()
592 const __m128i k__cospi_p12_p20 = pair_set_epi16(cospi_12_64, cospi_20_64); in FDCT16x16_2D()
593 const __m128i k__cospi_m20_p12 = pair_set_epi16(-cospi_20_64, cospi_12_64); in FDCT16x16_2D()
/external/libvpx/libvpx/vp9/encoder/x86/
vp9_dct_ssse3.c 40 const __m128i k__cospi_p12_p20 = pair_set_epi16(cospi_12_64, cospi_20_64); in vp9_fdct8x8_quant_ssse3()
41 const __m128i k__cospi_m20_p12 = pair_set_epi16(-cospi_20_64, cospi_12_64); in vp9_fdct8x8_quant_ssse3()
vp9_dct_intrin_sse2.c 202 const __m128i k__cospi_p12_p20 = pair_set_epi16(cospi_12_64, cospi_20_64); in vp9_fdct8x8_quant_sse2()
203 const __m128i k__cospi_m20_p12 = pair_set_epi16(-cospi_20_64, cospi_12_64); in vp9_fdct8x8_quant_sse2()
769 const __m128i k__cospi_p12_p20 = pair_set_epi16(cospi_12_64, cospi_20_64); in fdct8_sse2()
770 const __m128i k__cospi_m20_p12 = pair_set_epi16(-cospi_20_64, cospi_12_64); in fdct8_sse2()
1221 const __m128i k__cospi_p12_p20 = pair_set_epi16(cospi_12_64, cospi_20_64); in fdct16_8col()
1222 const __m128i k__cospi_m20_p12 = pair_set_epi16(-cospi_20_64, cospi_12_64); in fdct16_8col()
1553 const __m128i k__cospi_p20_p12 = pair_set_epi16(cospi_20_64, cospi_12_64); in fadst16_8col()
1554 const __m128i k__cospi_p12_m20 = pair_set_epi16(cospi_12_64, -cospi_20_64); in fadst16_8col()
1556 const __m128i k__cospi_m12_p20 = pair_set_epi16(-cospi_12_64, cospi_20_64); in fadst16_8col()
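The x86 hits above all build their constants with pair_set_epi16(a, b). Assuming the libvpx convention that this helper interleaves a and b into alternating 16-bit lanes of a __m128i, a single _mm_madd_epi16 against interleaved inputs computes x*a + y*b in every 32-bit lane; a minimal sketch under that assumption follows (the pair_set_epi16 body here is an illustrative stand-in, not copied from the listing).

#include <emmintrin.h>  /* SSE2 intrinsics */
#include <stdint.h>
#include <stdio.h>

/* Assumption: libvpx-style helper; a lands in even lanes, b in odd lanes. */
static __m128i pair_set_epi16(int a, int b) {
  return _mm_set_epi16((int16_t)b, (int16_t)a, (int16_t)b, (int16_t)a,
                       (int16_t)b, (int16_t)a, (int16_t)b, (int16_t)a);
}

int main(void) {
  const int cospi_12_64 = 13623, cospi_20_64 = 9102; /* 14-bit fixed point */
  const __m128i k_p12_p20 = pair_set_epi16(cospi_12_64, cospi_20_64);

  /* Interleave two coefficient vectors: lanes become x0,y0,x1,y1,... */
  const __m128i x = _mm_set1_epi16(100);
  const __m128i y = _mm_set1_epi16(-40);
  const __m128i lo = _mm_unpacklo_epi16(x, y);

  /* Each 32-bit lane now holds x*cospi_12_64 + y*cospi_20_64; the real code
     follows this with a rounding shift by DCT_CONST_BITS (14). */
  const __m128i prod = _mm_madd_epi16(lo, k_p12_p20);

  int32_t out[4];
  _mm_storeu_si128((__m128i *)out, prod);
  printf("%d\n", out[0]); /* 100*13623 + (-40)*9102 = 998220 */
  return 0;
}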
