
Searched for refs:dst_r (Results 1 – 21 of 21), sorted by relevance

/external/libvpx/libvpx/vp9/common/mips/msa/
vp9_mfqe_msa.c 25 v8i16 src_wt, dst_wt, res_h_r, res_h_l, src_r, src_l, dst_r, dst_l; in filter_by_weight8x8_msa() local
44 UNPCK_UB_SH(dst0, dst_r, dst_l); in filter_by_weight8x8_msa()
46 res_h_r += (dst_r * dst_wt); in filter_by_weight8x8_msa()
55 UNPCK_UB_SH(dst1, dst_r, dst_l); in filter_by_weight8x8_msa()
57 res_h_r += (dst_r * dst_wt); in filter_by_weight8x8_msa()
73 v8i16 src_wt, dst_wt, res_h_r, res_h_l, src_r, src_l, dst_r, dst_l; in filter_by_weight16x16_msa() local
84 UNPCK_UB_SH(dst0, dst_r, dst_l); in filter_by_weight16x16_msa()
86 res_h_r += (dst_r * dst_wt); in filter_by_weight16x16_msa()
94 UNPCK_UB_SH(dst1, dst_r, dst_l); in filter_by_weight16x16_msa()
96 res_h_r += (dst_r * dst_wt); in filter_by_weight16x16_msa()
[all …]
/external/libvpx/libvpx/vp8/common/mips/msa/
mfqe_msa.c 25 v8i16 src_wt, dst_wt, res_h_r, res_h_l, src_r, src_l, dst_r, dst_l; in filter_by_weight8x8_msa() local
44 UNPCK_UB_SH(dst0, dst_r, dst_l); in filter_by_weight8x8_msa()
46 res_h_r += (dst_r * dst_wt); in filter_by_weight8x8_msa()
55 UNPCK_UB_SH(dst1, dst_r, dst_l); in filter_by_weight8x8_msa()
57 res_h_r += (dst_r * dst_wt); in filter_by_weight8x8_msa()
76 v8i16 src_r, src_l, dst_r, dst_l; in filter_by_weight16x16_msa() local
87 UNPCK_UB_SH(dst0, dst_r, dst_l); in filter_by_weight16x16_msa()
89 res_h_r += (dst_r * dst_wt); in filter_by_weight16x16_msa()
97 UNPCK_UB_SH(dst1, dst_r, dst_l); in filter_by_weight16x16_msa()
99 res_h_r += (dst_r * dst_wt); in filter_by_weight16x16_msa()
[all …]
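
Both MSA files above vectorize the same scalar blend: each destination byte is mixed with the source using fixed-point weights, with dst_r/dst_l holding the unpacked right and left halves of the destination vector. A minimal plain-C sketch of that per-pixel blend, assuming a 4-bit weight precision (the names and the WEIGHT_PRECISION constant are illustrative, not the library's exact API):

#include <stdint.h>

#define WEIGHT_PRECISION 4 /* assumed fixed-point precision of the weights */

/* dst = (src * src_wt + dst * dst_wt + rounding) >> precision, per byte. */
static void filter_by_weight_c(const uint8_t *src, int src_stride,
                               uint8_t *dst, int dst_stride,
                               int block_size, int src_weight) {
  const int dst_weight = (1 << WEIGHT_PRECISION) - src_weight;
  const int rounding = 1 << (WEIGHT_PRECISION - 1);
  for (int r = 0; r < block_size; ++r) {
    for (int c = 0; c < block_size; ++c) {
      dst[c] = (uint8_t)((src[c] * src_weight + dst[c] * dst_weight + rounding)
                         >> WEIGHT_PRECISION);
    }
    src += src_stride;
    dst += dst_stride;
  }
}

The MSA versions do the same arithmetic on eight or sixteen pixels at a time, which is why each destination vector is unpacked into dst_r and dst_l before the multiply.
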
/external/pcre/dist2/src/sljit/
sljitNativeX86_common.c 893 sljit_s32 dst_r; in emit_mov_byte() local
919 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1; in emit_mov_byte()
924 SLJIT_ASSERT(dst_r == TMP_REG1); in emit_mov_byte()
927 dst_r = src; in emit_mov_byte()
929 dst_r = src; in emit_mov_byte()
968 inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw); in emit_mov_byte()
976 if (dst_r == TMP_REG1) { in emit_mov_byte()
997 inst = emit_x86_instruction(compiler, 1, work_r, 0, dst_r, 0); in emit_mov_byte()
1010 inst = emit_x86_instruction(compiler, 1, work_r, 0, dst_r, 0); in emit_mov_byte()
1016 inst = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw); in emit_mov_byte()
[all …]
sljitNativeARM_64.c 1117 sljit_s32 dst_r, flags, mem_flags; in sljit_emit_op1() local
1142 dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1; in sljit_emit_op1()
1147 if (dst_r != TMP_REG1 && FAST_IS_REG(src)) in sljit_emit_op1()
1148 … return emit_op_imm(compiler, op | ((op_flags & SLJIT_I32_OP) ? INT_OP : 0), dst_r, TMP_REG1, src); in sljit_emit_op1()
1192 FAIL_IF(emit_op_imm(compiler, SLJIT_MOV | ARG2_IMM, dst_r, TMP_REG1, srcw)); in sljit_emit_op1()
1194 dst_r = src; in sljit_emit_op1()
1196 FAIL_IF(emit_op_mem(compiler, mem_flags, dst_r, src, srcw, TMP_REG1)); in sljit_emit_op1()
1199 return emit_op_mem(compiler, mem_flags | STORE, dst_r, dst, dstw, TMP_REG2); in sljit_emit_op1()
1219 emit_op_imm(compiler, flags | op, dst_r, TMP_REG1, src); in sljit_emit_op1()
1222 return emit_op_mem(compiler, mem_flags | STORE, dst_r, dst, dstw, TMP_REG2); in sljit_emit_op1()
[all …]
sljitNativeARM_T2_32.c 1319 sljit_s32 dst_r, flags; in sljit_emit_op1() local
1334 dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1; in sljit_emit_op1()
1372 FAIL_IF(emit_op_imm(compiler, SLJIT_MOV | ARG2_IMM, dst_r, TMP_REG2, srcw)); in sljit_emit_op1()
1374 FAIL_IF(emit_op_mem(compiler, flags, dst_r, src, srcw, TMP_REG1)); in sljit_emit_op1()
1376 if (dst_r != TMP_REG1) in sljit_emit_op1()
1377 return emit_op_imm(compiler, op, dst_r, TMP_REG2, src); in sljit_emit_op1()
1378 dst_r = src; in sljit_emit_op1()
1384 return emit_op_mem(compiler, flags | STORE, dst_r, dst, dstw, TMP_REG2); in sljit_emit_op1()
1402 emit_op_imm(compiler, flags | op, dst_r, TMP_REG2, src); in sljit_emit_op1()
1405 return emit_op_mem(compiler, flags | STORE, dst_r, dst, dstw, TMP_REG2); in sljit_emit_op1()
[all …]
sljitNativeSPARC_common.c 686 sljit_s32 dst_r = TMP_REG2; in emit_op() local
698 dst_r = dst; in emit_op()
701 sugg_src2_r = dst_r; in emit_op()
752 dst_r = src2_r; in emit_op()
763 dst_r = 0; in emit_op()
791 FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r)); in emit_op()
795 getput_arg_fast(compiler, flags, dst_r, dst, dstw); in emit_op()
798 return getput_arg(compiler, flags, dst_r, dst, dstw, 0, 0); in emit_op()
997 sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1; in sljit_emit_fop1_conv_f64_from_sw() local
1016 …FAIL_IF(push_inst(compiler, SELECT_FOP(op, FITOS, FITOD) | FD(dst_r) | FS2(TMP_FREG1), MOVABLE_INS… in sljit_emit_fop1_conv_f64_from_sw()
[all …]
sljitNativePPC_common.c 978 sljit_s32 dst_r = TMP_REG2; in emit_op() local
986 dst_r = dst; in emit_op()
990 sugg_src2_r = dst_r; in emit_op()
1013 dst_r = src2_r; in emit_op()
1024 FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r)); in emit_op()
1029 return emit_op_mem(compiler, input_flags, dst_r, dst, dstw, TMP_REG1); in emit_op()
1562 sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1; in sljit_emit_fop1_conv_f64_from_sw() local
1585 FAIL_IF(push_inst(compiler, FCFID | FD(dst_r) | FB(TMP_FREG1))); in sljit_emit_fop1_conv_f64_from_sw()
1590 return push_inst(compiler, FRSP | FD(dst_r) | FB(dst_r)); in sljit_emit_fop1_conv_f64_from_sw()
1595 sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1; in sljit_emit_fop1_conv_f64_from_sw()
[all …]
sljitNativeMIPS_common.c 937 sljit_s32 dst_r = TMP_REG2; in emit_op() local
952 dst_r = dst; in emit_op()
955 sugg_src2_r = dst_r; in emit_op()
1009 dst_r = src2_r; in emit_op()
1020 dst_r = 0; in emit_op()
1048 FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r)); in emit_op()
1052 getput_arg_fast(compiler, flags, DR(dst_r), dst, dstw); in emit_op()
1055 return getput_arg(compiler, flags, DR(dst_r), dst, dstw, 0, 0); in emit_op()
1349 sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1; in sljit_emit_fop1_conv_f64_from_sw() local
1366 …(4 << 21) | (((op & SLJIT_F32_OP) ^ SLJIT_F32_OP) >> 8) | FS(TMP_FREG1) | FD(dst_r), MOVABLE_INS)); in sljit_emit_fop1_conv_f64_from_sw()
[all …]
sljitNativeARM_32.c 1851 sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1; in sljit_emit_fop1_conv_f64_from_sw() local
1866 …FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VCVT_F32_S32, op & SLJIT_F32_OP, dst_r, TMP_FREG1, … in sljit_emit_fop1_conv_f64_from_sw()
1897 sljit_s32 dst_r; in sljit_emit_fop1() local
1904 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1; in sljit_emit_fop1()
1910 FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_F32_OP) | FPU_LOAD, dst_r, src, srcw)); in sljit_emit_fop1()
1911 src = dst_r; in sljit_emit_fop1()
1916 if (src != dst_r) { in sljit_emit_fop1()
1917 if (dst_r != TMP_FREG1) in sljit_emit_fop1()
1918 FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VMOV_F32, op & SLJIT_F32_OP, dst_r, src, 0))); in sljit_emit_fop1()
1920 dst_r = src; in sljit_emit_fop1()
[all …]
sljitNativeX86_64.c 798 sljit_s32 dst_r; in emit_mov_int() local
823 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1; in emit_mov_int()
826 dst_r = src; in emit_mov_int()
829 inst = emit_x86_instruction(compiler, 1, dst_r, 0, src, srcw); in emit_mov_int()
834 FAIL_IF(emit_mov(compiler, dst_r, 0, src, srcw)); in emit_mov_int()
841 inst = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw); in emit_mov_int()
sljitNativeTILEGX_64.c 1965 sljit_s32 dst_r = TMP_REG2; in emit_op() local
1981 dst_r = dst; in emit_op()
1984 sugg_src2_r = dst_r; in emit_op()
2037 dst_r = src2_r; in emit_op()
2046 dst_r = 0; in emit_op()
2071 FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r)); in emit_op()
2075 getput_arg_fast(compiler, flags, reg_map[dst_r], dst, dstw); in emit_op()
2079 return getput_arg(compiler, flags, reg_map[dst_r], dst, dstw, 0, 0); in emit_op()
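
The sljit back ends above all share one idiom around dst_r: compute directly into the destination when it is already a register, otherwise compute into a scratch register (some back ends instead set dst_r to 0 to mean "no destination", or alias dst_r to src to skip a move) and store the result back to memory afterwards. A hedged C-style sketch of that idiom; FAST_IS_REG, TMP_REG1 and the emit_* helpers below are simplified stand-ins, not sljit's real API:

typedef int sljit_s32;
typedef long sljit_sw;

#define FAST_IS_REG(x) ((x) >= 1 && (x) <= 15) /* assumed register encoding */
#define TMP_REG1 15                            /* assumed scratch register */

/* Stand-ins for the real instruction emitters. */
static int emit_compute(sljit_s32 dst_r, sljit_s32 src) {
  (void)dst_r; (void)src; return 0;
}
static int emit_store_mem(sljit_s32 src_r, sljit_s32 dst, sljit_sw dstw) {
  (void)src_r; (void)dst; (void)dstw; return 0;
}

static int emit_unary_op(sljit_s32 dst, sljit_sw dstw, sljit_s32 src) {
  /* Pick the real destination register if there is one, else a scratch. */
  sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

  int err = emit_compute(dst_r, src); /* produce the result in dst_r */
  if (err) return err;

  /* Memory destination: flush the scratch register back to dst/dstw. */
  if (!FAST_IS_REG(dst)) return emit_store_mem(dst_r, dst, dstw);
  return 0;
}

The floating-point hits follow the same shape with TMP_FREG1 in place of TMP_REG1.
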
/external/stressapptest/src/
adler32memcpy.cc 421 #define dst_r "r4" in AdlerMemcpyAsm()
427 "mov " dst_r ", %[dst]; \n" in AdlerMemcpyAsm()
452 "vstm " dst_r "!, {q8, q9, q10, q11}; \n" in AdlerMemcpyAsm()
/external/libvpx/libvpx/third_party/libyuv/source/
row_neon.cc 590 uint8_t* dst_r, in SplitRGBRow_NEON() argument
604 "+r"(dst_r), // %1 in SplitRGBRow_NEON()
row_any.cc 1067 void NAMEANY(const uint8_t* src_ptr, uint8_t* dst_r, uint8_t* dst_g, \
1074 ANY_SIMD(src_ptr, dst_r, dst_g, dst_b, n); \
1078 memcpy(dst_r + n, temp + 16 * 3, r); \
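
The NAMEANY wrapper above is the generic "any width" shim: the multiple-of-16 part of the row goes straight to the SIMD kernel, and the leftover pixels are bounced through a small temporary buffer. A rough, self-contained sketch of that pattern, with a scalar stand-in playing the role of the SIMD kernel (names and the 16-pixel block size are assumptions for illustration):

#include <stdint.h>
#include <string.h>

/* Stand-in for the SIMD kernel; the real one only handles widths that are a
 * multiple of 16. It splits packed RGB into three separate planes. */
static void SplitRGBRow_Kernel(const uint8_t* src_rgb, uint8_t* dst_r,
                               uint8_t* dst_g, uint8_t* dst_b, int width) {
  for (int x = 0; x < width; ++x) {
    dst_r[x] = src_rgb[0];
    dst_g[x] = src_rgb[1];
    dst_b[x] = src_rgb[2];
    src_rgb += 3;
  }
}

/* "Any width" wrapper: kernel on the aligned part, remainder via a temp. */
static void SplitRGBRow_Any(const uint8_t* src_rgb, uint8_t* dst_r,
                            uint8_t* dst_g, uint8_t* dst_b, int width) {
  uint8_t temp[16 * 6];      /* 16 RGB pixels in, 3 x 16 plane bytes out */
  const int r = width & 15;  /* leftover pixels */
  const int n = width & ~15; /* multiple-of-16 part */
  if (n > 0) SplitRGBRow_Kernel(src_rgb, dst_r, dst_g, dst_b, n);
  if (r > 0) {
    memset(temp, 0, 16 * 3);              /* pad the tail block with zeros */
    memcpy(temp, src_rgb + n * 3, r * 3); /* copy the partial pixels in */
    SplitRGBRow_Kernel(temp, temp + 16 * 3, temp + 16 * 4, temp + 16 * 5, 16);
    memcpy(dst_r + n, temp + 16 * 3, r);  /* copy the valid outputs back */
    memcpy(dst_g + n, temp + 16 * 4, r);
    memcpy(dst_b + n, temp + 16 * 5, r);
  }
}
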
planar_functions.cc 496 uint8_t* dst_r, in SplitRGBPlane() argument
505 void (*SplitRGBRow)(const uint8_t* src_rgb, uint8_t* dst_r, uint8_t* dst_g, in SplitRGBPlane()
510 dst_r = dst_r + (height - 1) * dst_stride_r; in SplitRGBPlane()
543 SplitRGBRow(src_rgb, dst_r, dst_g, dst_b, width); in SplitRGBPlane()
544 dst_r += dst_stride_r; in SplitRGBPlane()
row_neon64.cc 637 uint8_t* dst_r, in SplitRGBRow_NEON() argument
650 "+r"(dst_r), // %1 in SplitRGBRow_NEON()
row_gcc.cc 3614 uint8_t* dst_r, in SplitRGBRow_SSSE3() argument
3658 "+r"(dst_r), // %1 in SplitRGBRow_SSSE3()
row_common.cc 2109 uint8_t* dst_r, in SplitRGBRow_C() argument
2115 dst_r[x] = src_rgb[0]; in SplitRGBRow_C()
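
The planar_functions.cc hit above is the plane-level driver for these row kernels: it selects a SplitRGBRow implementation, flips the output for a negative height by starting at the last row and negating the destination strides, and then walks the image row by row. A hedged sketch of that driver; the function-pointer plumbing and names are illustrative, not libyuv's exact signatures:

#include <stdint.h>

typedef void (*SplitRGBRowFn)(const uint8_t* src_rgb, uint8_t* dst_r,
                              uint8_t* dst_g, uint8_t* dst_b, int width);

/* Plane-level driver in the style of SplitRGBPlane(): a negative height means
 * "write the output bottom-up", handled by pointing the destinations at the
 * last row and negating the destination strides before the row loop. */
static void split_rgb_plane_sketch(const uint8_t* src_rgb, int src_stride_rgb,
                                   uint8_t* dst_r, int dst_stride_r,
                                   uint8_t* dst_g, int dst_stride_g,
                                   uint8_t* dst_b, int dst_stride_b,
                                   int width, int height,
                                   SplitRGBRowFn split_rgb_row) {
  if (height < 0) { /* invert the image vertically */
    height = -height;
    dst_r = dst_r + (height - 1) * dst_stride_r;
    dst_g = dst_g + (height - 1) * dst_stride_g;
    dst_b = dst_b + (height - 1) * dst_stride_b;
    dst_stride_r = -dst_stride_r;
    dst_stride_g = -dst_stride_g;
    dst_stride_b = -dst_stride_b;
  }
  for (int y = 0; y < height; ++y) {
    split_rgb_row(src_rgb, dst_r, dst_g, dst_b, width); /* split one row */
    src_rgb += src_stride_rgb;
    dst_r += dst_stride_r;
    dst_g += dst_stride_g;
    dst_b += dst_stride_b;
  }
}
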
/external/libvpx/libvpx/third_party/libyuv/include/libyuv/
planar_functions.h 112 uint8_t* dst_r,
row.h 1373 uint8_t* dst_r,
1378 uint8_t* dst_r,
1383 uint8_t* dst_r,
1388 uint8_t* dst_r,
1393 uint8_t* dst_r,
/external/webp/src/mux/
anim_encode.c 377 const int dst_r = (dst >> 16) & 0xff; in PixelsAreSimilar() local
382 (abs(src_r - dst_r) * dst_a <= (max_allowed_diff * 255)) && in PixelsAreSimilar()
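
In this last hit dst_r is not a register or an output plane but the red channel of a packed 32-bit ARGB pixel, used when the animation encoder decides whether two pixels are close enough to be treated as identical. A sketch of that per-channel test reconstructed around the two lines shown; the handling of the green, blue and alpha channels is an assumption for illustration:

#include <stdint.h>
#include <stdlib.h>

/* Rough sketch: compare two packed ARGB pixels channel by channel, scaling
 * the allowed difference by the destination alpha as in the lines above. */
static int pixels_are_similar(uint32_t src, uint32_t dst, int max_allowed_diff) {
  const int src_a = (src >> 24) & 0xff, dst_a = (dst >> 24) & 0xff;
  const int src_r = (src >> 16) & 0xff, dst_r = (dst >> 16) & 0xff;
  const int src_g = (src >> 8) & 0xff,  dst_g = (dst >> 8) & 0xff;
  const int src_b = (src >> 0) & 0xff,  dst_b = (dst >> 0) & 0xff;
  return (src_a == dst_a) &&
         (abs(src_r - dst_r) * dst_a <= max_allowed_diff * 255) &&
         (abs(src_g - dst_g) * dst_a <= max_allowed_diff * 255) &&
         (abs(src_b - dst_b) * dst_a <= max_allowed_diff * 255);
}
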