/external/libaom/libaom/aom_dsp/x86/
  variance_impl_avx2.c
      72  exp_dst_lo = _mm256_unpacklo_epi8(dst_reg, zero_reg); \
      73  exp_dst_hi = _mm256_unpackhi_epi8(dst_reg, zero_reg); \
      88  res_cmp = _mm256_cmpgt_epi16(zero_reg, sum_reg); \
     113  __m256i zero_reg;  in aom_sub_pixel_variance32xh_avx2() local
     117  zero_reg = _mm256_set1_epi16(0);  in aom_sub_pixel_variance32xh_avx2()
     125  MERGE_WITH_SRC(src_reg, zero_reg)  in aom_sub_pixel_variance32xh_avx2()
     137  MERGE_WITH_SRC(src_reg, zero_reg)  in aom_sub_pixel_variance32xh_avx2()
     167  MERGE_WITH_SRC(src_reg, zero_reg)  in aom_sub_pixel_variance32xh_avx2()
     187  MERGE_WITH_SRC(src_avg, zero_reg)  in aom_sub_pixel_variance32xh_avx2()
     251  MERGE_WITH_SRC(src_pack, zero_reg)  in aom_sub_pixel_variance32xh_avx2()
    [all …]

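The aom_sub_pixel_variance32xh_avx2 hits above all rely on one idiom: a zeroed `__m256i` (`zero_reg`) is interleaved with packed 8-bit pixels so they widen to 16-bit lanes before the sum/SSE arithmetic. A minimal standalone sketch of that widening step, assuming an AVX2 target (this is not the libaom macro; the function and buffer names are illustrative):

```cpp
// Widen 32 unsigned bytes to 16-bit lanes by interleaving with a zero vector.
// Compile with AVX2 enabled (e.g. -mavx2); names here are illustrative only.
#include <immintrin.h>
#include <cstdint>

static void WidenU8ToU16(const uint8_t* src, int16_t* dst /* 32 values */) {
  const __m256i zero_reg = _mm256_setzero_si256();
  const __m256i bytes = _mm256_loadu_si256(reinterpret_cast<const __m256i*>(src));

  // unpacklo/unpackhi interleave within each 128-bit half, so the widened
  // values come out per 128-bit lane rather than in strict source order.
  const __m256i lo = _mm256_unpacklo_epi8(bytes, zero_reg);  // bytes 0-7 and 16-23
  const __m256i hi = _mm256_unpackhi_epi8(bytes, zero_reg);  // bytes 8-15 and 24-31

  _mm256_storeu_si256(reinterpret_cast<__m256i*>(dst), lo);
  _mm256_storeu_si256(reinterpret_cast<__m256i*>(dst + 16), hi);
}
```

The `_mm256_cmpgt_epi16(zero_reg, sum_reg)` hit is presumably the companion trick: comparing the zero vector against the 16-bit sums yields an all-ones mask for negative lanes, which is what a sign-extending accumulation needs.
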
/external/swiftshader/third_party/llvm-7.0/configs/common/lib/Target/ARM/
  ARMGenDAGISel.inc
      84  /* 54*/ OPC_EmitRegister, MVT::i32, 0 /*zero_reg*/,
      92  /* 72*/ OPC_EmitRegister, MVT::i32, 0 /*zero_reg*/,
     122  /* 129*/ OPC_EmitRegister, MVT::i32, 0 /*zero_reg*/,
     130  /* 147*/ OPC_EmitRegister, MVT::i32, 0 /*zero_reg*/,
     157  /* 200*/ OPC_EmitRegister, MVT::i32, 0 /*zero_reg*/,
     183  /* 255*/ OPC_EmitRegister, MVT::i32, 0 /*zero_reg*/,
     202  /* 294*/ OPC_EmitRegister, MVT::i32, 0 /*zero_reg*/,
     229  /* 349*/ OPC_EmitRegister, MVT::i32, 0 /*zero_reg*/,
     256  /* 407*/ OPC_EmitRegister, MVT::i32, 0 /*zero_reg*/,
     275  /* 446*/ OPC_EmitRegister, MVT::i32, 0 /*zero_reg*/,
    [all …]

  ARMGenGlobalISel.inc
     790  GIR_AddRegister, /*InsnID*/0, ::zero_reg,
     814  GIR_AddRegister, /*InsnID*/0, ::zero_reg,
     838  GIR_AddRegister, /*InsnID*/0, ::zero_reg,
     862  GIR_AddRegister, /*InsnID*/0, ::zero_reg,
     886  GIR_AddRegister, /*InsnID*/0, ::zero_reg,
     910  GIR_AddRegister, /*InsnID*/0, ::zero_reg,
     934  GIR_AddRegister, /*InsnID*/0, ::zero_reg,
     958  GIR_AddRegister, /*InsnID*/0, ::zero_reg,
     994  GIR_AddRegister, /*InsnID*/0, ::zero_reg,
    1030  GIR_AddRegister, /*InsnID*/0, ::zero_reg,
    [all …]

/external/libvpx/libvpx/vpx_dsp/x86/
  variance_avx2.c
     190  exp_dst_lo = _mm256_unpacklo_epi8(dst_reg, zero_reg); \
     191  exp_dst_hi = _mm256_unpackhi_epi8(dst_reg, zero_reg); \
     206  res_cmp = _mm256_cmpgt_epi16(zero_reg, sum_reg); \
     230  const __m256i zero_reg = _mm256_setzero_si256();  in spv32_x0_y0() local
     239  exp_src_lo = _mm256_unpacklo_epi8(avg_reg, zero_reg);  in spv32_x0_y0()
     240  exp_src_hi = _mm256_unpackhi_epi8(avg_reg, zero_reg);  in spv32_x0_y0()
     243  exp_src_lo = _mm256_unpacklo_epi8(src_reg, zero_reg);  in spv32_x0_y0()
     244  exp_src_hi = _mm256_unpackhi_epi8(src_reg, zero_reg);  in spv32_x0_y0()
     259  const __m256i zero_reg = _mm256_setzero_si256();  in spv32_half_zero() local
     270  exp_src_lo = _mm256_unpacklo_epi8(avg_reg, zero_reg);  in spv32_half_zero()
    [all …]

/external/libaom/libaom/av1/encoder/x86/
  error_intrin_avx2.c
      40  const __m256i zero_reg = _mm256_setzero_si256();  in av1_block_error_avx2() local
      57  exp_dqcoeff_lo = _mm256_unpacklo_epi32(dqcoeff_reg, zero_reg);  in av1_block_error_avx2()
      58  exp_dqcoeff_hi = _mm256_unpackhi_epi32(dqcoeff_reg, zero_reg);  in av1_block_error_avx2()
      60  exp_coeff_lo = _mm256_unpacklo_epi32(coeff_reg, zero_reg);  in av1_block_error_avx2()
      61  exp_coeff_hi = _mm256_unpackhi_epi32(coeff_reg, zero_reg);  in av1_block_error_avx2()

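av1_block_error_avx2 applies the same interleave-with-zero idiom one step wider: `_mm256_unpacklo/hi_epi32` against `zero_reg` turns 32-bit lanes into 64-bit lanes ahead of the error accumulation. A hedged sketch of just that widening step (not the libaom kernel; note that unpacking with zero is a zero-extension, so it is only a faithful signed widening when the 32-bit inputs are known to be non-negative):

```cpp
// Zero-extend eight 32-bit lanes to 64-bit lanes via unpack-with-zero.
// Sketch only; the surrounding libaom kernel is not reproduced here.
#include <immintrin.h>

static inline void ZeroExtend32To64(__m256i v, __m256i* lo64, __m256i* hi64) {
  const __m256i zero_reg = _mm256_setzero_si256();
  // Pairing each 32-bit lane with a zero lane yields its little-endian 64-bit
  // zero-extension; lo/hi again split within the two 128-bit halves.
  *lo64 = _mm256_unpacklo_epi32(v, zero_reg);  // lanes 0,1 and 4,5
  *hi64 = _mm256_unpackhi_epi32(v, zero_reg);  // lanes 2,3 and 6,7
}
```
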
/external/v8/src/mips/
  macro-assembler-mips.cc
      46  return rt.rm() == zero_reg;  in IsZero()
     219  Branch(&ok, eq, t8, Operand(zero_reg));  in RecordWriteField()
     793  subu(rs, zero_reg, rt.rm());  in Neg()
     918  subu(scratch, zero_reg, rt.rm());  in Ror()
     939  lw(zero_reg, rs);  in Pref()
    1356  addiu(rd, zero_reg, j.immediate());  in li()
    1358  ori(rd, zero_reg, j.immediate());  in li()
    1487  Nor(scratch2, zero_reg, scratch3);  in ShlPair()
    1494  Branch(&done, eq, scratch1, Operand(zero_reg));  in ShlPair()
    1496  mov(dst_low, zero_reg);  in ShlPair()
    [all …]

  assembler-mips.cc
     167  zero_reg,  in ToRegister()
     701  Register nop_rt_reg = (type == 0) ? zero_reg : at;  in IsNop()
     703  rd == static_cast<uint32_t>(ToNumber(zero_reg)) &&  in IsNop()
    1576  beq(zero_reg, zero_reg, offset);  in b()
    1581  bgezal(zero_reg, offset);  in bal()
    1613  DCHECK(rt != zero_reg);  in bgezc()
    1620  DCHECK(rs != zero_reg);  in bgeuc()
    1621  DCHECK(rt != zero_reg);  in bgeuc()
    1629  DCHECK(rs != zero_reg);  in bgec()
    1630  DCHECK(rt != zero_reg);  in bgec()
    [all …]

  codegen-mips.cc
      68  __ bne(t2, zero_reg, &lastb);  in CreateMemCopyUint8Function()
      77  __ bne(t8, zero_reg, &unaligned);  in CreateMemCopyUint8Function()
      78  __ subu(a3, zero_reg, a0);  // In delay slot.  in CreateMemCopyUint8Function()
      81  __ beq(a3, zero_reg, &aligned);  // Already aligned.  in CreateMemCopyUint8Function()
     130  __ Branch(USE_DELAY_SLOT, &skip_pref, gt, v1, Operand(zero_reg));  in CreateMemCopyUint8Function()
     224  __ Branch(&leave, le, a2, Operand(zero_reg));  in CreateMemCopyUint8Function()
     243  __ beq(a3, zero_reg, &ua_chk16w);  in CreateMemCopyUint8Function()
     296  __ Branch(USE_DELAY_SLOT, &ua_skip_pref, gt, v1, Operand(zero_reg));  in CreateMemCopyUint8Function()
     331  __ Branch(USE_DELAY_SLOT, &ua_skip_pref, gt, v1, Operand(zero_reg));  in CreateMemCopyUint8Function()
     530  __ beq(a2, zero_reg, &leave);  in CreateMemCopyUint8Function()

  code-stubs-mips.cc
      99  __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg));  in Generate()
     163  __ sw(zero_reg, MemOperand(t1));  in Generate()
     340  __ Branch(&profiler_disabled, eq, t9, Operand(zero_reg));  in CallApiFunctionAndReturn()
     555  __ sw(zero_reg,  in Generate()

  macro-assembler-mips.h
     243  #define COND_ARGS Condition cond = al, Register rs = zero_reg, \
     244  const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT
     268  Register rs = zero_reg, const Operand& rt = Operand(zero_reg)) {
     714  void mov(Register rd, Register rt) { or_(rd, rt, zero_reg); }  in mov()
    1045  #define COND_ARGS Condition cond = al, Register rs = zero_reg, \
    1046  const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT

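Almost every pseudo-instruction in the V8 MIPS port above bottoms out in the hard-wired `$zero` register: `mov` is `or rd, rt, zero_reg`, `Neg` is `subu rd, zero_reg, rt`, `li` builds immediates with `addiu`/`ori` against `zero_reg`, and `IsNop` recognises `sll zero_reg, zero_reg, 0`. A minimal standalone sketch of why that works at the encoding level (generic MIPS32 instruction encoding, not V8's assembler; the register choices are illustrative):

```cpp
// How a hard-wired $zero turns mov/neg/li/nop into ordinary MIPS instructions.
#include <cstdint>
#include <cstdio>

namespace {

// MIPS32 R-type (SPECIAL, opcode 0): rs(5) rt(5) rd(5) shamt(5) funct(6).
uint32_t RType(uint32_t rs, uint32_t rt, uint32_t rd, uint32_t shamt,
               uint32_t funct) {
  return (rs << 21) | (rt << 16) | (rd << 11) | (shamt << 6) | funct;
}

// MIPS32 I-type: opcode(6) rs(5) rt(5) imm(16).
uint32_t IType(uint32_t opcode, uint32_t rs, uint32_t rt, uint32_t imm16) {
  return (opcode << 26) | (rs << 21) | (rt << 16) | (imm16 & 0xffffu);
}

constexpr uint32_t kZeroReg = 0;  // $zero: reads as 0, writes are discarded.

}  // namespace

int main() {
  const uint32_t rd = 8, rt = 9;  // $t0 and $t1, purely as examples.

  // mov rd, rt    ==  or rd, rt, $zero      (SPECIAL, funct 0x25 = OR)
  std::printf("mov -> 0x%08x\n", static_cast<unsigned>(RType(rt, kZeroReg, rd, 0, 0x25)));

  // neg rd, rt    ==  subu rd, $zero, rt    (SPECIAL, funct 0x23 = SUBU)
  std::printf("neg -> 0x%08x\n", static_cast<unsigned>(RType(kZeroReg, rt, rd, 0, 0x23)));

  // li rd, 0x1234 ==  ori rd, $zero, 0x1234 (opcode 0x0d = ORI)
  std::printf("li  -> 0x%08x\n", static_cast<unsigned>(IType(0x0d, kZeroReg, rd, 0x1234)));

  // nop           ==  sll $zero, $zero, 0, i.e. the all-zero word.
  std::printf("nop -> 0x%08x\n", static_cast<unsigned>(RType(0, 0, 0, 0, 0x00)));
  return 0;
}
```

Because `$zero` always reads as 0 and ignores writes, none of these pseudo-ops need a dedicated opcode; it is presumably also why `Pref()` above can fall back to `lw(zero_reg, rs)`, a load whose result is deliberately discarded.
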
/external/v8/src/builtins/mips/
  builtins-mips.cc
      51  t0, Operand(zero_reg));  in Generate_InternalArrayConstructor()
     136  __ Branch(&loop, greater_equal, t3, Operand(zero_reg));  in Generate_JSBuiltinsConstructStubHelper()
     193  __ Branch(&not_create_implicit_receiver, ne, t2, Operand(zero_reg));  in Generate_JSConstructStubGeneric()
     266  __ Branch(&loop, greater_equal, t3, Operand(zero_reg));  in Generate_JSConstructStubGeneric()
     481  __ Branch(&prepare_step_in_if_stepping, ne, t1, Operand(zero_reg));  in Generate_ResumeGeneratorTrampoline()
     518  __ Move(t2, zero_reg);  in Generate_ResumeGeneratorTrampoline()
     521  __ Branch(&done_loop, lt, a3, Operand(zero_reg));  in Generate_ResumeGeneratorTrampoline()
     695  __ Branch(&found_deoptimized_code, ne, scratch2, Operand(zero_reg));  in MaybeTailCallOptimizedCodeSlot()
     741  __ Branch(&extra_wide, ne, scratch2, Operand(zero_reg));  in AdvanceBytecodeOffsetOrReturn()
     828  t0, Operand(zero_reg));  in Generate_InterpreterEntryTrampoline()
    [all …]

/external/v8/src/builtins/mips64/
  builtins-mips64.cc
      51  a4, Operand(zero_reg));  in Generate_InternalArrayConstructor()
     135  __ Branch(&loop, greater_equal, t3, Operand(zero_reg));  in Generate_JSBuiltinsConstructStubHelper()
     193  __ Branch(&not_create_implicit_receiver, ne, t2, Operand(zero_reg));  in Generate_JSConstructStubGeneric()
     266  __ Branch(&loop, greater_equal, t3, Operand(zero_reg));  in Generate_JSConstructStubGeneric()
     372  __ Branch(&prepare_step_in_if_stepping, ne, a5, Operand(zero_reg));  in Generate_ResumeGeneratorTrampoline()
     411  __ Move(t2, zero_reg);  in Generate_ResumeGeneratorTrampoline()
     414  __ Branch(&done_loop, lt, a3, Operand(zero_reg));  in Generate_ResumeGeneratorTrampoline()
     693  __ Branch(&found_deoptimized_code, ne, a5, Operand(zero_reg));  in MaybeTailCallOptimizedCodeSlot()
     740  __ Branch(&extra_wide, ne, scratch2, Operand(zero_reg));  in AdvanceBytecodeOffsetOrReturn()
     827  a4, Operand(zero_reg));  in Generate_InterpreterEntryTrampoline()
    [all …]

/external/v8/src/mips64/
  macro-assembler-mips64.cc
      46  return rt.rm() == zero_reg;  in IsZero()
     219  Branch(&ok, eq, t8, Operand(zero_reg));  in RecordWriteField()
     931  dsubu(rs, zero_reg, rt.rm());  in Neg()
    1151  dinsu_(dest, zero_reg, 32, 32);  in ByteSwapUnsigned()
    1587  daddiu(rd, zero_reg, (j.immediate() & kImm16Mask));  in LiLower32BitHelper()
    1589  ori(rd, zero_reg, j.immediate() & kImm16Mask);  in LiLower32BitHelper()
    1719  ori(rd, zero_reg, j.immediate() & kImm16Mask);  in li_optimized()
    1726  ori(rd, zero_reg, j.immediate() & kImm16Mask);  in li_optimized()
    1749  daddiu(rd, zero_reg, j.immediate() & kImm16Mask);  in li_optimized()
    1757  daddiu(rd, zero_reg, j.immediate() & kImm16Mask);  in li_optimized()
    [all …]

  assembler-mips64.cc
     145  zero_reg,  in ToRegister()
     670  Register nop_rt_reg = (type == 0) ? zero_reg : at;  in IsNop()
     672  rd == static_cast<uint32_t>(ToNumber(zero_reg)) &&  in IsNop()
    1556  beq(zero_reg, zero_reg, offset);  in b()
    1561  bgezal(zero_reg, offset);  in bal()
    1593  DCHECK(rt != zero_reg);  in bgezc()
    1600  DCHECK(rs != zero_reg);  in bgeuc()
    1601  DCHECK(rt != zero_reg);  in bgeuc()
    1609  DCHECK(rs != zero_reg);  in bgec()
    1610  DCHECK(rt != zero_reg);  in bgec()
    [all …]

  codegen-mips64.cc
      67  __ bne(a6, zero_reg, &lastb);  in CreateMemCopyUint8Function()
      76  __ bne(t8, zero_reg, &unaligned);  in CreateMemCopyUint8Function()
      77  __ subu(a3, zero_reg, a0);  // In delay slot.  in CreateMemCopyUint8Function()
      80  __ beq(a3, zero_reg, &aligned);  // Already aligned.  in CreateMemCopyUint8Function()
     130  __ Branch(USE_DELAY_SLOT, &skip_pref, gt, v1, Operand(zero_reg));  in CreateMemCopyUint8Function()
     224  __ Branch(&leave, le, a2, Operand(zero_reg));  in CreateMemCopyUint8Function()
     243  __ beq(a3, zero_reg, &ua_chk16w);  in CreateMemCopyUint8Function()
     296  __ Branch(USE_DELAY_SLOT, &ua_skip_pref, gt, v1, Operand(zero_reg));  in CreateMemCopyUint8Function()
     332  __ Branch(USE_DELAY_SLOT, &ua_skip_pref, gt, v1, Operand(zero_reg));  in CreateMemCopyUint8Function()
     531  __ beq(a2, zero_reg, &leave);  in CreateMemCopyUint8Function()

  code-stubs-mips64.cc
      98  __ Branch(&non_outermost_js, ne, a6, Operand(zero_reg));  in Generate()
     162  __ Sd(zero_reg, MemOperand(a5));  in Generate()
     344  __ Branch(&profiler_disabled, eq, t9, Operand(zero_reg));  in CallApiFunctionAndReturn()
     560  __ Sd(zero_reg,  in Generate()

  macro-assembler-mips64.h
     263  #define COND_ARGS Condition cond = al, Register rs = zero_reg, \
     264  const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT
     285  Register rs = zero_reg, const Operand& rt = Operand(zero_reg)) {
     682  void mov(Register rd, Register rt) { or_(rd, rt, zero_reg); }  in mov()
    1097  #define COND_ARGS Condition cond = al, Register rs = zero_reg, \
    1098  const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT

  deoptimizer-mips64.cc
      81  __ mov(a0, zero_reg);  in Generate()
     195  __ BranchShort(&inner_push_loop, ne, a3, Operand(zero_reg));  in Generate()

/external/v8/src/regexp/mips64/
  regexp-macro-assembler-mips64.cc
     261  __ Branch(&fallthrough, eq, a1, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
     270  BranchOrBacktrack(on_no_match, gt, t1, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
     363  __ mov(a3, zero_reg);  in CheckNotBackReferenceIgnoreCase()
     383  BranchOrBacktrack(on_no_match, eq, v0, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
     410  __ Branch(&fallthrough, eq, a1, Operand(zero_reg));  in CheckNotBackReference()
     419  BranchOrBacktrack(on_no_match, gt, t1, Operand(zero_reg));  in CheckNotBackReference()
     469  Operand rhs = (c == 0) ? Operand(zero_reg) : Operand(c);  in CheckCharacterAfterAnd()
     478  Operand rhs = (c == 0) ? Operand(zero_reg) : Operand(c);  in CheckNotCharacterAfterAnd()
     527  BranchOrBacktrack(on_bit_set, ne, a0, Operand(zero_reg));  in CheckBitInTable()
     607  BranchOrBacktrack(on_no_match, eq, a0, Operand(zero_reg));  in CheckSpecialCharacterClass()
    [all …]

/external/v8/src/regexp/mips/
  regexp-macro-assembler-mips.cc
     224  __ Branch(&fallthrough, eq, a1, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
     233  BranchOrBacktrack(on_no_match, gt, t5, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
     326  __ mov(a3, zero_reg);  in CheckNotBackReferenceIgnoreCase()
     346  BranchOrBacktrack(on_no_match, eq, v0, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
     373  __ Branch(&fallthrough, le, a1, Operand(zero_reg));  in CheckNotBackReference()
     382  BranchOrBacktrack(on_no_match, gt, t5, Operand(zero_reg));  in CheckNotBackReference()
     438  Operand rhs = (c == 0) ? Operand(zero_reg) : Operand(c);  in CheckCharacterAfterAnd()
     447  Operand rhs = (c == 0) ? Operand(zero_reg) : Operand(c);  in CheckNotCharacterAfterAnd()
     496  BranchOrBacktrack(on_bit_set, ne, a0, Operand(zero_reg));  in CheckBitInTable()
     576  BranchOrBacktrack(on_no_match, eq, a0, Operand(zero_reg));  in CheckSpecialCharacterClass()
    [all …]

/external/v8/src/wasm/baseline/mips64/
  liftoff-assembler-mips64.h
     485  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));  in emit_i32_divs()
     494  Operand(zero_reg));  in emit_i32_divs()
     501  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));  in emit_i32_divu()
     507  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));  in emit_i32_rems()
     513  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));  in emit_i32_remu()
     569  TurboAssembler::Branch(trap_div_by_zero, eq, rhs.gp(), Operand(zero_reg));  in emit_i64_divs()
     579  Operand(zero_reg));  in emit_i64_divs()
     588  TurboAssembler::Branch(trap_div_by_zero, eq, rhs.gp(), Operand(zero_reg));  in emit_i64_divu()
     596  TurboAssembler::Branch(trap_div_by_zero, eq, rhs.gp(), Operand(zero_reg));  in emit_i64_rems()
     604  TurboAssembler::Branch(trap_div_by_zero, eq, rhs.gp(), Operand(zero_reg));  in emit_i64_remu()
    [all …]

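The Liftoff hits above are all guard branches: every integer div/rem first compares the divisor with `zero_reg` and branches to the divide-by-zero trap, and signed division must additionally reject the `INT_MIN / -1` case, which the Wasm spec requires to trap as unrepresentable. A sketch of what those emitted checks amount to, written as ordinary C++ rather than the Liftoff/TurboAssembler API (the helper and enum names here are hypothetical):

```cpp
// What the emitted MIPS guards check before a Wasm i32.div_s is performed.
// Hypothetical helper; the real code branches to trap labels instead of
// returning a status value.
#include <cstdint>
#include <optional>

enum class Trap { kDivByZero, kDivUnrepresentable };

inline std::optional<Trap> CheckI32DivS(int32_t lhs, int32_t rhs) {
  if (rhs == 0) return Trap::kDivByZero;  // Branch(trap_div_by_zero, eq, rhs, zero_reg)
  if (lhs == INT32_MIN && rhs == -1) return Trap::kDivUnrepresentable;  // overflow guard
  return std::nullopt;  // safe to emit the actual division
}
```

The unsigned and remainder variants only need the divisor check, which matches the single `zero_reg` comparison listed for emit_i32_divu/rems/remu above.
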
/external/v8/src/compiler/mips64/
  code-generator-mips64.cc
      57  return zero_reg;  in InputOrZeroRegister()
     362  __ BranchShort(&binop, eq, i.TempRegister(1), Operand(zero_reg)); \
     384  __ BranchShort(&binop, eq, i.TempRegister(1), Operand(zero_reg)); \
     397  __ BranchShort(&exchange, eq, i.TempRegister(1), Operand(zero_reg)); \
     417  __ BranchShort(&exchange, eq, i.TempRegister(2), Operand(zero_reg)); \
     434  Operand(zero_reg)); \
     459  Operand(zero_reg)); \
     582  __ Jump(code, RelocInfo::CODE_TARGET, ne, kScratchReg, Operand(zero_reg));  in BailoutIfDeoptimized()
     868  Operand(zero_reg));  in AssembleArchInstruction()
     874  __ BranchShort(&done, eq, kScratchReg, Operand(zero_reg));  in AssembleArchInstruction()
    [all …]

/external/v8/src/wasm/baseline/mips/
  liftoff-assembler-mips.h
     144  assm->TurboAssembler::Move(tmp.high_gp(), zero_reg);  in ChangeEndiannessLoad()
     152  assm->TurboAssembler::Move(tmp.high_gp(), zero_reg);  in ChangeEndiannessLoad()
     559  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));  in emit_i32_divs()
     568  Operand(zero_reg));  in emit_i32_divs()
     575  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));  in emit_i32_divu()
     581  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));  in emit_i32_rems()
     587  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));  in emit_i32_remu()
     705  assm->TurboAssembler::Branch(&move, eq, amount, Operand(zero_reg));  in Emit64BitShiftOperation()
     913  TurboAssembler::Movz(dst.gp(), zero_reg, kScratchReg);  in emit_type_conversion()
     979  TurboAssembler::Move(dst.high_gp(), zero_reg);  in emit_type_conversion()
    [all …]

/external/v8/src/compiler/mips/
  code-generator-mips.cc
      55  return zero_reg;  in InputOrZeroRegister()
     352  __ BranchShort(&binop, eq, i.TempRegister(1), Operand(zero_reg)); \
     373  __ BranchShort(&binop, eq, i.TempRegister(1), Operand(zero_reg)); \
     386  __ BranchShort(&exchange, eq, i.TempRegister(1), Operand(zero_reg)); \
     405  __ BranchShort(&exchange, eq, i.TempRegister(2), Operand(zero_reg)); \
     422  Operand(zero_reg)); \
     446  Operand(zero_reg)); \
     569  __ Jump(code, RelocInfo::CODE_TARGET, ne, kScratchReg, Operand(zero_reg));  in BailoutIfDeoptimized()
     848  Operand(zero_reg));  in AssembleArchInstruction()
     855  __ BranchShort(&done, eq, kScratchReg, Operand(zero_reg));  in AssembleArchInstruction()
    [all …]

/external/swiftshader/third_party/LLVM/lib/Target/PTX/
  PTXInstrFormats.td
      34  (ops (i1 zero_reg), (i32 2))>;