/third_party/node/deps/v8/src/regexp/mips64/
regexp-macro-assembler-mips64.cc
   183  __ Ld(a0, register_location(reg));  in AdvanceRegister() local
   194  __ Ld(a0, MemOperand(frame_pointer(), kBacktrackCount));  in Backtrack() local
   231  __ Ld(a1, MemOperand(frame_pointer(), kStringStartMinusOne));  in CheckAtStart() local
   240  __ Ld(a1, MemOperand(frame_pointer(), kStringStartMinusOne));  in CheckNotAtStart() local
   265  __ Ld(a0, register_location(start_reg));  // Index of start of capture.  in CheckNotBackReferenceIgnoreCase() local
   266  __ Ld(a1, register_location(start_reg + 1));  // Index of end of capture.  in CheckNotBackReferenceIgnoreCase() local
   275  __ Ld(t1, MemOperand(frame_pointer(), kStringStartMinusOne));  in CheckNotBackReferenceIgnoreCase() local
   335  __ Ld(t1, register_location(start_reg));  // Index of start of capture.  in CheckNotBackReferenceIgnoreCase() local
   336  __ Ld(a2, register_location(start_reg + 1));  // Index of end of capture.  in CheckNotBackReferenceIgnoreCase() local
   398  __ Ld(a0, register_location(start_reg));  in CheckNotBackReference() local
   [all …]

/third_party/node/deps/v8/src/builtins/mips64/
builtins-mips64.cc
    88  __ Ld(scratch2, MemOperand(scratch2));  in Generate_PushArguments() local
    90  __ Ld(scratch2, MemOperand(scratch2));  in Generate_PushArguments() local
   135  __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));  in Generate_JSBuiltinsConstructStubHelper() local
   137  __ Ld(t3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));  in Generate_JSBuiltinsConstructStubHelper() local
   179  __ Ld(t2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));  in Generate_JSConstructStubGeneric() local
   238  __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));  in Generate_JSConstructStubGeneric() local
   239  __ Ld(a0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));  in Generate_JSConstructStubGeneric() local
   289  __ Ld(v0, MemOperand(sp, 0 * kPointerSize));  in Generate_JSConstructStubGeneric() local
   294  __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));  in Generate_JSConstructStubGeneric() local
   316  __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));  in Generate_JSConstructStubGeneric() local
   [all …]

/third_party/node/deps/v8/src/regexp/riscv64/
regexp-macro-assembler-riscv64.cc
   149  __ Ld(a0, register_location(reg));  in AdvanceRegister() local
   159  __ Ld(a0, MemOperand(frame_pointer(), kBacktrackCount));  in Backtrack() local
   193  __ Ld(a1, MemOperand(frame_pointer(), kStringStartMinusOne));  in CheckAtStart() local
   201  __ Ld(a1, MemOperand(frame_pointer(), kStringStartMinusOne));  in CheckNotAtStart() local
   274  __ Ld(a0, register_location(start_reg));  // Index of start of capture.  in CheckNotBackReferenceIgnoreCase() local
   275  __ Ld(a1, register_location(start_reg + 1));  // Index of end of capture.  in CheckNotBackReferenceIgnoreCase() local
   284  __ Ld(t1, MemOperand(frame_pointer(), kStringStartMinusOne));  in CheckNotBackReferenceIgnoreCase() local
   344  __ Ld(t1, register_location(start_reg));  // Index of start of capture.  in CheckNotBackReferenceIgnoreCase() local
   345  __ Ld(a2, register_location(start_reg + 1));  // Index of end of capture.  in CheckNotBackReferenceIgnoreCase() local
   392  __ Ld(end_of_input_address(), MemOperand(frame_pointer(), kInputEnd));  in CheckNotBackReferenceIgnoreCase() local
   [all …]

/third_party/node/deps/v8/src/builtins/riscv64/
builtins-riscv64.cc
    87  __ Ld(scratch2, MemOperand(scratch2));  in Generate_PushArguments() local
    89  __ Ld(scratch2, MemOperand(scratch2));  in Generate_PushArguments() local
   138  __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));  in Generate_JSBuiltinsConstructStubHelper() local
   140  __ Ld(kScratchReg, MemOperand(fp, ConstructFrameConstants::kLengthOffset));  in Generate_JSBuiltinsConstructStubHelper() local
   249  __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));  in Generate_JSConstructStubGeneric() local
   250  __ Ld(a0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));  in Generate_JSConstructStubGeneric() local
   294  __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));  in Generate_JSConstructStubGeneric() local
   310  __ Ld(a0, MemOperand(sp, 0 * kSystemPointerSize));  in Generate_JSConstructStubGeneric() local
   315  __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));  in Generate_JSConstructStubGeneric() local
   341  __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));  in Generate_JSConstructStubGeneric() local
   [all …]

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/ARM/
ARMParallelDSP.cpp
   340  if (auto *Ld = dyn_cast<LoadInst>(SExt->getOperand(0))) {  in IsNarrowSequence() local
   342  return LoadPairs.count(Ld) || OffsetLoads.count(Ld);  in IsNarrowSequence()
   363  auto *Ld = dyn_cast<LoadInst>(&I);  in RecordMemoryOps() local
   364  if (!Ld || !Ld->isSimple() ||  in RecordMemoryOps()
   365      !Ld->hasOneUse() || !isa<SExtInst>(Ld->user_back()))  in RecordMemoryOps()
   367  Loads.push_back(Ld);  in RecordMemoryOps()

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/Scalar/
LoopVersioningLICM.cpp
   353  LoadInst *Ld = dyn_cast<LoadInst>(I);  in instructionSafeForVersioning() local
   354  if (!Ld || !Ld->isSimple()) {  in instructionSafeForVersioning()
   359  Value *Ptr = Ld->getPointerOperand();  in instructionSafeForVersioning()

LoopUnrollAndJamPass.cpp
   260  if (auto *Ld = dyn_cast<LoadInst>(&I)) {  in computeUnrollAndJamCount() local
   261  Value *V = Ld->getPointerOperand();  in computeUnrollAndJamCount()

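The three loop passes above (ARMParallelDSP, LoopVersioningLICM, LoopUnrollAndJamPass) all gate on the same idiom before touching a memory access: dyn_cast the instruction to LoadInst, then bail out unless the load is "simple", i.e. neither volatile nor atomic. A minimal sketch of that filter against the LLVM C++ API these files use (LLVM 10 vintage); the helper name collectSimpleLoads is invented for illustration and appears in none of these passes:

// Collect the loads a transform may legally reorder: the shared
// dyn_cast<LoadInst> + isSimple() filter seen in the hits above.
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Instructions.h"
#include "llvm/Support/Casting.h"

using namespace llvm;

static void collectSimpleLoads(BasicBlock &BB,
                               SmallVectorImpl<LoadInst *> &Loads) {
  for (Instruction &I : BB) {
    auto *Ld = dyn_cast<LoadInst>(&I);
    if (!Ld)
      continue;            // not a load at all
    if (!Ld->isSimple())
      continue;            // volatile or atomic: unsafe to move or widen
    Loads.push_back(Ld);   // Ld->getPointerOperand() is the accessed address
  }
}

ARMParallelDSP additionally requires hasOneUse() and a sign-extending user before it pairs loads, and LoopAccessAnalysis (further down this list) applies the same isSimple() test to every load in a loop body.
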
/third_party/node/deps/v8/src/baseline/mips64/
baseline-assembler-mips64-inl.h
   169  __ Ld(type, FieldMemOperand(map, Map::kInstanceTypeOffset));  in JumpIfInstanceType()
   177  __ Ld(scratch, operand);  in JumpIfPointer()
   199  __ Ld(scratch, operand);  in JumpIfTagged()
   207  __ Ld(scratch, operand);  in JumpIfTagged()
   352  __ Ld(output, FieldMemOperand(source, offset));
   356  __ Ld(output, FieldMemOperand(source, offset));
   360  __ Ld(output, FieldMemOperand(source, offset));

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/CodeGen/
GCRootLowering.cpp
   217  Value *Ld = new LoadInst(CI->getType(), CI->getArgOperand(1), "", CI);  in DoLowering() local
   218  Ld->takeName(CI);  in DoLowering()
   219  CI->replaceAllUsesWith(Ld);  in DoLowering()

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/SystemZ/
SystemZTargetTransformInfo.cpp
   866  if (LoadInst *Ld = dyn_cast<LoadInst>(I->getOperand(0)))  in getCmpSelInstrCost() local
   868  if (!Ld->hasOneUse() && Ld->getParent() == I->getParent() &&  in getCmpSelInstrCost()
   909  isFoldableLoad(const LoadInst *Ld, const Instruction *&FoldedValue) {  in isFoldableLoad() argument
   910  if (!Ld->hasOneUse())  in isFoldableLoad()
   912  FoldedValue = Ld;  in isFoldableLoad()
   913  const Instruction *UserI = cast<Instruction>(*Ld->user_begin());  in isFoldableLoad()
   914  unsigned LoadedBits = getScalarSizeInBits(Ld->getType());  in isFoldableLoad()

SystemZTargetTransformInfo.h
    90  bool isFoldableLoad(const LoadInst *Ld, const Instruction *&FoldedValue);

/third_party/node/deps/v8/src/codegen/mips64/
macro-assembler-mips64.h
   224  inline void Move(Register output, MemOperand operand) { Ld(output, operand); }  in Move()
   297  void Ld(Register rd, const MemOperand& rs);
   395  Ld(dst, MemOperand(sp, 0));  in pop()
   403  Ld(src2, MemOperand(sp, 0 * kPointerSize));  in Pop()
   404  Ld(src1, MemOperand(sp, 1 * kPointerSize));  in Pop()
   410  Ld(src3, MemOperand(sp, 0 * kPointerSize));  in Pop()
   411  Ld(src2, MemOperand(sp, 1 * kPointerSize));  in Pop()
   412  Ld(src1, MemOperand(sp, 2 * kPointerSize));  in Pop()
   973  Ld(dest, MemOperand(sp, 0));  in LoadReceiver()
  1265  Ld(scratch, MemOperand(scratch));  in GenerateSwitchTable()
   [all …]

macro-assembler-mips64.cc
   100  Ld(destination, MemOperand(s6, RootRegisterOffsetForRootIndex(index)));  in LoadRoot()
   107  Ld(destination, MemOperand(s6, RootRegisterOffsetForRootIndex(index)));  in LoadRoot()
   283  Ld(scratch, MemOperand(address));  in CallRecordWriteStub()
  1277  Ld(rd, rs);
  1432  void TurboAssembler::Ld(Register rd, const MemOperand& rs) {  in Ld() function in v8::internal::TurboAssembler
  1937  Ld(ToRegister(i), MemOperand(sp, stack_offset));
  2632  Ld(scratch, src);
  2730  Ld(scratch, src);
  3349  Ld(result, MemOperand(sp, 0));
  4231  Ld(destination,
   [all …]

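The two macro-assembler files above hold the definition behind most hits in this list: on MIPS64 (and on RISC-V 64, below), Ld is a TurboAssembler pseudo-instruction wrapping the 64-bit load-doubleword, and composite operations such as Pop are emitted as one Ld per register from consecutive stack slots. A standalone toy sketch of that shape, assuming a hypothetical ToyAssembler that only prints what it would emit; this is not V8's real API, and the single final stack-pointer adjustment is an assumption, since the search matched only the Ld lines:

#include <cstdint>
#include <cstdio>
#include <string>

// Toy stand-in for a MemOperand: base register plus byte offset.
struct MemOperand {
  std::string base;
  int64_t offset;
};

constexpr int64_t kPointerSize = 8;  // 64-bit stack slots, as on mips64

class ToyAssembler {
 public:
  // Models the Ld pseudo-instruction: load a 64-bit word from memory.
  void Ld(const std::string& rd, const MemOperand& rs) {
    std::printf("  ld     %s, %lld(%s)\n", rd.c_str(),
                static_cast<long long>(rs.offset), rs.base.c_str());
  }
  void Daddiu(const std::string& rd, const std::string& rs, int64_t imm) {
    std::printf("  daddiu %s, %s, %lld\n", rd.c_str(), rs.c_str(),
                static_cast<long long>(imm));
  }
  // Two-register Pop, mirroring lines 403-404 of macro-assembler-mips64.h:
  // each register is loaded from its slot, then sp is bumped once for both.
  void Pop(const std::string& src1, const std::string& src2) {
    Ld(src2, MemOperand{"sp", 0 * kPointerSize});
    Ld(src1, MemOperand{"sp", 1 * kPointerSize});
    Daddiu("sp", "sp", 2 * kPointerSize);
  }
};

int main() {
  ToyAssembler masm;
  masm.Pop("a1", "a2");  // prints: ld a2, 0(sp); ld a1, 8(sp); daddiu sp, sp, 16
}

The riscv64 files below show the identical structure, with kSystemPointerSize in place of kPointerSize.
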
/third_party/node/deps/v8/src/codegen/riscv64/
macro-assembler-riscv64.h
   202  inline void Move(Register output, MemOperand operand) { Ld(output, operand); }  in Move()
   286  void Ld(Register rd, const MemOperand& rs);
   382  Ld(dst, MemOperand(sp, 0));  in pop()
   390  Ld(src2, MemOperand(sp, 0 * kSystemPointerSize));  in Pop()
   391  Ld(src1, MemOperand(sp, 1 * kSystemPointerSize));  in Pop()
   397  Ld(src3, MemOperand(sp, 0 * kSystemPointerSize));  in Pop()
   398  Ld(src2, MemOperand(sp, 1 * kSystemPointerSize));  in Pop()
   399  Ld(src1, MemOperand(sp, 2 * kSystemPointerSize));  in Pop()
  1049  Ld(dest, MemOperand(sp, 0));  in LoadReceiver()

macro-assembler-riscv64.cc
    97  Ld(destination,  in LoadRoot()
   106  Ld(destination,  in LoadRoot()
  1426  void TurboAssembler::Ld(Register rd, const MemOperand& rs) {  in Ld() function in v8::internal::TurboAssembler
  1709  Ld(reg, MemOperand(sp, stack_offset)); \  in MultiPop()
  3189  Ld(destination, MemOperand(kRootRegister, offset));  in LoadRootRelative()
  3260  Ld(t6, MemOperand(kRootRegister, offset));  in Jump()
  3367  Ld(builtin, MemOperand(builtin, IsolateData::builtin_entry_table_offset()));  in LoadEntryFromBuiltinIndex()
  3397  Ld(destination, EntryFromBuiltinAsOperand(builtin));  in LoadEntryFromBuiltin()
  3410  Ld(t6, MemOperand(scratch, kInstrSize * 4));  in PatchAndJump()
  3569  Ld(scratch2, MemOperand(scratch2));  in PushArray()
   [all …]

/third_party/node/deps/v8/src/baseline/riscv64/
baseline-assembler-riscv64-inl.h
   167  __ Ld(type, FieldMemOperand(map, Map::kInstanceTypeOffset));  in JumpIfInstanceType()
   175  __ Ld(temp, operand);  in JumpIfPointer()
   199  __ Ld(scratch, operand);  in JumpIfTagged()
   208  __ Ld(scratch, operand);  in JumpIfTagged()

/third_party/node/deps/v8/src/compiler/backend/mips64/
code-generator-mips64.cc
   507  __ Ld(ra, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));  in AssemblePrepareTailCall() local
   508  __ Ld(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));  in AssemblePrepareTailCall() local
   561  __ Ld(kScratchReg, MemOperand(kJavaScriptCallCodeStartRegister, offset));  in BailoutIfDeoptimized() local
   659  __ Ld(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));  in AssembleArchInstruction() local
   664  __ Ld(a2, FieldMemOperand(func, JSFunction::kCodeOffset));  in AssembleArchInstruction() local
   823  __ Ld(i.OutputRegister(), MemOperand(fp, 0));  in AssembleArchInstruction() local
  1648  __ Ld(i.OutputRegister(), i.MemoryOperand());  in AssembleArchInstruction() local
  1746  __ Ld(i.OutputRegister(0), MemOperand(fp, offset));  in AssembleArchInstruction() local
  1787  __ Ld(kScratchReg, i.MemoryOperand());  in AssembleArchInstruction() local
  1797  __ Ld(kScratchReg, i.MemoryOperand());  in AssembleArchInstruction() local
   [all …]

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Analysis/
LoopAccessAnalysis.cpp
  1851  auto *Ld = dyn_cast<LoadInst>(&I);  in analyzeLoop() local
  1852  if (!Ld) {  in analyzeLoop()
  1853  recordAnalysis("CantVectorizeInstruction", Ld)  in analyzeLoop()
  1858  if (!Ld->isSimple() && !IsAnnotatedParallel) {  in analyzeLoop()
  1859  recordAnalysis("NonSimpleLoad", Ld)  in analyzeLoop()
  1866  Loads.push_back(Ld);  in analyzeLoop()
  1867  DepChecker->addAccess(Ld);  in analyzeLoop()
  1869  collectStridedAccess(Ld);  in analyzeLoop()

/third_party/node/deps/v8/src/wasm/baseline/mips64/
liftoff-assembler-mips64.h
   109  assm->Ld(dst.gp(), src);  in Load()
   314  Ld(scratch, MemOperand(fp, 8));  in PrepareTailCall()
   316  Ld(scratch, MemOperand(fp, 0));  in PrepareTailCall()
   322  Ld(scratch, MemOperand(sp, i * 8));  in PrepareTailCall()
   379  Ld(stack_limit,  in PatchPrepareStackFrame()
   382  Ld(stack_limit, MemOperand(stack_limit));  in PatchPrepareStackFrame()
   449  Ld(dst, liftoff::GetInstanceOperand());  in LoadInstanceFromFrame()
   463  Ld(dst, MemOperand(instance, offset));  in LoadFromInstance()
   474  Ld(dst, MemOperand(instance, offset));  in LoadTaggedPointerFromInstance()
   489  Ld(dst, src_op);  in LoadTaggedPointer()
   [all …]

/third_party/node/deps/v8/src/wasm/baseline/riscv64/
liftoff-assembler-riscv64.h
   108  assm->Ld(dst.gp(), src);  in Load()
   299  Ld(scratch, MemOperand(fp, 8));  in PrepareTailCall()
   301  Ld(scratch, MemOperand(fp, 0));  in PrepareTailCall()
   307  Ld(scratch, MemOperand(sp, i * 8));  in PrepareTailCall()
   363  Ld(stack_limit,  in PatchPrepareStackFrame()
   366  Ld(stack_limit, MemOperand(stack_limit));  in PatchPrepareStackFrame()
   441  Ld(dst, liftoff::GetInstanceOperand());  in LoadInstanceFromFrame()
   456  Ld(dst, MemOperand(src));  in LoadFromInstance()
   487  Ld(dst, src_op);  in LoadFullPointer()
   552  TurboAssembler::Ld(dst.gp(), src_op);  in Load()
   [all …]

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/X86/
X86InstrFragmentsSIMD.td
   829  auto *Ld = cast<LoadSDNode>(N);
   830  return Ld->getAlignment() >= Ld->getMemoryVT().getStoreSize();
   884  auto *Ld = cast<LoadSDNode>(N);
   886  Ld->getAlignment() >= Ld->getMemoryVT().getStoreSize();
  1115  auto *Ld = cast<MaskedLoadSDNode>(N);
  1116  return Ld->getAlignment() >= Ld->getValueType(0).getStoreSize();

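The X86InstrFragmentsSIMD.td hits are not ordinary C++ statements but PatFrag predicates: TableGen pattern fragments whose embedded C++ decides whether a load node may be matched. The check visible above accepts a load only when its alignment is at least the in-memory size of the loaded type, i.e. the access is naturally aligned. The same test pulled out into a free function over LLVM 10 SelectionDAG types; the wrapper name isNaturallyAlignedLoad is invented, while the body is verbatim from the fragment at line 829:

#include "llvm/CodeGen/SelectionDAGNodes.h"

using namespace llvm;

// True when the load's alignment covers the full in-memory footprint of the
// value it loads, so an alignment-requiring SIMD instruction may be selected.
static bool isNaturallyAlignedLoad(const SDNode *N) {
  auto *Ld = cast<LoadSDNode>(N);  // the PatFrag only matches load nodes
  return Ld->getAlignment() >= Ld->getMemoryVT().getStoreSize();
}
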
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/
AArch64ISelDAGToDAG.cpp
  1290  SDNode *Ld = CurDAG->getMachineNode(Opc, dl, ResTys, Ops);  in SelectLoad() local
  1291  SDValue SuperReg = SDValue(Ld, 0);  in SelectLoad()
  1296  ReplaceUses(SDValue(N, NumVecs), SDValue(Ld, 1));  in SelectLoad()
  1300  CurDAG->setNodeMemRefs(cast<MachineSDNode>(Ld), {MemOp});  in SelectLoad()
  1318  SDNode *Ld = CurDAG->getMachineNode(Opc, dl, ResTys, Ops);  in SelectPostLoad() local
  1321  ReplaceUses(SDValue(N, NumVecs), SDValue(Ld, 0));  in SelectPostLoad()
  1324  SDValue SuperReg = SDValue(Ld, 1);  in SelectPostLoad()
  1333  ReplaceUses(SDValue(N, NumVecs + 1), SDValue(Ld, 2));  in SelectPostLoad()
  1435  SDNode *Ld = CurDAG->getMachineNode(Opc, dl, ResTys, Ops);  in SelectLoadLane() local
  1436  SDValue SuperReg = SDValue(Ld, 0);  in SelectLoadLane()
   [all …]

/third_party/node/deps/v8/src/compiler/backend/riscv64/
code-generator-riscv64.cc
   557  __ Ld(ra, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));  in AssemblePrepareTailCall() local
   558  __ Ld(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));  in AssemblePrepareTailCall() local
   854  __ Ld(i.OutputRegister(), MemOperand(fp, 0));  in AssembleArchInstruction() local
  1617  __ Ld(i.OutputRegister(), i.MemoryOperand());  in AssembleArchInstruction() local
  1713  __ Ld(i.OutputRegister(0), MemOperand(fp, offset));  in AssembleArchInstruction() local
  1768  ASSEMBLE_ATOMIC_LOAD_INTEGER(Ld);  in AssembleArchInstruction()
  1980  __ Ld(kScratchReg, i.MemoryOperand());  in AssembleArchInstruction() local
  2000  __ Ld(kScratchReg, i.MemoryOperand());  in AssembleArchInstruction() local
  2008  __ Ld(kScratchReg, i.MemoryOperand());  in AssembleArchInstruction() local
  2027  __ Ld(kScratchReg, i.MemoryOperand());  in AssembleArchInstruction() local
   [all …]

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/Utils/
LoopUnrollAndJam.cpp
   596  if (auto *Ld = dyn_cast<LoadInst>(&I)) {  in getLoadsAndStores() local
   597  if (!Ld->isSimple())  in getLoadsAndStores()

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/Sparc/
SparcISelLowering.cpp
  3388  LoadSDNode *Ld = cast<LoadSDNode>(N);  in ReplaceNodeResults() local
  3391  if (Ld->getValueType(0) != MVT::i64 || Ld->getMemoryVT() != MVT::i64)  in ReplaceNodeResults()
  3396  Ld->getExtensionType(), dl, MVT::v2i32, Ld->getChain(),  in ReplaceNodeResults()
  3397  Ld->getBasePtr(), Ld->getPointerInfo(), MVT::v2i32, Ld->getAlignment(),  in ReplaceNodeResults()
  3398  Ld->getMemOperand()->getFlags(), Ld->getAAInfo());  in ReplaceNodeResults()