/third_party/node/deps/v8/src/regexp/riscv64/
D | regexp-macro-assembler-riscv64.cc |
    149  __ Ld(a0, register_location(reg));  in AdvanceRegister() local
    159  __ Ld(a0, MemOperand(frame_pointer(), kBacktrackCount));  in Backtrack() local
    193  __ Ld(a1, MemOperand(frame_pointer(), kStringStartMinusOne));  in CheckAtStart() local
    201  __ Ld(a1, MemOperand(frame_pointer(), kStringStartMinusOne));  in CheckNotAtStart() local
    274  __ Ld(a0, register_location(start_reg));  // Index of start of capture.  in CheckNotBackReferenceIgnoreCase() local
    275  __ Ld(a1, register_location(start_reg + 1));  // Index of end of capture.  in CheckNotBackReferenceIgnoreCase() local
    284  __ Ld(t1, MemOperand(frame_pointer(), kStringStartMinusOne));  in CheckNotBackReferenceIgnoreCase() local
    344  __ Ld(t1, register_location(start_reg));  // Index of start of capture.  in CheckNotBackReferenceIgnoreCase() local
    345  __ Ld(a2, register_location(start_reg + 1));  // Index of end of capture.  in CheckNotBackReferenceIgnoreCase() local
    392  __ Ld(end_of_input_address(), MemOperand(frame_pointer(), kInputEnd));  in CheckNotBackReferenceIgnoreCase() local
    [all …]

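A note for reading the V8 hits above and below: the leading `__` in each snippet is V8's macro-assembler shorthand, and `Ld` is the 64-bit load on these ports; the trailing `in Function() local` names the enclosing function. A minimal compilable sketch of the convention; the types and the printed encoding are stand-ins, not V8's real classes:

```cpp
// Sketch of V8's ACCESS_MASM convention (stand-in types, illustrative only).
// In the real sources, "#define __ ACCESS_MASM(masm_)" makes "__ Ld(...)"
// expand to "masm_->Ld(...)" on the enclosing MacroAssembler.
#include <cstdio>

struct MemOperand {            // stand-in for v8::internal::MemOperand
  int base;
  int offset;
};

struct MacroAssembler {        // stand-in for v8::internal::MacroAssembler
  // On the MIPS64/RISCV64 ports, Ld loads a 64-bit word: rd <- [base + offset].
  void Ld(int rd, const MemOperand& mem) {
    std::printf("ld r%d, %d(r%d)\n", rd, mem.offset, mem.base);
  }
};

#define ACCESS_MASM(masm) (masm)->
#define __ ACCESS_MASM(masm_)

void EmitFrameLoad(MacroAssembler* masm_) {
  // Reads like the hits above, e.g. "__ Ld(a0, MemOperand(fp, offset))".
  __ Ld(/*a0=*/10, MemOperand{/*fp=*/30, 16});
}
```
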
/third_party/node/deps/v8/src/regexp/mips64/
D | regexp-macro-assembler-mips64.cc |
    183  __ Ld(a0, register_location(reg));  in AdvanceRegister() local
    194  __ Ld(a0, MemOperand(frame_pointer(), kBacktrackCount));  in Backtrack() local
    231  __ Ld(a1, MemOperand(frame_pointer(), kStringStartMinusOne));  in CheckAtStart() local
    240  __ Ld(a1, MemOperand(frame_pointer(), kStringStartMinusOne));  in CheckNotAtStart() local
    265  __ Ld(a0, register_location(start_reg));  // Index of start of capture.  in CheckNotBackReferenceIgnoreCase() local
    266  __ Ld(a1, register_location(start_reg + 1));  // Index of end of capture.  in CheckNotBackReferenceIgnoreCase() local
    275  __ Ld(t1, MemOperand(frame_pointer(), kStringStartMinusOne));  in CheckNotBackReferenceIgnoreCase() local
    335  __ Ld(t1, register_location(start_reg));  // Index of start of capture.  in CheckNotBackReferenceIgnoreCase() local
    336  __ Ld(a2, register_location(start_reg + 1));  // Index of end of capture.  in CheckNotBackReferenceIgnoreCase() local
    398  __ Ld(a0, register_location(start_reg));  in CheckNotBackReference() local
    [all …]

/third_party/node/deps/v8/src/builtins/mips64/
D | builtins-mips64.cc |
    88   __ Ld(scratch2, MemOperand(scratch2));  in Generate_PushArguments() local
    90   __ Ld(scratch2, MemOperand(scratch2));  in Generate_PushArguments() local
    135  __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));  in Generate_JSBuiltinsConstructStubHelper() local
    137  __ Ld(t3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));  in Generate_JSBuiltinsConstructStubHelper() local
    179  __ Ld(t2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));  in Generate_JSConstructStubGeneric() local
    238  __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));  in Generate_JSConstructStubGeneric() local
    239  __ Ld(a0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));  in Generate_JSConstructStubGeneric() local
    289  __ Ld(v0, MemOperand(sp, 0 * kPointerSize));  in Generate_JSConstructStubGeneric() local
    294  __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));  in Generate_JSConstructStubGeneric() local
    316  __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));  in Generate_JSConstructStubGeneric() local
    [all …]

/third_party/node/deps/v8/src/builtins/riscv64/
D | builtins-riscv64.cc |
    87   __ Ld(scratch2, MemOperand(scratch2));  in Generate_PushArguments() local
    89   __ Ld(scratch2, MemOperand(scratch2));  in Generate_PushArguments() local
    138  __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));  in Generate_JSBuiltinsConstructStubHelper() local
    140  __ Ld(kScratchReg, MemOperand(fp, ConstructFrameConstants::kLengthOffset));  in Generate_JSBuiltinsConstructStubHelper() local
    249  __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));  in Generate_JSConstructStubGeneric() local
    250  __ Ld(a0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));  in Generate_JSConstructStubGeneric() local
    294  __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));  in Generate_JSConstructStubGeneric() local
    310  __ Ld(a0, MemOperand(sp, 0 * kSystemPointerSize));  in Generate_JSConstructStubGeneric() local
    315  __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));  in Generate_JSConstructStubGeneric() local
    341  __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));  in Generate_JSConstructStubGeneric() local
    [all …]

/third_party/node/deps/v8/src/compiler/backend/mips64/
D | code-generator-mips64.cc |
    507   __ Ld(ra, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));  in AssemblePrepareTailCall() local
    508   __ Ld(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));  in AssemblePrepareTailCall() local
    561   __ Ld(kScratchReg, MemOperand(kJavaScriptCallCodeStartRegister, offset));  in BailoutIfDeoptimized() local
    659   __ Ld(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));  in AssembleArchInstruction() local
    664   __ Ld(a2, FieldMemOperand(func, JSFunction::kCodeOffset));  in AssembleArchInstruction() local
    823   __ Ld(i.OutputRegister(), MemOperand(fp, 0));  in AssembleArchInstruction() local
    1648  __ Ld(i.OutputRegister(), i.MemoryOperand());  in AssembleArchInstruction() local
    1746  __ Ld(i.OutputRegister(0), MemOperand(fp, offset));  in AssembleArchInstruction() local
    1787  __ Ld(kScratchReg, i.MemoryOperand());  in AssembleArchInstruction() local
    1797  __ Ld(kScratchReg, i.MemoryOperand());  in AssembleArchInstruction() local
    [all …]

/third_party/node/deps/v8/src/compiler/backend/riscv64/
D | code-generator-riscv64.cc |
    557   __ Ld(ra, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));  in AssemblePrepareTailCall() local
    558   __ Ld(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));  in AssemblePrepareTailCall() local
    854   __ Ld(i.OutputRegister(), MemOperand(fp, 0));  in AssembleArchInstruction() local
    1617  __ Ld(i.OutputRegister(), i.MemoryOperand());  in AssembleArchInstruction() local
    1713  __ Ld(i.OutputRegister(0), MemOperand(fp, offset));  in AssembleArchInstruction() local
    1980  __ Ld(kScratchReg, i.MemoryOperand());  in AssembleArchInstruction() local
    2000  __ Ld(kScratchReg, i.MemoryOperand());  in AssembleArchInstruction() local
    2008  __ Ld(kScratchReg, i.MemoryOperand());  in AssembleArchInstruction() local
    2027  __ Ld(kScratchReg, i.MemoryOperand());  in AssembleArchInstruction() local
    3929  __ Ld(  in AssembleConstructFrame() local
    [all …]

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/CodeGen/
D | GCRootLowering.cpp |
    217  Value *Ld = new LoadInst(CI->getType(), CI->getArgOperand(1), "", CI);  in DoLowering() local

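Unlike the hits that follow, the GCRootLowering hit constructs a load rather than finding one. In LLVM 10, `LoadInst` has a constructor taking the result type, the pointer operand, a name, and an insertion point; a small sketch of that shape (the helper name is invented):

```cpp
// Minimal sketch: materialize "%reload = load <Ty>, <Ty>* Ptr" immediately
// before an existing instruction, as the GCRootLowering hit does.
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"

using namespace llvm;

static Value *reloadBefore(Type *Ty, Value *Ptr, Instruction *InsertPt) {
  // LLVM 10 constructor: LoadInst(Type*, Value*, const Twine&, Instruction*).
  return new LoadInst(Ty, Ptr, "reload", InsertPt);
}
```
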
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/Scalar/
D | LoopVersioningLICM.cpp |
    353  LoadInst *Ld = dyn_cast<LoadInst>(I);  in instructionSafeForVersioning() local

D | LoopUnrollAndJamPass.cpp |
    260  if (auto *Ld = dyn_cast<LoadInst>(&I)) {  in computeUnrollAndJamCount() local

D | GVNHoist.cpp |
    981  if (auto *Ld = dyn_cast<LoadInst>(Repl)) {  in makeGepOperandsAvailable() local

D | LoopInterchange.cpp |
    98  if (auto *Ld = dyn_cast<LoadInst>(&I)) {  in populateDependencyMatrix() local

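The Scalar-transform hits above all share one IR-level idiom: `dyn_cast<LoadInst>` yields the instruction as a load, or null if it is anything else. A compilable sketch of that shape (the function itself is illustrative, not from any of these passes):

```cpp
// Collect the simple (non-volatile, non-atomic) loads of a function, using
// the same dyn_cast<LoadInst> pattern as the passes listed above.
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Instructions.h"

using namespace llvm;

static SmallVector<LoadInst *, 8> collectSimpleLoads(Function &F) {
  SmallVector<LoadInst *, 8> Loads;
  for (BasicBlock &BB : F)
    for (Instruction &I : BB)
      if (auto *Ld = dyn_cast<LoadInst>(&I))  // null when I is not a load
        if (Ld->isSimple())                   // skip volatile/atomic loads
          Loads.push_back(Ld);
  return Loads;
}
```
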
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/ARM/
D | ARMParallelDSP.cpp |
    340  if (auto *Ld = dyn_cast<LoadInst>(SExt->getOperand(0))) {  in IsNarrowSequence() local
    363  auto *Ld = dyn_cast<LoadInst>(&I);  in RecordMemoryOps() local

D | ARMISelLowering.cpp |
    2562   } else if (LoadSDNode *Ld = dyn_cast<LoadSDNode>(Arg)) {  in MatchingStackOffset() local
    5119   if (LoadSDNode *Ld = dyn_cast<LoadSDNode>(Op))  in bitcastf32Toi32() local
    5137   if (LoadSDNode *Ld = dyn_cast<LoadSDNode>(Op)) {  in expandf64Toi32() local
    15020  if (auto *Ld = dyn_cast<MaskedLoadSDNode>(ExtVal.getOperand(0))) {  in isVectorLoadExtDesirable() local

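From ARMISelLowering.cpp onward, many hits move from IR to the SelectionDAG, where the equivalent check goes through `LoadSDNode`. A hedged sketch of the guard such DAG-level uses typically start with (the predicate is illustrative, not code from these files):

```cpp
// Return Val's defining node as a plain load: unindexed and non-extending.
#include "llvm/CodeGen/SelectionDAGNodes.h"

using namespace llvm;

static LoadSDNode *getPlainLoad(SDValue Val) {
  auto *Ld = dyn_cast<LoadSDNode>(Val.getNode());
  if (!Ld)
    return nullptr;                                 // not a load at all
  if (Ld->isIndexed())                              // pre/post-inc addressing
    return nullptr;
  if (Ld->getExtensionType() != ISD::NON_EXTLOAD)   // sext/zext/any-ext load
    return nullptr;
  return Ld;
}
```
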
D | ARMISelDAGToDAG.cpp |
    4049  SDNode *Ld = CurDAG->getMachineNode(NewOpc, dl, ResTys, Ops);  in Select() local

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/SystemZ/
D | SystemZTargetTransformInfo.cpp |
    866  if (LoadInst *Ld = dyn_cast<LoadInst>(I->getOperand(0)))  in getCmpSelInstrCost() local
    909  isFoldableLoad(const LoadInst *Ld, const Instruction *&FoldedValue) {  in isFoldableLoad()

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/
D | AArch64ISelDAGToDAG.cpp |
    1290  SDNode *Ld = CurDAG->getMachineNode(Opc, dl, ResTys, Ops);  in SelectLoad() local
    1318  SDNode *Ld = CurDAG->getMachineNode(Opc, dl, ResTys, Ops);  in SelectPostLoad() local
    1435  SDNode *Ld = CurDAG->getMachineNode(Opc, dl, ResTys, Ops);  in SelectLoadLane() local
    1479  SDNode *Ld = CurDAG->getMachineNode(Opc, dl, ResTys, Ops);  in SelectPostLoadLane() local
    3149  SDNode *Ld = CurDAG->getMachineNode(Op, DL, MVT::i64, MVT::i64,  in Select() local

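The ARMISelDAGToDAG and AArch64ISelDAGToDAG hits share the selection-time pattern: build a target machine node for the load with `CurDAG->getMachineNode` and hand back its results. A sketch of that shape; the opcode and operand list are placeholders, since real selectors pick them from per-target tables as in `SelectLoad()`/`SelectPostLoad()` above:

```cpp
// Selection-time sketch: wrap an ISD::LOAD in a target machine node.
#include "llvm/CodeGen/SelectionDAG.h"
#include "llvm/CodeGen/SelectionDAGNodes.h"

using namespace llvm;

static SDNode *selectSimpleLoad(SelectionDAG *CurDAG, unsigned MachineOpc,
                                SDNode *N) {
  auto *L = cast<LoadSDNode>(N);
  SDLoc dl(N);
  EVT ResTys[] = {N->getValueType(0), MVT::Other};   // loaded value + chain
  SDValue Ops[] = {L->getBasePtr(), L->getChain()};
  SDNode *Ld = CurDAG->getMachineNode(MachineOpc, dl, ResTys, Ops);
  return Ld;
}
```
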
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Transforms/Utils/
D | LoopUnrollAndJam.cpp |
    596  if (auto *Ld = dyn_cast<LoadInst>(&I)) {  in getLoadsAndStores() local

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/Hexagon/
D | HexagonCommonGEP.cpp |
    1028  if (LoadInst *Ld = dyn_cast<LoadInst>(R)) {  in separateConstantChains() local

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Analysis/
D | LoopAccessAnalysis.cpp |
    1851  auto *Ld = dyn_cast<LoadInst>(&I);  in analyzeLoop() local

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/X86/
D | X86ISelLowering.cpp |
    4417   } else if (LoadSDNode *Ld = dyn_cast<LoadSDNode>(Arg)) {  in MatchingStackOffset() local
    8011   static bool findEltLoadSrc(SDValue Elt, LoadSDNode *&Ld, int64_t &ByteOffset) {  in findEltLoadSrc()
    8138   LoadSDNode *Ld = Loads[EltIdx];  in EltsFromConsecutiveLoads() local
    8439   SDValue Ld = BVOp->getSplatValue(&UndefElements);  in lowerBuildVectorAsBroadcast() local
    9947   SDValue Ld = DAG.getLoad(VT, dl, DAG.getEntryNode(), LegalDAGConstVec, MPI);  in LowerBUILD_VECTOR() local
    12802  LoadSDNode *Ld = cast<LoadSDNode>(V);  in lowerShuffleAsBroadcast() local
    22765  LoadSDNode *Ld = cast<LoadSDNode>(Op.getNode());  in LowerLoad() local
    29399  SDValue Ld = DAG.getMemIntrinsicNode(X86ISD::VZEXT_LOAD, dl, Tys, Ops,  in ReplaceNodeResults() local
    29548  auto *Ld = cast<LoadSDNode>(N);  in ReplaceNodeResults() local
    40011  static SDValue getIndexFromUnindexedLoad(LoadSDNode *Ld) {  in getIndexFromUnindexedLoad()
    [all …]

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/CodeGen/SelectionDAG/
D | DAGCombiner.cpp |
    9546   MaskedLoadSDNode *Ld = dyn_cast<MaskedLoadSDNode>(N0);  in tryToFoldExtOfMaskedLoad() local
    15541  auto *Ld = cast<LoadSDNode>(Val);  in getStoreMergeCandidates() local
    16035  LoadSDNode *Ld = cast<LoadSDNode>(Val);  in MergeConsecutiveStores() local
    16250  LoadSDNode *Ld = cast<LoadSDNode>(LoadNodes[i].MemNode);  in MergeConsecutiveStores() local
    16477  if (LoadSDNode *Ld = dyn_cast<LoadSDNode>(Value)) {  in visitSTORE() local
    18457  auto *Ld = dyn_cast<LoadSDNode>(Extract->getOperand(0));  in narrowExtractedVectorLoad() local

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AMDGPU/
D | AMDGPUISelDAGToDAG.cpp |
    2692  auto Ld = cast<LoadSDNode>(N);  in isUniformLoad() local

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/Sparc/
D | SparcISelLowering.cpp |
    3388  LoadSDNode *Ld = cast<LoadSDNode>(N);  in ReplaceNodeResults() local

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/PowerPC/
D | PPCISelLowering.cpp |
    8232   SDValue Ld;  in LowerINT_TO_FP() local
    13365  SDValue Ld = DAG.getMemIntrinsicNode(PPCISD::LXSIZX, dl,  in combineFPToIntToFP() local

/third_party/node/deps/v8/src/codegen/mips64/
D | macro-assembler-mips64.cc |
    1432  void TurboAssembler::Ld(Register rd, const MemOperand& rs) {  in CallRecordWriteStub() function in v8::internal::TurboAssembler

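This last hit is the MIPS64 definition behind the `__ Ld(...)` calls in the V8 files above. The hardware `ld` instruction encodes only a signed 16-bit offset, so a wrapper of this kind folds larger offsets through a scratch register before emitting the load. A simplified sketch of that shape with stand-in types; this is an assumption about the wrapper's internals, not the actual v8 implementation:

```cpp
// Stand-in sketch of a 64-bit load wrapper like TurboAssembler::Ld on MIPS64.
// Real code: v8/src/codegen/mips64/macro-assembler-mips64.cc (hit above).
#include <cstdint>
#include <cstdio>

struct Register { int code; };
struct MemOperand { Register base; int64_t offset; };

struct TurboAssembler {
  // Raw emitters (stand-ins that just print their would-be encoding).
  void ld(Register rd, const MemOperand& mem) {
    std::printf("ld r%d, %lld(r%d)\n", rd.code,
                static_cast<long long>(mem.offset), mem.base.code);
  }
  void MaterializeAddress(Register dst, const MemOperand& mem) {
    std::printf("lui/daddu r%d, ...\n", dst.code);  // base + offset -> dst
  }

  // Ld: the macro-assembler-level load used as "__ Ld(rd, mem)" above.
  void Ld(Register rd, const MemOperand& mem) {
    if (mem.offset >= INT16_MIN && mem.offset <= INT16_MAX) {
      ld(rd, mem);                       // offset fits the 16-bit immediate
    } else {
      Register scratch{1};               // MIPS assembler temporary ($at)
      MaterializeAddress(scratch, mem);  // compute base + offset first
      ld(rd, MemOperand{scratch, 0});
    }
  }
};
```
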