/third_party/vixl/benchmarks/aarch64/ |
D | bench-utils.cc | 261 __ Ldrsw(PickX(), MemOperand(scratch, -42, PreIndex)); in GenerateMemOperandSequence() local
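
For context: LDRSW loads a 32-bit word from memory and sign-extends it into a 64-bit X register, and the benchmark row above exercises the pre-index addressing mode (the base register is adjusted by -42 before the load). A minimal sketch of the same pattern with VIXL's MacroAssembler; x0 and x1 stand in for the benchmark's PickX() and scratch registers, and the include path assumes a typical VIXL build:

    #include "aarch64/macro-assembler-aarch64.h"  // VIXL

    using namespace vixl::aarch64;

    // Emit the same shape as bench-utils.cc line 261: pre-index the base
    // register by -42, then load a sign-extended 32-bit word into an X reg.
    void EmitPreIndexLdrsw(MacroAssembler* masm) {
      masm->Ldrsw(x0, MemOperand(x1, -42, PreIndex));
    }
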
|
/third_party/node/deps/v8/src/builtins/arm64/ |
D | builtins-arm64.cc | 1523 __ Ldrsw(x10, in Generate_InterpreterEntryTrampoline() local
D | builtins-arm64.cc | 3460 __ Ldrsw(w10, MemOperand(x10)); in CallApiFunctionAndReturn() local
D | builtins-arm64.cc | 4002 __ Ldrsw(x1, MemOperand(x4, Deoptimizer::output_count_offset())); in Generate_DeoptimizationEntry() local
|
/third_party/node/deps/v8/src/codegen/arm64/ |
D | macro-assembler-arm64-inl.h | 1073 Ldrsw(dst, in SmiUntag()
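
In SmiUntag the sign-extending load matters because the tagged field is 32 bits wide while the untagged result must be a correctly signed 64-bit integer. A hypothetical plain-C++ simplification of that untag sequence; the field layout and the kSmiTagShift value are assumptions for illustration, not V8's exact scheme:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    constexpr int kSmiTagShift = 1;  // assumed tag width, for illustration

    // Model of an Ldrsw-based untag: a sign-extending 32-bit load of the
    // field, then an arithmetic shift to drop the tag bits.
    int64_t SmiUntagField(const void* field) {
      int32_t raw;
      std::memcpy(&raw, field, sizeof(raw));  // the Ldrsw: load + sign-extend
      return static_cast<int64_t>(raw) >> kSmiTagShift;
    }

    int main() {
      int32_t tagged = -7 * (1 << kSmiTagShift);  // encoding of -7 (illustrative)
      assert(SmiUntagField(&tagged) == -7);
    }
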
|
D | macro-assembler-arm64.h | 45 V(Ldrsw, Register&, rt, LDRSW_x)
|
D | macro-assembler-arm64.cc | 2107 Ldrsw(scratch, FieldMemOperand(code_object, Code::kBuiltinIndexOffset)); in LoadCodeObjectEntry()
|
/third_party/vixl/test/aarch64/ |
D | test-assembler-aarch64.cc | 3129 __ Ldrsw(x8, MemOperand(x24)); in TEST() local
D | test-assembler-aarch64.cc | 3130 __ Ldrsw(x9, MemOperand(x24, 4)); in TEST() local
D | test-assembler-aarch64.cc | 3806 __ Ldrsw(x4, 0x7fffffff); in TEST() local
D | test-assembler-aarch64.cc | 3807 __ Ldrsw(x5, 0x80000000); in TEST() local
D | test-assembler-aarch64.cc | 3838 __ Ldrsw(x2, 0x7fffffff); in TEST() local
D | test-assembler-aarch64.cc | 3839 __ Ldrsw(x3, 0x80000000); in TEST() local
D | test-assembler-aarch64.cc | 3858 __ Ldrsw(x6, 0x7fffffff); in TEST() local
D | test-assembler-aarch64.cc | 3859 __ Ldrsw(x7, 0x80000000); in TEST() local
|
D | test-disasm-aarch64.cc | 1934 COMPARE_MACRO_PREFIX(Ldrsw(x21, 0x80000000), "ldrsw x21, pc+8"); in TEST()
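
The literal form Ldrsw(x21, 0x80000000) places the 32-bit immediate in a literal pool and emits a pc-relative load, which is why the expected disassembly reads "ldrsw x21, pc+8". Because LDRSW sign-extends, the two boundary literals used throughout the assembler tests above come back differently; a plain-C++ model of the extension, not assembler output:

    #include <cassert>
    #include <cstdint>

    // What LDRSW's sign extension does to the two boundary literals.
    uint64_t SignExtend32To64(uint32_t w) {
      return static_cast<uint64_t>(static_cast<int64_t>(static_cast<int32_t>(w)));
    }

    int main() {
      assert(SignExtend32To64(0x7fffffffu) == 0x000000007fffffffu);  // stays positive
      assert(SignExtend32To64(0x80000000u) == 0xffffffff80000000u);  // sign bit propagates
    }
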
|
D | test-assembler-sve-aarch64.cc | 8746 masm->Ldrsw(dst, MemOperand(addr)); in ScalarLoadHelper()
|
/third_party/vixl/src/aarch64/ |
D | macro-assembler-aarch64.h | 57 V(Ldrsw, Register&, rt, LDRSW_x)
D | macro-assembler-aarch64.h | 2097 void Ldrsw(const Register& rt, uint32_t imm) { in Ldrsw() function
D | macro-assembler-aarch64.h | 2118 void Ldrsw(const Register& rt, RawLiteral* literal) { in Ldrsw() function
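
The V(Ldrsw, Register&, rt, LDRSW_x) rows at line 57 here and line 45 of V8's header are entries in an X-macro list that stamps out the load/store MacroAssembler methods. A self-contained sketch of that pattern; the list and macro names below are illustrative stand-ins, not the exact VIXL/V8 definitions:

    // One row per load/store instruction: (method, register type, operand, opcode).
    struct Register {};
    struct MemOperand {};

    #define LOAD_STORE_LIST(V) V(Ldrsw, Register&, rt, LDRSW_x)

    #define DECLARE_LOAD_STORE(FN, REGTYPE, REG, OP) \
      void FN(const REGTYPE REG, const MemOperand& addr);

    class MacroAssemblerSketch {
     public:
      // Expands to: void Ldrsw(const Register& rt, const MemOperand& addr);
      LOAD_STORE_LIST(DECLARE_LOAD_STORE)
    };
    #undef DECLARE_LOAD_STORE
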
|
/third_party/node/deps/v8/src/wasm/baseline/arm64/ |
D | liftoff-assembler-arm64.h | 564 Ldrsw(dst.gp().X(), src_op); in Load()
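
The .X() is the telling detail in the Liftoff row above: the load goes through the 64-bit view of the destination so the 32-bit value is sign-extended, where a plain Ldr into the W view would zero the upper half instead. A plain-C++ model of the difference, illustrative rather than V8 code:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    // Ldr w, [mem]: the upper 32 bits of the X register become zero.
    uint64_t ZeroExtendingLoad(const void* p) {
      uint32_t w;
      std::memcpy(&w, p, sizeof(w));
      return w;
    }

    // Ldrsw x, [mem]: the sign bit of the loaded word is propagated.
    uint64_t SignExtendingLoad(const void* p) {
      int32_t w;
      std::memcpy(&w, p, sizeof(w));
      return static_cast<uint64_t>(static_cast<int64_t>(w));
    }

    int main() {
      int32_t v = -1;
      assert(ZeroExtendingLoad(&v) == 0x00000000ffffffffu);
      assert(SignExtendingLoad(&v) == 0xffffffffffffffffu);
    }
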
|
/third_party/node/deps/v8/src/compiler/backend/arm64/ |
D | code-generator-arm64.cc | 1875 __ Ldrsw(i.OutputRegister(), i.MemoryOperand()); in AssembleArchInstruction() local
|