/art/compiler/utils/arm/ |
D | assembler_arm_vixl.cc |
  370 void ArmVIXLAssembler::StoreRegisterList(RegList regs, size_t stack_offset) { in StoreRegisterList() argument
  376 if (stack_offset != 0) { in StoreRegisterList()
  379 ___ Add(base, sp, Operand::From(stack_offset)); in StoreRegisterList()
  384 ___ Str(vixl32::Register(i), MemOperand(sp, stack_offset)); in StoreRegisterList()
  385 stack_offset += kRegSizeInBytes; in StoreRegisterList()
  391 void ArmVIXLAssembler::LoadRegisterList(RegList regs, size_t stack_offset) { in LoadRegisterList() argument
  397 if (stack_offset != 0) { in LoadRegisterList()
  399 ___ Add(base, sp, Operand::From(stack_offset)); in LoadRegisterList()
  404 ___ Ldr(vixl32::Register(i), MemOperand(sp, stack_offset)); in LoadRegisterList()
  405 stack_offset += kRegSizeInBytes; in LoadRegisterList()
|
D | assembler_arm_vixl.h |
  274 void LoadRegisterList(RegList regs, size_t stack_offset);
  275 void StoreRegisterList(RegList regs, size_t stack_offset);
|
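The two entries above show ART's AArch32 assembler spilling and reloading a set of core registers at consecutive stack offsets, rebasing through a temporary register when the offset is nonzero. A minimal sketch of the store side, assuming a plain bit-mask RegList and a hypothetical EmitStr helper (the real code uses VIXL's Str):

    // Sketch only: store each register named in the `regs` bit mask to
    // the stack, advancing by one machine word per register.
    void StoreRegisterList(uint32_t regs, size_t stack_offset) {
      constexpr size_t kRegSizeInBytes = 4;      // AArch32 word size
      for (uint32_t i = 0; i < 16; ++i) {        // r0..r15
        if ((regs & (1u << i)) != 0) {
          EmitStr(i, stack_offset);              // hypothetical: str r<i>, [sp, #offset]
          stack_offset += kRegSizeInBytes;
        }
      }
    }

The Add(base, sp, Operand::From(stack_offset)) lines suggest a second path that materializes sp + stack_offset into a scratch base register, presumably so a bulk store can be used when the offset itself cannot be encoded in the memory operand.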
/art/libelffile/dwarf/ |
D | expression.h |
  69 void WriteOpFbreg(int32_t stack_offset) { in WriteOpFbreg() argument
  71 PushSleb128(stack_offset); in WriteOpFbreg()
|
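WriteOpFbreg emits a DWARF DW_OP_fbreg expression: the opcode byte followed by the frame-base-relative offset as a signed LEB128, which is what PushSleb128 produces. A self-contained sketch of that encoding (the std::vector buffer is a stand-in for the expression writer's backing store; 0x91 is the standard DW_OP_fbreg opcode):

    #include <cstdint>
    #include <vector>

    // Sketch: DW_OP_fbreg (0x91) followed by the offset in signed LEB128,
    // 7 bits per byte, high bit set on all but the last byte.
    void WriteOpFbreg(std::vector<uint8_t>& buffer, int32_t stack_offset) {
      buffer.push_back(0x91);  // DW_OP_fbreg
      int32_t value = stack_offset;
      bool more = true;
      while (more) {
        uint8_t byte = value & 0x7f;
        value >>= 7;  // arithmetic shift preserves the sign
        // Stop once the remaining bits are pure sign extension of bit 6.
        more = !((value == 0 && (byte & 0x40) == 0) ||
                 (value == -1 && (byte & 0x40) != 0));
        if (more) byte |= 0x80;
        buffer.push_back(byte);
      }
    }

For example, an offset of -8 encodes as the two bytes 0x91 0x78.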
/art/compiler/optimizing/ |
D | code_generator.cc |
  590 size_t stack_offset = static_cast<size_t>(pointer_size); // Start after the ArtMethod*. in CreateStringBuilderAppendLocations() local
  604 locations->SetInAt(i, Location::StackSlot(stack_offset)); in CreateStringBuilderAppendLocations()
  608 stack_offset = RoundUp(stack_offset, sizeof(uint64_t)); in CreateStringBuilderAppendLocations()
  609 locations->SetInAt(i, Location::DoubleStackSlot(stack_offset)); in CreateStringBuilderAppendLocations()
  611 stack_offset += sizeof(uint32_t); in CreateStringBuilderAppendLocations()
  619 stack_offset += sizeof(uint32_t); in CreateStringBuilderAppendLocations()
  623 size_t param_size = stack_offset - static_cast<size_t>(pointer_size); in CreateStringBuilderAppendLocations()
  1725 size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath(); in SaveLiveRegisters() local
  1731 locations->SetStackBit(stack_offset / kVRegSize); in SaveLiveRegisters()
  1733 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize()); in SaveLiveRegisters()
  [all …]
|
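Two separate techniques appear in code_generator.cc. CreateStringBuilderAppendLocations lays out call arguments on the stack starting just past the ArtMethod* slot, rounding up to 8-byte alignment before placing a 64-bit value; SaveLiveRegisters records where each live register is saved in a slow path and marks the occupied vreg-sized slots via SetStackBit. A sketch of the layout logic, with stand-in types (the real code switches on DataType::Type):

    #include <cstddef>
    #include <cstdint>

    // Sketch: assign stack slots for arguments; 64-bit values are aligned
    // to 8 bytes and take a double slot, everything else takes one word.
    size_t LayOutArguments(const bool* is_64bit, size_t count, size_t pointer_size) {
      size_t stack_offset = pointer_size;  // start after the ArtMethod*
      for (size_t i = 0; i < count; ++i) {
        if (is_64bit[i]) {
          stack_offset = (stack_offset + 7) & ~static_cast<size_t>(7);  // RoundUp(_, 8)
          // locations->SetInAt(i, Location::DoubleStackSlot(stack_offset));
          stack_offset += sizeof(uint64_t);
        } else {
          // locations->SetInAt(i, Location::StackSlot(stack_offset));
          stack_offset += sizeof(uint32_t);
        }
      }
      return stack_offset - pointer_size;  // the param_size computation in the snippet
    }

One caveat: the snippet shows two separate += sizeof(uint32_t) sites, so the real 64-bit accounting may differ in detail; the align-then-double-slot shape is what this sketch illustrates.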
D | code_generator_arm_vixl.cc |
  182 size_t stack_offset) { in SaveContiguousSRegisterList() argument
  187 __ Vstr(vixl32::SRegister(first), MemOperand(sp, stack_offset)); in SaveContiguousSRegisterList()
  188 return stack_offset + kSRegSizeInBytes; in SaveContiguousSRegisterList()
  191 __ Vstr(vixl32::SRegister(first++), MemOperand(sp, stack_offset)); in SaveContiguousSRegisterList()
  192 stack_offset += kSRegSizeInBytes; in SaveContiguousSRegisterList()
  207 __ Vstr(d_reg, MemOperand(sp, stack_offset)); in SaveContiguousSRegisterList()
  211 if (stack_offset != 0) { in SaveContiguousSRegisterList()
  213 __ Add(base, sp, Operand::From(stack_offset)); in SaveContiguousSRegisterList()
  217 stack_offset += number_of_d_regs * kDRegSizeInBytes; in SaveContiguousSRegisterList()
  221 __ Vstr(vixl32::SRegister(last + 1), MemOperand(sp, stack_offset)); in SaveContiguousSRegisterList()
  [all …]
|
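SaveContiguousSRegisterList stores a run of single-precision registers, pairing aligned neighbors into double-precision stores (one D register aliases two consecutive S registers on ARM). A sketch of the pairing idea, with hypothetical StoreS/StoreD emitters standing in for the Vstr calls:

    // Sketch: spill S registers [first, last], using D-register stores
    // for the even-aligned middle of the range.
    size_t SaveContiguousSRegisterList(uint32_t first, uint32_t last, size_t stack_offset) {
      constexpr size_t kSRegSizeInBytes = 4, kDRegSizeInBytes = 8;
      if ((first % 2) == 1) {                  // leading unpaired S register
        StoreS(first++, stack_offset);         // hypothetical: vstr s<n>, [sp, #off]
        stack_offset += kSRegSizeInBytes;
      }
      while (first + 1 <= last) {              // aligned pairs become D registers
        StoreD(first / 2, stack_offset);       // d<n> covers s<2n> and s<2n+1>
        first += 2;
        stack_offset += kDRegSizeInBytes;
      }
      if (first == last) {                     // trailing unpaired S register
        StoreS(last, stack_offset);
        stack_offset += kSRegSizeInBytes;
      }
      return stack_offset;
    }

As in the core-register case, the Add(base, sp, ...) line indicates that longer runs go through a scratch base register so the D registers can be stored in bulk.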
D | code_generator_x86.cc |
  5480 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>()); in GetInvokeStaticOrDirectExtraParameter() local
  5481 __ movl(temp, Address(ESP, stack_offset)); in GetInvokeStaticOrDirectExtraParameter()
  6993 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0; in MoveMemoryToMemory() local
  6997 __ movl(temp_reg, Address(ESP, src + stack_offset)); in MoveMemoryToMemory()
  6998 __ movl(Address(ESP, dst + stack_offset), temp_reg); in MoveMemoryToMemory()
  6999 stack_offset += kX86WordSize; in MoveMemoryToMemory()
  7174 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0; in Exchange() local
  7175 __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset)); in Exchange()
  7176 __ movl(Address(ESP, mem + stack_offset), reg); in Exchange()
  7185 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0; in Exchange32() local
  [all …]
|
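A detail worth calling out in the x86 moves: the scratch-register helper may have to spill a register to free one up (ensure_scratch.IsSpilled()), which pushes one word and moves ESP. Every ESP-relative operand formed afterwards must therefore be rebased by kX86WordSize. A sketch of the compensation, with hypothetical emitters:

    // Sketch: memory-to-memory move via a temp register on x86. If the
    // scratch helper pushed a register, ESP dropped by one word, so all
    // ESP-relative offsets are shifted by kX86WordSize (4).
    void MoveMemoryToMemory(int dst, int src, bool scratch_spilled) {
      const int stack_offset = scratch_spilled ? kX86WordSize : 0;
      EmitMovl(TempReg(), EspOffset(src + stack_offset));   // temp = [esp + src]
      EmitMovl(EspOffset(dst + stack_offset), TempReg());   // [esp + dst] = temp
    }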
D | code_generator_arm64.cc |
  180 size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath(); in SaveLiveRegisters() local
  185 locations->SetStackBit(stack_offset / kVRegSize); in SaveLiveRegisters()
  187 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize()); in SaveLiveRegisters()
  189 saved_core_stack_offsets_[i] = stack_offset; in SaveLiveRegisters()
  190 stack_offset += kXRegSizeInBytes; in SaveLiveRegisters()
  196 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize()); in SaveLiveRegisters()
  198 saved_fpu_stack_offsets_[i] = stack_offset; in SaveLiveRegisters()
  199 stack_offset += fp_reg_size; in SaveLiveRegisters()
  907 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_); in GetNextLocation() local
  908 next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset) in GetNextLocation()
  [all …]
|
D | code_generator_x86_64.cc |
  6517 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0; in ExchangeMemory32() local
  6518 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset)); in ExchangeMemory32()
  6520 Address(CpuRegister(RSP), mem2 + stack_offset)); in ExchangeMemory32()
  6521 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP)); in ExchangeMemory32()
  6522 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset), in ExchangeMemory32()
  6530 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0; in ExchangeMemory64() local
  6535 Address(CpuRegister(RSP), mem1 + stack_offset)); in ExchangeMemory64()
  6537 Address(CpuRegister(RSP), mem2 + stack_offset)); in ExchangeMemory64()
  6538 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), in ExchangeMemory64()
  6540 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset), in ExchangeMemory64()
  [all …]
|
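ExchangeMemory32/64 implement the parallel-move resolver's stack-to-stack swap: both slots are loaded into temporaries, then written back crosswise, with the same spill compensation as on 32-bit x86. A sketch of the 64-bit variant (emitter names are placeholders):

    // Sketch: swap two RSP-relative slots through two temp registers,
    // rebasing by one word (kX86_64WordSize, 8) if a scratch was pushed.
    void ExchangeMemory64(int mem1, int mem2, bool scratch_spilled) {
      const int stack_offset = scratch_spilled ? kX86_64WordSize : 0;
      EmitMovq(Tmp(),     RspOffset(mem1 + stack_offset));  // t1 = [rsp + mem1]
      EmitMovq(Scratch(), RspOffset(mem2 + stack_offset));  // t2 = [rsp + mem2]
      EmitMovq(RspOffset(mem2 + stack_offset), Tmp());      // [rsp + mem2] = t1
      EmitMovq(RspOffset(mem1 + stack_offset), Scratch());  // [rsp + mem1] = t2
    }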
D | code_generator_riscv64.cc |
  197 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_); in GetNextLocation() local
  198 next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset) : in GetNextLocation()
  199 Location::StackSlot(stack_offset); in GetNextLocation()
|
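GetNextLocation maps an argument that overflowed the register portion of the calling convention onto the stack: the running stack index becomes a byte offset, and 64-bit types get a double slot. A sketch with an assumed linear slot layout (the real GetStackOffsetOf encapsulates the convention's frame layout, and kSlotSize here is illustrative):

    // Sketch: turn a stack argument index into a Location; the slot kind
    // depends only on the value's width.
    Location GetNextStackLocation(size_t stack_index, bool is_64bit) {
      const size_t stack_offset = stack_index * kSlotSize;  // assumed layout
      return is_64bit ? Location::DoubleStackSlot(stack_offset)
                      : Location::StackSlot(stack_offset);
    }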
/art/runtime/oat/ |
D | jni_stub_hash_map.cc |
  121 size_t stack_offset = 0; in JniStubKeyOptimizedHash() local
  124 stack_offset += StackOffset(c); in JniStubKeyOptimizedHash()
  163 result += stack_offset; in JniStubKeyOptimizedHash()
|
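JniStubKeyOptimizedHash folds the accumulated stack footprint of a method's arguments into the hash, so JNI stubs are bucketed in part by where their arguments land. A sketch of the accumulation, assuming StackOffset(c) returns the stack size a shorty character contributes (the 8/4 split for wide types is an assumption about one particular target):

    #include <cstddef>
    #include <string_view>

    // Sketch: sum per-argument stack sizes over a JNI shorty such as
    // "VIJ" (return type first, then the arguments).
    size_t SumStackOffsets(std::string_view shorty) {
      size_t stack_offset = 0;
      for (char c : shorty.substr(1)) {                    // skip the return type
        stack_offset += (c == 'J' || c == 'D') ? 8 : 4;    // assumed StackOffset(c)
      }
      return stack_offset;
    }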
/art/compiler/utils/arm64/ |
D | assembler_arm64.h |
  103 void SaveRestoreZRegisterList(uint32_t vreg_bit_vector, int64_t stack_offset) { in SaveRestoreZRegisterList() argument
  109 vixl_masm_.Add(temp, vixl::aarch64::sp, stack_offset); in SaveRestoreZRegisterList()
|
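SaveRestoreZRegisterList spills or reloads the SVE Z registers named by a bit vector. Because Z-register loads and stores are addressed in vector-length multiples relative to a base register, the code first materializes sp + stack_offset into a temp, as the Add line shows. A sketch of the walk, with placeholder emitters:

    // Sketch: save every Z register whose bit is set, at consecutive
    // vector-length-scaled slots from base = sp + stack_offset.
    void SaveZRegisterList(uint32_t vreg_bit_vector, int64_t stack_offset) {
      EmitAdd(kTempBase, kSp, stack_offset);     // as in the snippet's vixl_masm_.Add
      int slot = 0;
      for (uint32_t i = 0; i < 32; ++i) {        // z0..z31
        if ((vreg_bit_vector & (1u << i)) != 0) {
          EmitStrZ(i, kTempBase, slot++);        // hypothetical: str z<i>, [base, #slot, mul vl]
        }
      }
    }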
/art/runtime/interpreter/mterp/x86ng/ |
D | main.S |
  596 .macro LOOP_OVER_SHORTY_LOADING_INTS stack_offset, shorty, inst, arg_index, finished, is_string_init
  620 movl (LOCAL1 + \stack_offset)(%esp), %eax
  627 movl %eax, (4 + \stack_offset)(%esp, REG_VAR(arg_index), 4)
  635 movl %eax, (4 + \stack_offset)(%esp, REG_VAR(arg_index), 4)
  650 movl (LOCAL1 + \stack_offset)(%esp), %eax
  657 movl %eax, (4 + \stack_offset)(%esp, REG_VAR(arg_index), 4)
|
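The mterp macro iterates a method's shorty and copies integer-like arguments into the outgoing argument slots; \stack_offset parameterizes the macro so one loop body serves frame shapes that differ by a fixed displacement. A loose C++ rendering of the control flow (the exact register and label plumbing of the .S macro is omitted, and an ints-only shorty is assumed):

    // Loose sketch of LOOP_OVER_SHORTY_LOADING_INTS: copy each 32-bit
    // argument slot from the locals into the outgoing area.
    void LoadIntArgs(const char* shorty, const uint32_t* locals, uint32_t* out_args) {
      size_t arg_index = 0;
      for (const char* p = shorty + 1; *p != '\0'; ++p) {  // shorty[0] is the return type
        const size_t slots = (*p == 'J') ? 2 : 1;          // a long takes two 32-bit slots
        for (size_t s = 0; s < slots; ++s) {
          out_args[arg_index] = locals[arg_index];         // the movl pair in the macro
          ++arg_index;
        }
      }
    }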
/art/runtime/ |
D | thread.cc |
  4376 const size_t stack_offset = stack_index * kFrameSlotSize; in VisitQuickFramePrecise() local
  4377 FindWithType(stack_offset, in VisitQuickFramePrecise()
|
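During precise stack walking for GC, a stack map's slot index is converted to a byte offset before the matching root is located: with 4-byte frame slots (one 32-bit dex vreg per slot), slot i lives at byte i * 4. As a worked check:

    #include <cstddef>

    // Slot-index-to-byte-offset conversion: slot 3 is at byte offset 12
    // from the frame base when each slot is 4 bytes wide.
    constexpr size_t kFrameSlotSize = 4;
    constexpr size_t SlotToByteOffset(size_t stack_index) {
      return stack_index * kFrameSlotSize;
    }
    static_assert(SlotToByteOffset(3) == 12);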