/art/compiler/utils/arm/ |
D | assembler_arm_vixl.cc |
  387 void ArmVIXLAssembler::StoreRegisterList(RegList regs, size_t stack_offset) {  in StoreRegisterList() argument
  393 if (stack_offset != 0) {  in StoreRegisterList()
  396 ___ Add(base, sp, Operand::From(stack_offset));  in StoreRegisterList()
  401 ___ Str(vixl32::Register(i), MemOperand(sp, stack_offset));  in StoreRegisterList()
  402 stack_offset += kRegSizeInBytes;  in StoreRegisterList()
  408 void ArmVIXLAssembler::LoadRegisterList(RegList regs, size_t stack_offset) {  in LoadRegisterList() argument
  414 if (stack_offset != 0) {  in LoadRegisterList()
  416 ___ Add(base, sp, Operand::From(stack_offset));  in LoadRegisterList()
  421 ___ Ldr(vixl32::Register(i), MemOperand(sp, stack_offset));  in LoadRegisterList()
  422 stack_offset += kRegSizeInBytes;  in LoadRegisterList()
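Both helpers walk the RegList bitmask and bump stack_offset by kRegSizeInBytes for every register they touch, so consecutive registers land in consecutive 4-byte slots above sp. A minimal standalone sketch of that bookkeeping follows; RegList, kRegSizeInBytes and the printf stand-in for the VIXL Str emission are illustrative assumptions, not the ART/VIXL API.

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Illustrative stand-ins for the types used above.
    using RegList = uint32_t;               // one bit per core register r0..r15
    constexpr size_t kRegSizeInBytes = 4u;  // AArch32 core register width

    // Walk the mask low-to-high and report where each register would be stored
    // relative to sp, mirroring the "stack_offset += kRegSizeInBytes" loop above.
    void StoreRegisterListSketch(RegList regs, size_t stack_offset) {
      for (int i = 0; i < 16; ++i) {
        if ((regs & (1u << i)) != 0u) {
          // Real code emits: ___ Str(vixl32::Register(i), MemOperand(sp, stack_offset));
          std::printf("str r%d, [sp, #%zu]\n", i, stack_offset);
          stack_offset += kRegSizeInBytes;
        }
      }
    }

For example, StoreRegisterListSketch((1u << 4) | (1u << 5), 8) would place r4 at sp+8 and r5 at sp+12; LoadRegisterList mirrors the same walk with Ldr.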
|
D | assembler_arm_vixl.h |
  217 void LoadRegisterList(RegList regs, size_t stack_offset);
  218 void StoreRegisterList(RegList regs, size_t stack_offset);
|
/art/compiler/debug/dwarf/ |
D | expression.h |
  69 void WriteOpFbreg(int32_t stack_offset) {  in WriteOpFbreg() argument
  71 PushSleb128(stack_offset);  in WriteOpFbreg()
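WriteOpFbreg emits a DWARF DW_OP_fbreg expression: the opcode byte followed by stack_offset encoded as a signed LEB128, giving an address relative to the frame base. A hedged sketch of that encoding; the vector-based writer is an illustrative stand-in, though 0x91 is the standard DW_OP_fbreg opcode.

    #include <cstdint>
    #include <vector>

    // Append a signed LEB128 value, as PushSleb128() does.
    void PushSleb128(std::vector<uint8_t>* out, int32_t value) {
      bool more = true;
      while (more) {
        uint8_t byte = value & 0x7f;
        value >>= 7;  // arithmetic shift preserves the sign
        more = !((value == 0 && (byte & 0x40) == 0) ||
                 (value == -1 && (byte & 0x40) != 0));
        if (more) byte |= 0x80;  // continuation bit
        out->push_back(byte);
      }
    }

    // DW_OP_fbreg <sleb128 offset>: push (frame base + offset) on the DWARF stack.
    void WriteOpFbregSketch(std::vector<uint8_t>* expr, int32_t stack_offset) {
      expr->push_back(0x91);            // DW_OP_fbreg
      PushSleb128(expr, stack_offset);  // signed frame-base-relative offset
    }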
|
/art/compiler/optimizing/ |
D | code_generator.cc |
  1520 size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();  in SaveLiveRegisters() local
  1526 locations->SetStackBit(stack_offset / kVRegSize);  in SaveLiveRegisters()
  1528 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());  in SaveLiveRegisters()
  1530 saved_core_stack_offsets_[i] = stack_offset;  in SaveLiveRegisters()
  1531 stack_offset += codegen->SaveCoreRegister(stack_offset, i);  in SaveLiveRegisters()
  1536 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());  in SaveLiveRegisters()
  1538 saved_fpu_stack_offsets_[i] = stack_offset;  in SaveLiveRegisters()
  1539 stack_offset += codegen->SaveFloatingPointRegister(stack_offset, i);  in SaveLiveRegisters()
  1544 size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();  in RestoreLiveRegisters() local
  1548 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());  in RestoreLiveRegisters()
  [all …]
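These hits are the architecture-independent slow-path spill loop: for every live register it records the chosen slot in saved_core_stack_offsets_ / saved_fpu_stack_offsets_ (so RestoreLiveRegisters can find it again) and advances stack_offset by however many bytes the codegen reports it wrote. A simplified sketch of that bookkeeping, with the per-architecture SaveCoreRegister call replaced by an assumed fixed slot size.

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kNumberOfCpuRegisters = 16;  // illustrative register count
    constexpr size_t kSlotSize = 8;               // assumed; ART asks the codegen instead

    struct SlowPathSketch {
      size_t saved_core_stack_offsets_[kNumberOfCpuRegisters] = {};

      // core_registers is a bitmask of registers live at the slow-path entry.
      // Returns the offset just past the last core-register slot.
      size_t SaveLiveRegisters(uint32_t core_registers, size_t first_slot_offset) {
        size_t stack_offset = first_slot_offset;
        for (size_t i = 0; i < kNumberOfCpuRegisters; ++i) {
          if ((core_registers & (1u << i)) != 0u) {
            saved_core_stack_offsets_[i] = stack_offset;  // remembered for the restore path
            stack_offset += kSlotSize;  // real code: += codegen->SaveCoreRegister(stack_offset, i)
          }
        }
        return stack_offset;  // floating-point registers would continue from here
      }
    };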
|
D | code_generator_arm_vixl.cc |
  191 size_t stack_offset) {  in SaveContiguousSRegisterList() argument
  196 __ Vstr(vixl32::SRegister(first), MemOperand(sp, stack_offset));  in SaveContiguousSRegisterList()
  197 return stack_offset + kSRegSizeInBytes;  in SaveContiguousSRegisterList()
  200 __ Vstr(vixl32::SRegister(first++), MemOperand(sp, stack_offset));  in SaveContiguousSRegisterList()
  201 stack_offset += kSRegSizeInBytes;  in SaveContiguousSRegisterList()
  216 __ Vstr(d_reg, MemOperand(sp, stack_offset));  in SaveContiguousSRegisterList()
  220 if (stack_offset != 0) {  in SaveContiguousSRegisterList()
  222 __ Add(base, sp, Operand::From(stack_offset));  in SaveContiguousSRegisterList()
  226 stack_offset += number_of_d_regs * kDRegSizeInBytes;  in SaveContiguousSRegisterList()
  230 __ Vstr(vixl32::SRegister(last + 1), MemOperand(sp, stack_offset));  in SaveContiguousSRegisterList()
  [all …]
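SaveContiguousSRegisterList saves a run of S registers, pairing aligned S registers into D registers so wider stores (or a vstm off a temporary base) can be used, with single Vstr instructions for an unpaired leading or trailing S register. Below is a rough sketch of just the pairing and offset arithmetic, printing instead of emitting VIXL instructions; the range semantics and the exact batching strategy of the real code may differ.

    #include <cstddef>
    #include <cstdio>

    constexpr size_t kSRegSizeInBytes = 4;
    constexpr size_t kDRegSizeInBytes = 8;

    // Save s<first>..s<last> starting at sp+stack_offset; returns the next free offset.
    size_t SaveContiguousSRegisterListSketch(size_t first, size_t last, size_t stack_offset) {
      if (first == last) {  // a single register needs one Vstr
        std::printf("vstr s%zu, [sp, #%zu]\n", first, stack_offset);
        return stack_offset + kSRegSizeInBytes;
      }
      if ((first % 2) == 1) {  // odd leading S register has no partner below it
        std::printf("vstr s%zu, [sp, #%zu]\n", first++, stack_offset);
        stack_offset += kSRegSizeInBytes;
      }
      bool odd_tail = (last % 2) == 0;  // even trailing S register has no partner above it
      size_t number_of_d_regs = (last - first + 1) / 2;
      for (size_t d = first / 2; number_of_d_regs > 0; ++d, --number_of_d_regs) {
        std::printf("vstr d%zu, [sp, #%zu]\n", d, stack_offset);  // d<n> covers s<2n>,s<2n+1>
        stack_offset += kDRegSizeInBytes;
      }
      if (odd_tail) {
        std::printf("vstr s%zu, [sp, #%zu]\n", last, stack_offset);
        stack_offset += kSRegSizeInBytes;
      }
      return stack_offset;
    }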
|
D | code_generator_x86.cc |
  4505 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());  in GetInvokeStaticOrDirectExtraParameter() local
  4506 __ movl(temp, Address(ESP, stack_offset));  in GetInvokeStaticOrDirectExtraParameter()
  5770 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;  in MoveMemoryToMemory() local
  5774 __ movl(temp_reg, Address(ESP, src + stack_offset));  in MoveMemoryToMemory()
  5775 __ movl(Address(ESP, dst + stack_offset), temp_reg);  in MoveMemoryToMemory()
  5776 stack_offset += kX86WordSize;  in MoveMemoryToMemory()
  5930 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;  in Exchange() local
  5931 __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));  in Exchange()
  5932 __ movl(Address(ESP, mem + stack_offset), reg);  in Exchange()
  5941 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;  in Exchange32() local
  [all …]
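The moves and exchanges at lines 5770 onward route stack-to-stack traffic through a scratch register; when the scratch helper had to push a register to free one, ESP has moved down by a word, so every slot offset in the sequence is corrected by kX86WordSize. A small sketch of that correction; EnsureScratchSketch is a made-up stand-in for the real RAII helper.

    #include <cstdio>

    constexpr int kX86WordSize = 4;

    // Hypothetical stand-in for the scratch-register helper of the parallel-move resolver.
    struct EnsureScratchSketch {
      bool spilled;  // true if a register was pushed to free the scratch
      bool IsSpilled() const { return spilled; }
    };

    // Copy a 32-bit stack slot to another stack slot via the scratch register.
    void MoveMemoryToMemorySketch(const EnsureScratchSketch& scratch, int dst, int src) {
      // The spill pushed one word, so all ESP-relative slot offsets shift by one word.
      int stack_offset = scratch.IsSpilled() ? kX86WordSize : 0;
      std::printf("movl scratch, [esp + %d]\n", src + stack_offset);
      std::printf("movl [esp + %d], scratch\n", dst + stack_offset);
    }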
|
D | code_generator_arm64.cc |
  220 size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();  in SaveLiveRegisters() local
  225 locations->SetStackBit(stack_offset / kVRegSize);  in SaveLiveRegisters()
  227 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());  in SaveLiveRegisters()
  229 saved_core_stack_offsets_[i] = stack_offset;  in SaveLiveRegisters()
  230 stack_offset += kXRegSizeInBytes;  in SaveLiveRegisters()
  235 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());  in SaveLiveRegisters()
  237 saved_fpu_stack_offsets_[i] = stack_offset;  in SaveLiveRegisters()
  238 stack_offset += kDRegSizeInBytes;  in SaveLiveRegisters()
  1361 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);  in GetNextLocation() local
  1362 next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)  in GetNextLocation()
  [all …]
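Unlike the generic loop in code_generator.cc, which lets the codegen report how many bytes each save consumed, the arm64 slow path advances by a fixed kXRegSizeInBytes per core register and kDRegSizeInBytes per FP register. A one-line sketch of that slot-size choice; the constants are the usual AArch64 register widths, stated here as assumptions.

    #include <cstddef>

    constexpr size_t kXRegSizeInBytes = 8;  // 64-bit general-purpose register
    constexpr size_t kDRegSizeInBytes = 8;  // D (64-bit) view of an FP/SIMD register

    // Slot size used while laying out the arm64 slow-path save area.
    constexpr size_t SlowPathSlotSize(bool is_fp_register) {
      return is_fp_register ? kDRegSizeInBytes : kXRegSizeInBytes;
    }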
|
D | code_generator_mips.cc |
  102 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);  in GetNextLocation() local
  103 next_location = Location::StackSlot(stack_offset);  in GetNextLocation()
  122 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);  in GetNextLocation() local
  123 next_location = Location::DoubleStackSlot(stack_offset);  in GetNextLocation()
  138 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);  in GetNextLocation() local
  139 next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)  in GetNextLocation()
  140 : Location::StackSlot(stack_offset);  in GetNextLocation()
  1229 int stack_offset = ensure_scratch.IsSpilled() ? kStackAlignment : 0;  in Exchange() local
  1230 for (int i = 0; i <= (double_slot ? 1 : 0); i++, stack_offset += kMipsWordSize) {  in Exchange()
  1234 index1 + stack_offset);  in Exchange()
  [all …]
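The GetNextLocation hits at the top are the MIPS calling convention falling back to the stack once argument registers are exhausted: the convention supplies the byte offset of the stack_index-th outgoing argument, and the value's width decides between a single and a double stack slot. A hedged sketch of that selection with simplified stand-ins for Location and the convention; the offset formula here is illustrative, not ART's.

    #include <cstddef>

    // Simplified stand-ins for ART's Location / calling-convention machinery.
    struct LocationSketch {
      enum Kind { kStackSlot, kDoubleStackSlot } kind;
      size_t offset;
    };

    constexpr size_t kFramePointerSize = 4;  // MIPS32 word, assumed for illustration

    // Byte offset of the stack_index-th outgoing stack argument (illustrative formula).
    constexpr size_t GetStackOffsetOf(size_t stack_index) {
      return stack_index * kFramePointerSize;
    }

    // Pick a 32- or 64-bit stack slot for an argument that did not fit in registers.
    LocationSketch GetNextStackLocation(size_t stack_index, bool is_64bit_type) {
      const size_t stack_offset = GetStackOffsetOf(stack_index);
      return is_64bit_type ? LocationSketch{LocationSketch::kDoubleStackSlot, stack_offset}
                           : LocationSketch{LocationSketch::kStackSlot, stack_offset};
    }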
|
D | code_generator_x86_64.cc |
  5370 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;  in ExchangeMemory32() local
  5371 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));  in ExchangeMemory32()
  5373 Address(CpuRegister(RSP), mem2 + stack_offset));  in ExchangeMemory32()
  5374 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));  in ExchangeMemory32()
  5375 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),  in ExchangeMemory32()
  5383 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;  in ExchangeMemory64() local
  5388 Address(CpuRegister(RSP), mem1 + stack_offset));  in ExchangeMemory64()
  5390 Address(CpuRegister(RSP), mem2 + stack_offset));  in ExchangeMemory64()
  5391 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset),  in ExchangeMemory64()
  5393 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),  in ExchangeMemory64()
  [all …]
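ExchangeMemory32/64 swap two stack slots rather than copy one, so they need both TMP and a scratch register, but the same rule as on x86 applies: a spilled scratch moves RSP by one word and every slot offset is corrected by kX86_64WordSize. A compact sketch of the 32-bit swap, printing the four moves instead of emitting them.

    #include <cstdio>

    constexpr int kX86_64WordSize = 8;

    // Swap the 32-bit values at [rsp + mem1] and [rsp + mem2] via TMP and a scratch
    // register; scratch_spilled mirrors ensure_scratch.IsSpilled() above.
    void ExchangeMemory32Sketch(int mem1, int mem2, bool scratch_spilled) {
      const int stack_offset = scratch_spilled ? kX86_64WordSize : 0;
      std::printf("movl tmp, [rsp + %d]\n", mem1 + stack_offset);
      std::printf("movl scr, [rsp + %d]\n", mem2 + stack_offset);
      std::printf("movl [rsp + %d], tmp\n", mem2 + stack_offset);
      std::printf("movl [rsp + %d], scr\n", mem1 + stack_offset);
    }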
|
D | code_generator_mips64.cc |
  99 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);  in GetNextLocation() local
  100 next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)  in GetNextLocation()
  101 : Location::StackSlot(stack_offset);  in GetNextLocation()
  1050 int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;  in Exchange() local
  1054 index1 + stack_offset);  in Exchange()
  1058 index2 + stack_offset);  in Exchange()
  1062 index2 + stack_offset);  in Exchange()
  1063 __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);  in Exchange()
|
/art/compiler/utils/mips64/ |
D | assembler_mips64.cc |
  3601 int stack_offset = frame_size - kFramePointerSize;  in BuildFrame() local
  3602 StoreToOffset(kStoreDoubleword, RA, SP, stack_offset);  in BuildFrame()
  3603 cfi_.RelOffset(DWARFReg(RA), stack_offset);  in BuildFrame()
  3605 stack_offset -= kFramePointerSize;  in BuildFrame()
  3607 StoreToOffset(kStoreDoubleword, reg, SP, stack_offset);  in BuildFrame()
  3608 cfi_.RelOffset(DWARFReg(reg), stack_offset);  in BuildFrame()
  3643 int stack_offset = frame_size - (callee_save_regs.size() * kFramePointerSize) - kFramePointerSize;  in RemoveFrame() local
  3646 LoadFromOffset(kLoadDoubleword, reg, SP, stack_offset);  in RemoveFrame()
  3648 stack_offset += kFramePointerSize;  in RemoveFrame()
  3650 LoadFromOffset(kLoadDoubleword, RA, SP, stack_offset);  in RemoveFrame()
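BuildFrame saves RA in the highest slot of the new frame and the callee-saved registers in the slots below it, emitting a CFI record for each save so the unwinder can locate every register; RemoveFrame starts at the lowest saved slot and reloads in the opposite order. A sketch of that offset walk with the stores and CFI calls replaced by prints; the iteration order over callee_save_regs is illustrative.

    #include <cstdio>
    #include <vector>

    constexpr int kFramePointerSize = 8;  // MIPS64 doubleword

    // Lay out RA and the callee-saved registers at the top of a frame_size-byte frame,
    // reporting each (register, sp-relative offset) pair as the CFI bookkeeping would.
    void BuildFrameSketch(int frame_size, const std::vector<int>& callee_save_regs) {
      int stack_offset = frame_size - kFramePointerSize;
      std::printf("sd ra, %d(sp)    ; CFI: ra saved at sp+%d\n", stack_offset, stack_offset);
      for (int reg : callee_save_regs) {
        stack_offset -= kFramePointerSize;
        std::printf("sd r%d, %d(sp)   ; CFI: r%d saved at sp+%d\n",
                    reg, stack_offset, reg, stack_offset);
      }
    }

Note that the RemoveFrame starting offset above, frame_size - (callee_save_regs.size() * kFramePointerSize) - kFramePointerSize, is exactly where this loop leaves its last store.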
|
/art/compiler/utils/mips/ |
D | assembler_mips.cc |
  4769 int stack_offset = frame_size - kFramePointerSize;  in BuildFrame() local
  4770 StoreToOffset(kStoreWord, RA, SP, stack_offset);  in BuildFrame()
  4771 cfi_.RelOffset(DWARFReg(RA), stack_offset);  in BuildFrame()
  4773 stack_offset -= kFramePointerSize;  in BuildFrame()
  4775 StoreToOffset(kStoreWord, reg, SP, stack_offset);  in BuildFrame()
  4776 cfi_.RelOffset(DWARFReg(reg), stack_offset);  in BuildFrame()
  4810 int stack_offset = frame_size - (callee_save_regs.size() * kFramePointerSize) - kFramePointerSize;  in RemoveFrame() local
  4813 LoadFromOffset(kLoadWord, reg, SP, stack_offset);  in RemoveFrame()
  4815 stack_offset += kFramePointerSize;  in RemoveFrame()
  4817 LoadFromOffset(kLoadWord, RA, SP, stack_offset);  in RemoveFrame()
|
/art/runtime/ |
D | thread.cc |
  3701 const size_t stack_offset = stack_index * kFrameSlotSize;  in VisitQuickFramePrecise() local
  3702 FindWithType(stack_offset,  in VisitQuickFramePrecise()
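In the precise stack walk, a dex vreg's position in the quick frame is just its stack map slot index scaled by kFrameSlotSize; FindWithType then looks that byte offset up to decide how the slot should be treated (for example, whether it holds a reference). The mapping is a one-liner; the 4-byte slot size below is stated as an assumption.

    #include <cstddef>

    constexpr size_t kFrameSlotSize = 4;  // assumed 4-byte vreg slots

    // Byte offset of the stack_index-th vreg slot within the quick frame.
    constexpr size_t VRegSlotOffset(size_t stack_index) {
      return stack_index * kFrameSlotSize;
    }

    static_assert(VRegSlotOffset(3) == 12, "vreg slot #3 starts 12 bytes into the vreg area");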
|