
Searched refs:stack_offset (Results 1 – 11 of 11) sorted by relevance

/art/compiler/utils/arm/
assembler_arm_vixl.cc
384 void ArmVIXLAssembler::StoreRegisterList(RegList regs, size_t stack_offset) { in StoreRegisterList() argument
390 if (stack_offset != 0) { in StoreRegisterList()
393 ___ Add(base, sp, Operand::From(stack_offset)); in StoreRegisterList()
398 ___ Str(vixl32::Register(i), MemOperand(sp, stack_offset)); in StoreRegisterList()
399 stack_offset += kRegSizeInBytes; in StoreRegisterList()
405 void ArmVIXLAssembler::LoadRegisterList(RegList regs, size_t stack_offset) { in LoadRegisterList() argument
411 if (stack_offset != 0) { in LoadRegisterList()
413 ___ Add(base, sp, Operand::From(stack_offset)); in LoadRegisterList()
418 ___ Ldr(vixl32::Register(i), MemOperand(sp, stack_offset)); in LoadRegisterList()
419 stack_offset += kRegSizeInBytes; in LoadRegisterList()
assembler_arm_vixl.h
233 void LoadRegisterList(RegList regs, size_t stack_offset);
234 void StoreRegisterList(RegList regs, size_t stack_offset);
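The two assembler entry points above walk a register-list bitmask and spill or reload each set register at an increasing offset from sp. A minimal standalone sketch of that pattern follows; RegList, kRegSizeInBytes and EmitStore are placeholders here, not the actual ART/VIXL interfaces.

#include <cstddef>
#include <cstdint>
#include <cstdio>

using RegList = uint32_t;              // bit i set => core register i is in the list
constexpr size_t kRegSizeInBytes = 4;  // AArch32 core registers are 32-bit

void EmitStore(int reg, size_t sp_offset) {
  std::printf("str r%d, [sp, #%zu]\n", reg, sp_offset);  // stand-in for assembler emission
}

void StoreRegisterListSketch(RegList regs, size_t stack_offset) {
  for (int i = 0; i < 32; ++i) {
    if ((regs & (1u << i)) != 0) {
      EmitStore(i, stack_offset);
      stack_offset += kRegSizeInBytes;  // next set register goes one slot higher
    }
  }
}

LoadRegisterList mirrors the same loop with loads instead of stores; when stack_offset is non-zero the real code first materializes sp + stack_offset in a base register, as visible at lines 393 and 413 above.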
/art/libelffile/dwarf/
expression.h
69 void WriteOpFbreg(int32_t stack_offset) { in WriteOpFbreg() argument
71 PushSleb128(stack_offset); in WriteOpFbreg()
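WriteOpFbreg emits a frame-base-relative DWARF location: the DW_OP_fbreg opcode followed by the stack offset as a signed LEB128. A hedged standalone sketch (not the ART dwarf::Expression class) of what that amounts to:

#include <cstdint>
#include <vector>

void PushSleb128(std::vector<uint8_t>& out, int32_t value) {
  bool more = true;
  while (more) {
    uint8_t byte = value & 0x7f;
    value >>= 7;  // arithmetic shift preserves the sign on mainstream compilers
    // Done once the remaining bits are all copies of the emitted sign bit.
    if ((value == 0 && (byte & 0x40) == 0) || (value == -1 && (byte & 0x40) != 0)) {
      more = false;
    } else {
      byte |= 0x80;  // continuation bit
    }
    out.push_back(byte);
  }
}

void WriteOpFbregSketch(std::vector<uint8_t>& expr, int32_t stack_offset) {
  expr.push_back(0x91);             // DW_OP_fbreg
  PushSleb128(expr, stack_offset);  // e.g. -8 encodes as the single byte 0x78
}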
/art/compiler/optimizing/
code_generator.cc
662 size_t stack_offset = static_cast<size_t>(pointer_size); // Start after the ArtMethod*. in CreateStringBuilderAppendLocations() local
676 locations->SetInAt(i, Location::StackSlot(stack_offset)); in CreateStringBuilderAppendLocations()
680 stack_offset = RoundUp(stack_offset, sizeof(uint64_t)); in CreateStringBuilderAppendLocations()
681 locations->SetInAt(i, Location::DoubleStackSlot(stack_offset)); in CreateStringBuilderAppendLocations()
683 stack_offset += sizeof(uint32_t); in CreateStringBuilderAppendLocations()
691 stack_offset += sizeof(uint32_t); in CreateStringBuilderAppendLocations()
695 size_t param_size = stack_offset - static_cast<size_t>(pointer_size); in CreateStringBuilderAppendLocations()
1728 size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath(); in SaveLiveRegisters() local
1734 locations->SetStackBit(stack_offset / kVRegSize); in SaveLiveRegisters()
1736 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize()); in SaveLiveRegisters()
[all …]
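The CreateStringBuilderAppendLocations lines show the slot-assignment arithmetic: offsets start just past the ArtMethod* slot, 64-bit values are rounded up to 8-byte alignment and take a double stack slot, and every argument advances the offset in 4-byte vreg increments. A rough sketch of that bookkeeping, with RoundUp and the type list as simplified placeholders:

#include <cstddef>
#include <cstdint>
#include <vector>

constexpr size_t RoundUp(size_t x, size_t n) { return (x + n - 1) / n * n; }

std::vector<size_t> AssignStackSlots(const std::vector<bool>& is_64bit, size_t pointer_size) {
  size_t stack_offset = pointer_size;  // start after the ArtMethod* slot
  std::vector<size_t> offsets;
  for (bool wide : is_64bit) {
    if (wide) {
      stack_offset = RoundUp(stack_offset, sizeof(uint64_t));
      offsets.push_back(stack_offset);    // double stack slot at an 8-byte boundary
      stack_offset += sizeof(uint32_t);   // the high half occupies the extra vreg slot
    } else {
      offsets.push_back(stack_offset);    // single 4-byte slot
    }
    stack_offset += sizeof(uint32_t);
  }
  // size_t param_size = stack_offset - pointer_size;  // parameter area, as at line 695
  return offsets;
}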
code_generator_arm_vixl.cc
178 size_t stack_offset) { in SaveContiguousSRegisterList() argument
183 __ Vstr(vixl32::SRegister(first), MemOperand(sp, stack_offset)); in SaveContiguousSRegisterList()
184 return stack_offset + kSRegSizeInBytes; in SaveContiguousSRegisterList()
187 __ Vstr(vixl32::SRegister(first++), MemOperand(sp, stack_offset)); in SaveContiguousSRegisterList()
188 stack_offset += kSRegSizeInBytes; in SaveContiguousSRegisterList()
203 __ Vstr(d_reg, MemOperand(sp, stack_offset)); in SaveContiguousSRegisterList()
207 if (stack_offset != 0) { in SaveContiguousSRegisterList()
209 __ Add(base, sp, Operand::From(stack_offset)); in SaveContiguousSRegisterList()
213 stack_offset += number_of_d_regs * kDRegSizeInBytes; in SaveContiguousSRegisterList()
217 __ Vstr(vixl32::SRegister(last + 1), MemOperand(sp, stack_offset)); in SaveContiguousSRegisterList()
[all …]
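SaveContiguousSRegisterList spills a run of S registers while pairing aligned S2n/S2n+1 pairs into D-register stores; a leading or trailing odd register is stored on its own. A purely illustrative sketch of that pairing and of how stack_offset advances (register classes and emit calls are placeholders, and the real code additionally batches D registers with Vstm through a base register):

#include <cstddef>
#include <cstdio>

constexpr size_t kSRegSizeInBytes = 4;
constexpr size_t kDRegSizeInBytes = 8;

size_t SaveContiguousSRegsSketch(int first, int last, size_t stack_offset) {
  if ((first % 2) == 1) {                        // leading unpaired S register
    std::printf("vstr s%d, [sp, #%zu]\n", first++, stack_offset);
    stack_offset += kSRegSizeInBytes;
  }
  while (last - first >= 1) {                    // full S pairs become D-register stores
    std::printf("vstr d%d, [sp, #%zu]\n", first / 2, stack_offset);
    stack_offset += kDRegSizeInBytes;
    first += 2;
  }
  if (first == last) {                           // trailing unpaired S register
    std::printf("vstr s%d, [sp, #%zu]\n", last, stack_offset);
    stack_offset += kSRegSizeInBytes;
  }
  return stack_offset;                           // offset just past the saved block
}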
code_generator_x86.cc
5278 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>()); in GetInvokeStaticOrDirectExtraParameter() local
5279 __ movl(temp, Address(ESP, stack_offset)); in GetInvokeStaticOrDirectExtraParameter()
6720 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0; in MoveMemoryToMemory() local
6724 __ movl(temp_reg, Address(ESP, src + stack_offset)); in MoveMemoryToMemory()
6725 __ movl(Address(ESP, dst + stack_offset), temp_reg); in MoveMemoryToMemory()
6726 stack_offset += kX86WordSize; in MoveMemoryToMemory()
6901 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0; in Exchange() local
6902 __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset)); in Exchange()
6903 __ movl(Address(ESP, mem + stack_offset), reg); in Exchange()
6912 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0; in Exchange32() local
[all …]
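The MoveMemoryToMemory and Exchange snippets show the same correction: when the scratch register had to be spilled (pushed) to free a temporary, ESP has moved down one word, so every ESP-relative slot offset is biased by kX86WordSize. A hedged sketch with the stack modelled as a byte array indexed from the current ESP:

#include <cstdint>
#include <cstring>

constexpr int kX86WordSize = 4;

void MoveMemoryToMemorySketch(uint8_t* esp, int src, int dst, bool scratch_spilled) {
  int stack_offset = scratch_spilled ? kX86WordSize : 0;  // bias for the pushed scratch
  uint32_t temp;                                          // stands in for the temp register
  std::memcpy(&temp, esp + src + stack_offset, sizeof(temp));
  std::memcpy(esp + dst + stack_offset, &temp, sizeof(temp));
}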
code_generator_arm64.cc
174 size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath(); in SaveLiveRegisters() local
179 locations->SetStackBit(stack_offset / kVRegSize); in SaveLiveRegisters()
181 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize()); in SaveLiveRegisters()
183 saved_core_stack_offsets_[i] = stack_offset; in SaveLiveRegisters()
184 stack_offset += kXRegSizeInBytes; in SaveLiveRegisters()
190 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize()); in SaveLiveRegisters()
192 saved_fpu_stack_offsets_[i] = stack_offset; in SaveLiveRegisters()
193 stack_offset += fp_reg_size; in SaveLiveRegisters()
879 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_); in GetNextLocation() local
880 next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset) in GetNextLocation()
[all …]
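The arm64 SaveLiveRegisters excerpt records, for each live core register, the stack offset it is spilled to in the slow path, advancing by one X-register slot per register (and by fp_reg_size for FP registers). A sketch of that bookkeeping; all names are illustrative placeholders, not the ART CodeGenerator API:

#include <cstddef>
#include <cstdint>
#include <vector>

constexpr size_t kXRegSizeInBytes = 8;

std::vector<size_t> PlanCoreRegisterSaves(uint32_t live_core_regs, size_t first_slot_offset) {
  std::vector<size_t> saved_core_stack_offsets(32, 0);
  size_t stack_offset = first_slot_offset;  // GetFirstRegisterSlotInSlowPath() in the real code
  for (int i = 0; i < 32; ++i) {
    if ((live_core_regs & (1u << i)) != 0) {
      saved_core_stack_offsets[i] = stack_offset;  // remember where register i lands
      stack_offset += kXRegSizeInBytes;            // next live register, next slot
    }
  }
  return saved_core_stack_offsets;
}

The GetNextLocation line (880) uses the same offset the other way around: a 64-bit argument type selects a DoubleStackSlot at the calling-convention offset, anything else a single StackSlot.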
code_generator_x86_64.cc
6202 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0; in ExchangeMemory32() local
6203 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset)); in ExchangeMemory32()
6205 Address(CpuRegister(RSP), mem2 + stack_offset)); in ExchangeMemory32()
6206 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP)); in ExchangeMemory32()
6207 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset), in ExchangeMemory32()
6215 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0; in ExchangeMemory64() local
6220 Address(CpuRegister(RSP), mem1 + stack_offset)); in ExchangeMemory64()
6222 Address(CpuRegister(RSP), mem2 + stack_offset)); in ExchangeMemory64()
6223 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), in ExchangeMemory64()
6225 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset), in ExchangeMemory64()
[all …]
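ExchangeMemory32/64 apply the same spill bias as the x86 code above, here with a 64-bit word size, while swapping two RSP-relative slots through TMP and a scratch register. A hedged sketch of the 32-bit swap, again modelling the stack as a byte array:

#include <cstdint>
#include <cstring>

constexpr int kX86_64WordSize = 8;

void ExchangeMemory32Sketch(uint8_t* rsp, int mem1, int mem2, bool scratch_spilled) {
  int stack_offset = scratch_spilled ? kX86_64WordSize : 0;  // bias for the pushed scratch
  uint32_t a, b;  // stand-ins for TMP and the scratch register
  std::memcpy(&a, rsp + mem1 + stack_offset, sizeof(a));
  std::memcpy(&b, rsp + mem2 + stack_offset, sizeof(b));
  std::memcpy(rsp + mem2 + stack_offset, &a, sizeof(a));
  std::memcpy(rsp + mem1 + stack_offset, &b, sizeof(b));
}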
/art/compiler/utils/arm64/
assembler_arm64.h
104 void SaveRestoreZRegisterList(uint32_t vreg_bit_vector, int64_t stack_offset) { in SaveRestoreZRegisterList() argument
110 vixl_masm_.Add(temp, vixl::aarch64::sp, stack_offset); in SaveRestoreZRegisterList()
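Because SVE Z registers have a hardware-dependent length, SaveRestoreZRegisterList first forms a base pointer temp = sp + stack_offset (line 110) and then addresses each live register at successive vector-length multiples from that base. The following is illustrative pseudocode in C++, not the VIXL aarch64 macro-assembler API:

#include <cstdint>
#include <cstdio>

void SaveZRegisterListSketch(uint32_t vreg_bit_vector, int64_t stack_offset) {
  std::printf("add temp, sp, #%lld\n", static_cast<long long>(stack_offset));  // base address
  int slot = 0;
  for (int i = 0; i < 32; ++i) {
    if ((vreg_bit_vector & (1u << i)) != 0) {
      // "mul vl" scales the slot index by the runtime vector length.
      std::printf("str z%d, [temp, #%d, mul vl]\n", i, slot++);
    }
  }
}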
/art/runtime/interpreter/mterp/x86ng/
main.S
592 .macro LOOP_OVER_SHORTY_LOADING_INTS stack_offset, shorty, inst, arg_index, finished, is_string_init
616 movl (LOCAL1 + \stack_offset)(%esp), %eax
623 movl %eax, (4 + \stack_offset)(%esp, REG_VAR(arg_index), 4)
631 movl %eax, (4 + \stack_offset)(%esp, REG_VAR(arg_index), 4)
646 movl (LOCAL1 + \stack_offset)(%esp), %eax
653 movl %eax, (4 + \stack_offset)(%esp, REG_VAR(arg_index), 4)
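The macro above walks a method shorty and copies each 32-bit argument from (LOCAL1 + stack_offset)(%esp) into the outgoing slot at (4 + stack_offset)(%esp, arg_index, 4). A heavily simplified C++ rendering of that shorty walk (slot layout, wide/float handling and the is_string_init case are assumptions standing in for the interpreter's real frame handling):

#include <cstdint>

void LoopOverShortyLoadingIntsSketch(const char* shorty,
                                     const uint32_t* vregs,   // source dex registers
                                     uint32_t* out_args) {    // outgoing argument slots
  int arg_index = 0;
  for (const char* p = shorty + 1; *p != '\0'; ++p) {  // element 0 is the return type
    switch (*p) {
      case 'D': case 'J':
        arg_index += 2;   // wide values: two slots, assumed handled by a companion macro
        break;
      case 'F':
        ++arg_index;      // floats assumed handled by the FP-loading macro
        break;
      default:            // ints, references, and the like
        out_args[arg_index] = vregs[arg_index];
        ++arg_index;
        break;
    }
  }
}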
/art/runtime/
thread.cc
4165 const size_t stack_offset = stack_index * kFrameSlotSize; in VisitQuickFramePrecise() local
4166 FindWithType(stack_offset, in VisitQuickFramePrecise()
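In VisitQuickFramePrecise the byte offset of a vreg inside the quick frame is simply its slot index times the fixed frame slot size; that offset is then looked up to decide how the slot is visited during GC. A tiny sketch of that computation (kFrameSlotSize matching ART's 4-byte vreg slots; the lookup itself is omitted):

#include <cstddef>

constexpr size_t kFrameSlotSize = 4;

constexpr size_t FrameSlotOffset(size_t stack_index) {
  return stack_index * kFrameSlotSize;  // e.g. slot 3 lives at byte offset 12
}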