/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17 #include "assembler.h"
18
19 #include <algorithm>
20 #include <vector>
21
22 #ifdef ART_ENABLE_CODEGEN_arm
23 #include "arm/assembler_arm32.h"
24 #include "arm/assembler_thumb2.h"
25 #endif
26 #ifdef ART_ENABLE_CODEGEN_arm64
27 #include "arm64/assembler_arm64.h"
28 #endif
29 #ifdef ART_ENABLE_CODEGEN_mips
30 #include "mips/assembler_mips.h"
31 #endif
32 #ifdef ART_ENABLE_CODEGEN_mips64
33 #include "mips64/assembler_mips64.h"
34 #endif
35 #ifdef ART_ENABLE_CODEGEN_x86
36 #include "x86/assembler_x86.h"
37 #endif
38 #ifdef ART_ENABLE_CODEGEN_x86_64
39 #include "x86_64/assembler_x86_64.h"
40 #endif
41 #include "base/casts.h"
42 #include "globals.h"
43 #include "memory_region.h"
44
45 namespace art {
46
AssemblerBuffer(ArenaAllocator * arena)47 AssemblerBuffer::AssemblerBuffer(ArenaAllocator* arena)
48 : arena_(arena) {
49 static const size_t kInitialBufferCapacity = 4 * KB;
50 contents_ = arena_->AllocArray<uint8_t>(kInitialBufferCapacity, kArenaAllocAssembler);
51 cursor_ = contents_;
52 limit_ = ComputeLimit(contents_, kInitialBufferCapacity);
53 fixup_ = nullptr;
54 slow_path_ = nullptr;
55 #ifndef NDEBUG
56 has_ensured_capacity_ = false;
57 fixups_processed_ = false;
58 #endif
59
60 // Verify internal state.
61 CHECK_EQ(Capacity(), kInitialBufferCapacity);
62 CHECK_EQ(Size(), 0U);
63 }
64
65
AssemblerBuffer::~AssemblerBuffer() {
  // The buffer memory is owned by the arena, so nothing is freed here.
  // Under a memory tool, mark the storage inaccessible so stale uses of this
  // buffer after destruction are reported instead of silently succeeding.
  if (arena_->IsRunningOnMemoryTool()) {
    arena_->MakeInaccessible(contents_, Capacity());
  }
}
71
72
ProcessFixups(const MemoryRegion & region)73 void AssemblerBuffer::ProcessFixups(const MemoryRegion& region) {
74 AssemblerFixup* fixup = fixup_;
75 while (fixup != nullptr) {
76 fixup->Process(region, fixup->position());
77 fixup = fixup->previous();
78 }
79 }
80
81
FinalizeInstructions(const MemoryRegion & instructions)82 void AssemblerBuffer::FinalizeInstructions(const MemoryRegion& instructions) {
83 // Copy the instructions from the buffer.
84 MemoryRegion from(reinterpret_cast<void*>(contents()), Size());
85 instructions.CopyFrom(0, from);
86 // Process fixups in the instructions.
87 ProcessFixups(instructions);
88 #ifndef NDEBUG
89 fixups_processed_ = true;
90 #endif
91 }
92
93
ExtendCapacity(size_t min_capacity)94 void AssemblerBuffer::ExtendCapacity(size_t min_capacity) {
95 size_t old_size = Size();
96 size_t old_capacity = Capacity();
97 DCHECK_GT(min_capacity, old_capacity);
98 size_t new_capacity = std::min(old_capacity * 2, old_capacity + 1 * MB);
99 new_capacity = std::max(new_capacity, min_capacity);
100
101 // Allocate the new data area and copy contents of the old one to it.
102 contents_ = reinterpret_cast<uint8_t*>(
103 arena_->Realloc(contents_, old_capacity, new_capacity, kArenaAllocAssembler));
104
105 // Update the cursor and recompute the limit.
106 cursor_ = contents_ + old_size;
107 limit_ = ComputeLimit(contents_, new_capacity);
108
109 // Verify internal state.
110 CHECK_EQ(Capacity(), new_capacity);
111 CHECK_EQ(Size(), old_size);
112 }
113
ImplicitlyAdvancePC()114 void DebugFrameOpCodeWriterForAssembler::ImplicitlyAdvancePC() {
115 uint32_t pc = dchecked_integral_cast<uint32_t>(assembler_->CodeSize());
116 if (delay_emitting_advance_pc_) {
117 uint32_t stream_pos = dchecked_integral_cast<uint32_t>(opcodes_.size());
118 delayed_advance_pcs_.push_back(DelayedAdvancePC {stream_pos, pc});
119 } else {
120 AdvancePC(pc);
121 }
122 }
123
// Factory: creates the architecture-specific assembler for |instruction_set|,
// arena-allocated. |instruction_set_features| is only consulted by the MIPS
// backend (may be null). Aborts (LOG(FATAL)) on an instruction set that was
// not compiled in.
std::unique_ptr<Assembler> Assembler::Create(
    ArenaAllocator* arena,
    InstructionSet instruction_set,
    const InstructionSetFeatures* instruction_set_features) {
  switch (instruction_set) {
#ifdef ART_ENABLE_CODEGEN_arm
    case kArm:
      return std::unique_ptr<Assembler>(new (arena) arm::Arm32Assembler(arena));
    case kThumb2:
      return std::unique_ptr<Assembler>(new (arena) arm::Thumb2Assembler(arena));
#endif
#ifdef ART_ENABLE_CODEGEN_arm64
    case kArm64:
      return std::unique_ptr<Assembler>(new (arena) arm64::Arm64Assembler(arena));
#endif
#ifdef ART_ENABLE_CODEGEN_mips
    case kMips:
      // MIPS is the only backend that takes ISA features at construction time.
      return std::unique_ptr<Assembler>(new (arena) mips::MipsAssembler(
          arena,
          instruction_set_features != nullptr
              ? instruction_set_features->AsMipsInstructionSetFeatures()
              : nullptr));
#endif
#ifdef ART_ENABLE_CODEGEN_mips64
    case kMips64:
      return std::unique_ptr<Assembler>(new (arena) mips64::Mips64Assembler(arena));
#endif
#ifdef ART_ENABLE_CODEGEN_x86
    case kX86:
      return std::unique_ptr<Assembler>(new (arena) x86::X86Assembler(arena));
#endif
#ifdef ART_ENABLE_CODEGEN_x86_64
    case kX86_64:
      return std::unique_ptr<Assembler>(new (arena) x86_64::X86_64Assembler(arena));
#endif
    default:
      LOG(FATAL) << "Unknown InstructionSet: " << instruction_set;
      return nullptr;  // Unreachable; keeps the compiler happy.
  }
}
164
// Default stub: overridden by backends that support 32-bit thread offsets.
void Assembler::StoreImmediateToThread32(ThreadOffset<4> dest ATTRIBUTE_UNUSED,
                                         uint32_t imm ATTRIBUTE_UNUSED,
                                         ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
170
// Default stub: overridden by backends that support 64-bit thread offsets.
void Assembler::StoreImmediateToThread64(ThreadOffset<8> dest ATTRIBUTE_UNUSED,
                                         uint32_t imm ATTRIBUTE_UNUSED,
                                         ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
176
// Default stub: overridden by backends that support 32-bit thread offsets.
void Assembler::StoreStackOffsetToThread32(ThreadOffset<4> thr_offs ATTRIBUTE_UNUSED,
                                           FrameOffset fr_offs ATTRIBUTE_UNUSED,
                                           ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
182
// Default stub: overridden by backends that support 64-bit thread offsets.
void Assembler::StoreStackOffsetToThread64(ThreadOffset<8> thr_offs ATTRIBUTE_UNUSED,
                                           FrameOffset fr_offs ATTRIBUTE_UNUSED,
                                           ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
188
// Default stub: overridden by backends that support 32-bit thread offsets.
void Assembler::StoreStackPointerToThread32(ThreadOffset<4> thr_offs ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
192
// Default stub: overridden by backends that support 64-bit thread offsets.
void Assembler::StoreStackPointerToThread64(ThreadOffset<8> thr_offs ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
196
// Default stub: overridden by backends that support 32-bit thread offsets.
void Assembler::LoadFromThread32(ManagedRegister dest ATTRIBUTE_UNUSED,
                                 ThreadOffset<4> src ATTRIBUTE_UNUSED,
                                 size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
202
// Default stub: overridden by backends that support 64-bit thread offsets.
void Assembler::LoadFromThread64(ManagedRegister dest ATTRIBUTE_UNUSED,
                                 ThreadOffset<8> src ATTRIBUTE_UNUSED,
                                 size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
208
// Default stub: overridden by backends that support 32-bit thread offsets.
void Assembler::LoadRawPtrFromThread32(ManagedRegister dest ATTRIBUTE_UNUSED,
                                       ThreadOffset<4> offs ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
213
// Default stub: overridden by backends that support 64-bit thread offsets.
void Assembler::LoadRawPtrFromThread64(ManagedRegister dest ATTRIBUTE_UNUSED,
                                       ThreadOffset<8> offs ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
218
// Default stub: overridden by backends that support 32-bit thread offsets.
void Assembler::CopyRawPtrFromThread32(FrameOffset fr_offs ATTRIBUTE_UNUSED,
                                       ThreadOffset<4> thr_offs ATTRIBUTE_UNUSED,
                                       ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
224
// Default stub: overridden by backends that support 64-bit thread offsets.
void Assembler::CopyRawPtrFromThread64(FrameOffset fr_offs ATTRIBUTE_UNUSED,
                                       ThreadOffset<8> thr_offs ATTRIBUTE_UNUSED,
                                       ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
230
// Default stub: overridden by backends that support 32-bit thread offsets.
void Assembler::CopyRawPtrToThread32(ThreadOffset<4> thr_offs ATTRIBUTE_UNUSED,
                                     FrameOffset fr_offs ATTRIBUTE_UNUSED,
                                     ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
236
// Default stub: overridden by backends that support 64-bit thread offsets.
void Assembler::CopyRawPtrToThread64(ThreadOffset<8> thr_offs ATTRIBUTE_UNUSED,
                                     FrameOffset fr_offs ATTRIBUTE_UNUSED,
                                     ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
242
// Default stub: overridden by backends that support 32-bit thread offsets.
void Assembler::CallFromThread32(ThreadOffset<4> offset ATTRIBUTE_UNUSED,
                                 ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
247
// Default stub: overridden by backends that support 64-bit thread offsets.
void Assembler::CallFromThread64(ThreadOffset<8> offset ATTRIBUTE_UNUSED,
                                 ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
252
253 } // namespace art
254