1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions
6 // are met:
7 //
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
10 //
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the
14 // distribution.
15 //
16 // - Neither the name of Sun Microsystems or the names of contributors may
17 // be used to endorse or promote products derived from this software without
18 // specific prior written permission.
19 //
20 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
23 // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
24 // COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
25 // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
26 // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27 // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
28 // HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
29 // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30 // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
31 // OF THE POSSIBILITY OF SUCH DAMAGE.
32
33 // The original source code covered by the above license above has been modified
34 // significantly by Google Inc.
35 // Copyright 2012 the V8 project authors. All rights reserved.
36
37 #ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
38 #define V8_ARM_ASSEMBLER_ARM_INL_H_
39
40 #include "arm/assembler-arm.h"
41
42 #include "cpu.h"
43 #include "debug.h"
44
45
46 namespace v8 {
47 namespace internal {
48
49
// Maps a double VFP register to its register-allocator index.
// The mapping is the identity on the register code, which is valid only
// because the two reserved registers below are never handed to the allocator.
int DwVfpRegister::ToAllocationIndex(DwVfpRegister reg) {
  // The canonical-zero and scratch double registers are reserved and must
  // never appear as allocatable registers.
  ASSERT(!reg.is(kDoubleRegZero));
  ASSERT(!reg.is(kScratchDoubleReg));
  return reg.code();
}
55
56
apply(intptr_t delta)57 void RelocInfo::apply(intptr_t delta) {
58 if (RelocInfo::IsInternalReference(rmode_)) {
59 // absolute code pointer inside code object moves with the code object.
60 int32_t* p = reinterpret_cast<int32_t*>(pc_);
61 *p += delta; // relocate entry
62 }
63 // We do not use pc relative addressing on ARM, so there is
64 // nothing else to do.
65 }
66
67
// Returns the address this call/jump site currently targets, read out of
// the constant pool slot that backs the instruction at pc_.
Address RelocInfo::target_address() {
  // Only code targets and runtime entries carry a target address.
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  return Assembler::target_address_at(pc_);
}
72
73
// Returns the address OF the constant pool slot holding the target, i.e.
// the location that set_target_address() patches (not the target itself).
Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(Assembler::target_address_address_at(pc_));
}
80
81
// Size in bytes of a serialized target address: one full pointer, since on
// ARM the target lives in a pointer-sized constant pool slot.
int RelocInfo::target_address_size() {
  return kPointerSize;
}
85
86
set_target_address(Address target,WriteBarrierMode mode)87 void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
88 ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
89 Assembler::set_target_address_at(pc_, target);
90 if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
91 Object* target_code = Code::GetCodeFromTargetAddress(target);
92 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
93 host(), this, HeapObject::cast(target_code));
94 }
95 }
96
97
// Returns the heap object referenced by this reloc entry, loaded from the
// constant pool slot backing the instruction at pc_.
Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(Assembler::target_address_address_at(pc_));
}
102
103
// Returns a handle for the referenced object; the constant pool slot itself
// serves as the handle location.  `origin` is unused on ARM.
Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_Handle_at(Assembler::target_address_address_at(pc_));
}
108
109
// Returns the address of the constant pool slot holding the object pointer,
// suitable for use as a GC slot.
Object** RelocInfo::target_object_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object**>(Assembler::target_address_address_at(pc_));
}
114
115
set_target_object(Object * target,WriteBarrierMode mode)116 void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
117 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
118 Assembler::set_target_address_at(pc_, reinterpret_cast<Address>(target));
119 if (mode == UPDATE_WRITE_BARRIER &&
120 host() != NULL &&
121 target->IsHeapObject()) {
122 host()->GetHeap()->incremental_marking()->RecordWrite(
123 host(), &Memory::Object_at(pc_), HeapObject::cast(target));
124 }
125 }
126
127
// Returns the address of the constant pool slot holding an external
// (non-heap) reference.
Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address*>(Assembler::target_address_address_at(pc_));
}
132
133
// Returns a handle to the referenced global property cell.  The word at pc_
// holds the address of the cell's value field (see set_target_cell), and
// that location doubles as the handle's backing slot.
Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<JSGlobalPropertyCell>(
      reinterpret_cast<JSGlobalPropertyCell**>(address));
}
140
141
target_cell()142 JSGlobalPropertyCell* RelocInfo::target_cell() {
143 ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
144 Address address = Memory::Address_at(pc_);
145 Object* object = HeapObject::FromAddress(
146 address - JSGlobalPropertyCell::kValueOffset);
147 return reinterpret_cast<JSGlobalPropertyCell*>(object);
148 }
149
150
set_target_cell(JSGlobalPropertyCell * cell,WriteBarrierMode mode)151 void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
152 WriteBarrierMode mode) {
153 ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
154 Address address = cell->address() + JSGlobalPropertyCell::kValueOffset;
155 Memory::Address_at(pc_) = address;
156 if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
157 // TODO(1550) We are passing NULL as a slot because cell can never be on
158 // evacuation candidate.
159 host()->GetHeap()->incremental_marking()->RecordWrite(
160 host(), NULL, cell);
161 }
162 }
163
164
// Returns the call target stored in a patched return/debug-break sequence.
Address RelocInfo::call_address() {
  // The 2 instructions offset assumes patched debug break slot or return
  // sequence.
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize);
}
172
173
set_call_address(Address target)174 void RelocInfo::set_call_address(Address target) {
175 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
176 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
177 Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
178 if (host() != NULL) {
179 Object* target_code = Code::GetCodeFromTargetAddress(target);
180 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
181 host(), this, HeapObject::cast(target_code));
182 }
183 }
184
185
// Reads the object stored in the call sequence's object slot.
Object* RelocInfo::call_object() {
  return *call_object_address();
}
189
190
// Writes `target` into the call sequence's object slot.  Note: no write
// barrier is performed here, unlike set_call_address().
void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}
194
195
// Returns the address of the object slot in a patched return/debug-break
// sequence (two instructions past pc_, matching call_address()).
Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}
201
202
IsPatchedReturnSequence()203 bool RelocInfo::IsPatchedReturnSequence() {
204 Instr current_instr = Assembler::instr_at(pc_);
205 Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
206 #ifdef USE_BLX
207 // A patched return sequence is:
208 // ldr ip, [pc, #0]
209 // blx ip
210 return ((current_instr & kLdrPCMask) == kLdrPCPattern)
211 && ((next_instr & kBlxRegMask) == kBlxRegPattern);
212 #else
213 // A patched return sequence is:
214 // mov lr, pc
215 // ldr pc, [pc, #-4]
216 return (current_instr == kMovLrPc)
217 && ((next_instr & kLdrPCMask) == kLdrPCPattern);
218 #endif
219 }
220
221
IsPatchedDebugBreakSlotSequence()222 bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
223 Instr current_instr = Assembler::instr_at(pc_);
224 return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
225 }
226
227
Visit(ObjectVisitor * visitor)228 void RelocInfo::Visit(ObjectVisitor* visitor) {
229 RelocInfo::Mode mode = rmode();
230 if (mode == RelocInfo::EMBEDDED_OBJECT) {
231 visitor->VisitEmbeddedPointer(this);
232 } else if (RelocInfo::IsCodeTarget(mode)) {
233 visitor->VisitCodeTarget(this);
234 } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
235 visitor->VisitGlobalPropertyCell(this);
236 } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
237 visitor->VisitExternalReference(this);
238 #ifdef ENABLE_DEBUGGER_SUPPORT
239 // TODO(isolates): Get a cached isolate below.
240 } else if (((RelocInfo::IsJSReturn(mode) &&
241 IsPatchedReturnSequence()) ||
242 (RelocInfo::IsDebugBreakSlot(mode) &&
243 IsPatchedDebugBreakSlotSequence())) &&
244 Isolate::Current()->debug()->has_break_points()) {
245 visitor->VisitDebugTarget(this);
246 #endif
247 } else if (mode == RelocInfo::RUNTIME_ENTRY) {
248 visitor->VisitRuntimeEntry(this);
249 }
250 }
251
252
253 template<typename StaticVisitor>
Visit(Heap * heap)254 void RelocInfo::Visit(Heap* heap) {
255 RelocInfo::Mode mode = rmode();
256 if (mode == RelocInfo::EMBEDDED_OBJECT) {
257 StaticVisitor::VisitEmbeddedPointer(heap, this);
258 } else if (RelocInfo::IsCodeTarget(mode)) {
259 StaticVisitor::VisitCodeTarget(heap, this);
260 } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
261 StaticVisitor::VisitGlobalPropertyCell(heap, this);
262 } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
263 StaticVisitor::VisitExternalReference(this);
264 #ifdef ENABLE_DEBUGGER_SUPPORT
265 } else if (heap->isolate()->debug()->has_break_points() &&
266 ((RelocInfo::IsJSReturn(mode) &&
267 IsPatchedReturnSequence()) ||
268 (RelocInfo::IsDebugBreakSlot(mode) &&
269 IsPatchedDebugBreakSlotSequence()))) {
270 StaticVisitor::VisitDebugTarget(heap, this);
271 #endif
272 } else if (mode == RelocInfo::RUNTIME_ENTRY) {
273 StaticVisitor::VisitRuntimeEntry(this);
274 }
275 }
276
277
// Builds an immediate operand carrying the given reloc mode (defaulted at
// the declaration site).
Operand::Operand(int32_t immediate, RelocInfo::Mode rmode)  {
  rm_ = no_reg;  // no register: this is an immediate operand
  imm32_ = immediate;
  rmode_ = rmode;
}
283
284
// Builds an immediate operand from an external reference; the raw address
// is recorded with EXTERNAL_REFERENCE reloc info so the serializer can
// rewrite it.
Operand::Operand(const ExternalReference& f)  {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}
290
291
// Builds an immediate operand from a smi.  Smis are immediates, not heap
// pointers, so no reloc info is needed (RelocInfo::NONE).
Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE;
}
297
298
// Builds a plain register operand: no shift register, LSL by 0 — the
// canonical "just the register" encoding that is_reg() recognizes.
Operand::Operand(Register rm) {
  rm_ = rm;
  rs_ = no_reg;
  shift_op_ = LSL;
  shift_imm_ = 0;
}
305
306
// True iff this operand is a bare register — a valid rm_ with no register
// shift and a zero LSL, exactly the state Operand(Register) constructs.
bool Operand::is_reg() const {
  return rm_.is_valid() &&
      rs_.is(no_reg) &&
      shift_op_ == LSL &&
      shift_imm_ == 0;
}
313
314
// Called before emitting an instruction: grows the buffer when free space
// drops to the guard gap, and gives the constant pool a chance to be
// flushed when the next-check offset has been reached.
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
  if (pc_offset() >= next_buffer_check_) {
    // Non-forced check; jump_instr_required == true per the call below.
    CheckConstPool(false, true);
  }
}
323
324
// Emits one 32-bit instruction at the current pc and advances pc_ by one
// instruction, ensuring buffer space first.
void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}
330
331
// Given the pc of a call/load site, returns the address of the constant
// pool entry holding its target.  Walks backwards past an optional bx (and,
// with BLX support, blx) to find the ldr that loads from the pool, then
// decodes the ldr's pc-relative offset.
Address Assembler::target_address_address_at(Address pc) {
  Address target_pc = pc;
  Instr instr = Memory::int32_at(target_pc);
  // If we have a bx instruction, the instruction before the bx is
  // what we need to patch.
  static const int32_t kBxInstMask = 0x0ffffff0;
  static const int32_t kBxInstPattern = 0x012fff10;
  if ((instr & kBxInstMask) == kBxInstPattern) {
    target_pc -= kInstrSize;
    instr = Memory::int32_at(target_pc);
  }

#ifdef USE_BLX
  // If we have a blx instruction, the instruction before it is
  // what needs to be patched.
  if ((instr & kBlxRegMask) == kBlxRegPattern) {
    target_pc -= kInstrSize;
    instr = Memory::int32_at(target_pc);
  }
#endif

  // At this point the instruction must be an ldr with a pc-relative
  // immediate offset into the constant pool.
  ASSERT(IsLdrPcImmediateOffset(instr));
  int offset = instr & 0xfff;  // offset_12 is unsigned
  if ((instr & (1 << 23)) == 0) offset = -offset;  // U bit defines offset sign
  // Verify that the constant pool comes after the instruction referencing it.
  ASSERT(offset >= -4);
  // The +8 accounts for the ARM pc-read bias: a pc-relative ldr addresses
  // relative to the instruction's address plus 8.
  return target_pc + offset + 8;
}
360
361
// Reads the target address out of the constant pool entry referenced by
// the instruction at pc.
Address Assembler::target_address_at(Address pc) {
  return Memory::Address_at(target_address_address_at(pc));
}
365
366
// Used by the deserializer: writes a target directly into an already-known
// constant pool entry.  No icache flush is needed since only pool data,
// not an instruction, changes (see set_target_address_at).
void Assembler::deserialization_set_special_target_at(
    Address constant_pool_entry, Address target) {
  Memory::Address_at(constant_pool_entry) = target;
}
371
372
// Writes an external target into a known constant pool entry; identical in
// mechanism to deserialization_set_special_target_at.
void Assembler::set_external_target_at(Address constant_pool_entry,
                                       Address target) {
  Memory::Address_at(constant_pool_entry) = target;
}
377
378
// Patches the target of the call/load site at pc by rewriting its constant
// pool entry.
void Assembler::set_target_address_at(Address pc, Address target) {
  Memory::Address_at(target_address_address_at(pc)) = target;
  // Intuitively, we would think it is necessary to flush the instruction cache
  // after patching a target address in the code as follows:
  //   CPU::FlushICache(pc, sizeof(target));
  // However, on ARM, no instruction was actually patched by the assignment
  // above; the target address is not part of an instruction, it is patched in
  // the constant pool and is read via a data access; the instruction accessing
  // this address in the constant pool remains unchanged.
}
389
390 } } // namespace v8::internal
391
392 #endif // V8_ARM_ASSEMBLER_ARM_INL_H_
393