1
2 // Copyright (c) 1994-2006 Sun Microsystems Inc.
3 // All Rights Reserved.
4 //
5 // Redistribution and use in source and binary forms, with or without
6 // modification, are permitted provided that the following conditions are
7 // met:
8 //
9 // - Redistributions of source code must retain the above copyright notice,
10 // this list of conditions and the following disclaimer.
11 //
12 // - Redistribution in binary form must reproduce the above copyright
13 // notice, this list of conditions and the following disclaimer in the
14 // documentation and/or other materials provided with the distribution.
15 //
16 // - Neither the name of Sun Microsystems or the names of contributors may
17 // be used to endorse or promote products derived from this software without
18 // specific prior written permission.
19 //
20 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
21 // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
22 // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
23 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
24 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
25 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
26 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
27 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
28 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
29 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
30 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31
32 // The original source code covered by the above license above has been
33 // modified significantly by Google Inc.
34 // Copyright 2012 the V8 project authors. All rights reserved.
35
36
37 #ifndef V8_MIPS_ASSEMBLER_MIPS_INL_H_
38 #define V8_MIPS_ASSEMBLER_MIPS_INL_H_
39
40 #include "src/mips/assembler-mips.h"
41
42 #include "src/assembler.h"
43 #include "src/debug/debug.h"
44 #include "src/objects-inl.h"
45
46 namespace v8 {
47 namespace internal {
48
49
SupportsCrankshaft()50 bool CpuFeatures::SupportsCrankshaft() { return IsSupported(FPU); }
51
SupportsSimd128()52 bool CpuFeatures::SupportsSimd128() { return false; }
53
54 // -----------------------------------------------------------------------------
55 // Operand and MemOperand.
56
Operand(int32_t immediate,RelocInfo::Mode rmode)57 Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
58 rm_ = no_reg;
59 imm32_ = immediate;
60 rmode_ = rmode;
61 }
62
63
Operand(const ExternalReference & f)64 Operand::Operand(const ExternalReference& f) {
65 rm_ = no_reg;
66 imm32_ = reinterpret_cast<int32_t>(f.address());
67 rmode_ = RelocInfo::EXTERNAL_REFERENCE;
68 }
69
70
Operand(Smi * value)71 Operand::Operand(Smi* value) {
72 rm_ = no_reg;
73 imm32_ = reinterpret_cast<intptr_t>(value);
74 rmode_ = RelocInfo::NONE32;
75 }
76
77
Operand(Register rm)78 Operand::Operand(Register rm) {
79 rm_ = rm;
80 }
81
82
is_reg()83 bool Operand::is_reg() const {
84 return rm_.is_valid();
85 }
86
87
88 // -----------------------------------------------------------------------------
89 // RelocInfo.
90
// Relocates this entry by |delta| bytes when the host code object moves.
// Only internal references hold absolute pointers into the code object and
// therefore need patching; all other modes are position-independent here.
void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    byte* p = reinterpret_cast<byte*>(pc_);
    int count = Assembler::RelocateInternalReference(rmode_, p, delta);
    // Flush the patched instructions from the instruction cache.
    Assembler::FlushICache(isolate_, p, count * sizeof(uint32_t));
  }
}
99
100
// Returns the code/runtime-entry target this reloc entry points at.
Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}
105
// Returns the address of the instruction-stream word holding the target,
// as consumed by the serializer (see the long comment below).
Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         rmode_ == EMBEDDED_OBJECT ||
         rmode_ == EXTERNAL_REFERENCE);
  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LUI/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written. In this case the target_address_address function should
  // return the end of the instructions to be patched, allowing the
  // deserializer to deserialize the instructions as raw bytes and put them in
  // place, ready to be patched with the target. After jump optimization,
  // that is the address of the instruction that follows J/JAL/JR/JALR
  // instruction.
  return reinterpret_cast<Address>(
    pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
}
128
129
constant_pool_entry_address()130 Address RelocInfo::constant_pool_entry_address() {
131 UNREACHABLE();
132 return NULL;
133 }
134
135
// Number of raw bytes the serializer skips for a target at this site.
int RelocInfo::target_address_size() {
  return Assembler::kSpecialTargetSize;
}
139
target_address_at(Address pc,Code * code)140 Address Assembler::target_address_at(Address pc, Code* code) {
141 Address constant_pool = code ? code->constant_pool() : NULL;
142 return target_address_at(pc, constant_pool);
143 }
144
set_target_address_at(Isolate * isolate,Address pc,Code * code,Address target,ICacheFlushMode icache_flush_mode)145 void Assembler::set_target_address_at(Isolate* isolate, Address pc, Code* code,
146 Address target,
147 ICacheFlushMode icache_flush_mode) {
148 Address constant_pool = code ? code->constant_pool() : NULL;
149 set_target_address_at(isolate, pc, constant_pool, target, icache_flush_mode);
150 }
151
// Given the return address of a call, computes the address of the call
// sequence's target slot, which sits kCallTargetAddressOffset bytes
// before the return address.
Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}
155
156
// Patches an encoded internal reference at |pc|: a lui/{ori,jic} pair that
// materializes a 32-bit absolute address. |target| must be 4-byte aligned.
void Assembler::set_target_internal_reference_encoded_at(Address pc,
                                                         Address target) {
  Instr instr1 = Assembler::instr_at(pc + 0 * Assembler::kInstrSize);
  Instr instr2 = Assembler::instr_at(pc + 1 * Assembler::kInstrSize);
  DCHECK(Assembler::IsLui(instr1));
  DCHECK(Assembler::IsOri(instr2) || Assembler::IsJicOrJialc(instr2));
  // Clear both 16-bit immediate fields; the new address bits are OR-ed in.
  instr1 &= ~kImm16Mask;
  instr2 &= ~kImm16Mask;
  int32_t imm = reinterpret_cast<int32_t>(target);
  DCHECK((imm & 3) == 0);
  if (Assembler::IsJicOrJialc(instr2)) {
    // Encoded internal references are lui/jic load of 32-bit absolute address.
    uint32_t lui_offset_u, jic_offset_u;
    Assembler::UnpackTargetAddressUnsigned(imm, lui_offset_u, jic_offset_u);

    Assembler::instr_at_put(pc + 0 * Assembler::kInstrSize,
                            instr1 | lui_offset_u);
    Assembler::instr_at_put(pc + 1 * Assembler::kInstrSize,
                            instr2 | jic_offset_u);
  } else {
    // Encoded internal references are lui/ori load of 32-bit absolute address.
    Assembler::instr_at_put(pc + 0 * Assembler::kInstrSize,
                            instr1 | ((imm >> kLuiShift) & kImm16Mask));
    Assembler::instr_at_put(pc + 1 * Assembler::kInstrSize,
                            instr2 | (imm & kImm16Mask));
  }

  // Currently used only by deserializer, and all code will be flushed
  // after complete deserialization, no need to flush on each reference.
}
187
188
deserialization_set_target_internal_reference_at(Isolate * isolate,Address pc,Address target,RelocInfo::Mode mode)189 void Assembler::deserialization_set_target_internal_reference_at(
190 Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
191 if (mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
192 DCHECK(IsLui(instr_at(pc)));
193 set_target_internal_reference_encoded_at(pc, target);
194 } else {
195 DCHECK(mode == RelocInfo::INTERNAL_REFERENCE);
196 Memory::Address_at(pc) = target;
197 }
198 }
199
200
// Returns the embedded object pointer stored at this reloc site.
Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}
205
206
// Returns the embedded object as a Handle: the address stored at the
// target site is reinterpreted as the handle's slot location.
Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}
212
213
// Replaces the embedded object pointer at this reloc site and, when
// requested, notifies the GC of the write so marking state stays correct.
void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(isolate_, pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  // Only heap objects are recorded; Smis need no barrier.
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target));
    host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
  }
}
229
230
// Returns the external (off-heap) address this reloc entry refers to.
Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}
235
236
// Reads the target of an internal reference, stored either as a plain
// address word (INTERNAL_REFERENCE) or encoded in an instruction pair
// (INTERNAL_REFERENCE_ENCODED).
Address RelocInfo::target_internal_reference() {
  if (rmode_ == INTERNAL_REFERENCE) {
    return Memory::Address_at(pc_);
  } else {
    // Encoded internal references are lui/ori or lui/jic load of 32-bit
    // absolute address.
    DCHECK(rmode_ == INTERNAL_REFERENCE_ENCODED);
    Instr instr1 = Assembler::instr_at(pc_ + 0 * Assembler::kInstrSize);
    Instr instr2 = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize);
    DCHECK(Assembler::IsLui(instr1));
    DCHECK(Assembler::IsOri(instr2) || Assembler::IsJicOrJialc(instr2));
    if (Assembler::IsJicOrJialc(instr2)) {
      return reinterpret_cast<Address>(
          Assembler::CreateTargetAddress(instr1, instr2));
    }
    // lui/ori form: high half from the lui immediate, low half from the
    // ori immediate.
    int32_t imm = (instr1 & static_cast<int32_t>(kImm16Mask)) << kLuiShift;
    imm |= (instr2 & static_cast<int32_t>(kImm16Mask));
    return reinterpret_cast<Address>(imm);
  }
}
257
258
// Returns the address of the reference site itself (pc_), valid for both
// plain and encoded internal references.
Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE || rmode_ == INTERNAL_REFERENCE_ENCODED);
  return reinterpret_cast<Address>(pc_);
}
263
264
// Returns the runtime-entry target address at this reloc site.
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}
269
270
set_target_runtime_entry(Address target,WriteBarrierMode write_barrier_mode,ICacheFlushMode icache_flush_mode)271 void RelocInfo::set_target_runtime_entry(Address target,
272 WriteBarrierMode write_barrier_mode,
273 ICacheFlushMode icache_flush_mode) {
274 DCHECK(IsRuntimeEntry(rmode_));
275 if (target_address() != target)
276 set_target_address(target, write_barrier_mode, icache_flush_mode);
277 }
278
279
// Returns the referenced Cell as a Handle; the stored word is treated as
// the handle's slot location.
Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}
285
286
// Returns the referenced Cell, decoded from the value-slot address stored
// at pc_.
Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}
291
292
// Points this CELL reloc entry at |cell|'s value slot and, when requested,
// records the code write with incremental marking.
void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  // The stored word is the address of the cell's value field, not the cell.
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(host(), this,
                                                                  cell);
  }
}
304
305
// Byte length of the code-age prologue sequence (7 instructions).
static const int kNoCodeAgeSequenceLength = 7 * Assembler::kInstrSize;
307
308
// Handle-based stub lookup is not used by this port.
Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on MIPS.
  return Handle<Object>();
}
313
314
// Returns the code-age stub whose address is embedded one instruction
// past pc_.
Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + Assembler::kInstrSize, host_));
}
320
321
// Rewrites the code-age sequence to reference |stub|'s entry point.
// NOTE(review): icache_flush_mode is not forwarded here, so the callee's
// default flush behavior applies — confirm this is intended.
void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(isolate_, pc_ + Assembler::kInstrSize, host_,
                                   stub->instruction_start());
}
328
329
// Returns the call target of a patched debug break slot.
Address RelocInfo::debug_call_address() {
  // The pc_ offset of 0 assumes patched debug break slot or return
  // sequence.
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  return Assembler::target_address_at(pc_, host_);
}
336
337
// Patches a debug break slot to call |target| and records the code write
// with incremental marking.
void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  // The pc_ offset of 0 assumes patched debug break slot or return
  // sequence.
  Assembler::set_target_address_at(isolate_, pc_, host_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
349
350
WipeOut()351 void RelocInfo::WipeOut() {
352 DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
353 IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
354 IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
355 if (IsInternalReference(rmode_)) {
356 Memory::Address_at(pc_) = NULL;
357 } else if (IsInternalReferenceEncoded(rmode_)) {
358 Assembler::set_target_internal_reference_encoded_at(pc_, nullptr);
359 } else {
360 Assembler::set_target_address_at(isolate_, pc_, host_, NULL);
361 }
362 }
363
// Dispatches this reloc entry to the matching callback of |visitor| based
// on its mode; modes not listed are ignored.
template <typename ObjectVisitor>
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}
387
388
// Dispatches this reloc entry to the matching StaticVisitor callback based
// on its mode; modes not listed are ignored.
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
412
413
414 // -----------------------------------------------------------------------------
415 // Assembler.
416
417
CheckBuffer()418 void Assembler::CheckBuffer() {
419 if (buffer_space() <= kGap) {
420 GrowBuffer();
421 }
422 }
423
424
CheckTrampolinePoolQuick(int extra_instructions)425 void Assembler::CheckTrampolinePoolQuick(int extra_instructions) {
426 if (pc_offset() >= next_buffer_check_ - extra_instructions * kInstrSize) {
427 CheckTrampolinePool();
428 }
429 }
430
431
// Ensures the next emission does not land in a compact branch's forbidden
// slot: grows the buffer if allowed, then pads with a nop when the
// previous instruction was a compact branch.
void Assembler::CheckForEmitInForbiddenSlot() {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  if (IsPrevInstrCompactBranch()) {
    // Nop instruction to precede a CTI in forbidden slot:
    Instr nop = SPECIAL | SLL;
    *reinterpret_cast<Instr*>(pc_) = nop;
    pc_ += kInstrSize;

    ClearCompactBranchState();
  }
}
445
446
// Writes instruction |x| at pc_, first inserting a padding nop if the
// previous instruction was a compact branch and |x| may not occupy its
// forbidden slot. Tracks compact-branch state for the next emission.
void Assembler::EmitHelper(Instr x, CompactBranchType is_compact_branch) {
  if (IsPrevInstrCompactBranch()) {
    if (Instruction::IsForbiddenAfterBranchInstr(x)) {
      // Nop instruction to precede a CTI in forbidden slot:
      Instr nop = SPECIAL | SLL;
      *reinterpret_cast<Instr*>(pc_) = nop;
      pc_ += kInstrSize;
    }
    ClearCompactBranchState();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  if (is_compact_branch == CompactBranchType::COMPACT_BRANCH) {
    EmittedCompactBranchInstruction();
  }
  CheckTrampolinePoolQuick();
}
464
// Declare the uint8_t specialization (defined below) before the generic
// template so byte emissions use it instead of the generic definition.
template <>
inline void Assembler::EmitHelper(uint8_t x);
467
// Writes a raw value of type T at pc_ and advances pc_ by its size.
template <typename T>
void Assembler::EmitHelper(T x) {
  *reinterpret_cast<T*>(pc_) = x;
  pc_ += sizeof(x);
  CheckTrampolinePoolQuick();
}
474
// Byte specialization: the trampoline-pool check only runs when pc_ is
// instruction-aligned — presumably to avoid checking mid-instruction while
// raw bytes are being emitted (TODO: confirm).
template <>
void Assembler::EmitHelper(uint8_t x) {
  *reinterpret_cast<uint8_t*>(pc_) = x;
  pc_ += sizeof(x);
  if (reinterpret_cast<intptr_t>(pc_) % kInstrSize == 0) {
    CheckTrampolinePoolQuick();
  }
}
483
// Emits one instruction, growing the buffer first unless growth is
// currently blocked.
void Assembler::emit(Instr x, CompactBranchType is_compact_branch) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  EmitHelper(x, is_compact_branch);
}
490
491
492 } // namespace internal
493 } // namespace v8
494
495 #endif // V8_MIPS_ASSEMBLER_MIPS_INL_H_
496