// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// The original source code covered by the above license has been
// modified significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.

#ifndef V8_MIPS_ASSEMBLER_MIPS_INL_H_
#define V8_MIPS_ASSEMBLER_MIPS_INL_H_

#include "src/mips64/assembler-mips64.h"

#include "src/assembler.h"
#include "src/debug/debug.h"


namespace v8 {
namespace internal {

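// Crankshaft code generation on this port requires FPU support.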
bool CpuFeatures::SupportsCrankshaft() { return IsSupported(FPU); }


// -----------------------------------------------------------------------------
// Operand and MemOperand.

Operand::Operand(int64_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm64_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm64_ = reinterpret_cast<int64_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm64_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


Operand::Operand(Register rm) {
  rm_ = rm;
}


bool Operand::is_reg() const {
  return rm_.is_valid();
}

// -----------------------------------------------------------------------------
// RelocInfo.

void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    byte* p = reinterpret_cast<byte*>(pc_);
    int count = Assembler::RelocateInternalReference(rmode_, p, delta);
    Assembler::FlushICache(isolate_, p, count * sizeof(uint32_t));
  }
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         rmode_ == EMBEDDED_OBJECT ||
         rmode_ == EXTERNAL_REFERENCE);
  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LUI/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written. In this case the target_address_address function should
  // return the end of the instructions to be patched, allowing the
  // deserializer to deserialize the instructions as raw bytes and put them in
  // place, ready to be patched with the target. After jump optimization,
  // that is the address of the instruction that follows the J/JAL/JR/JALR
  // instruction.
  // return reinterpret_cast<Address>(
  //     pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
  return reinterpret_cast<Address>(
      pc_ + Assembler::kInstructionsFor64BitConstant * Assembler::kInstrSize);
}

Address RelocInfo::constant_pool_entry_address() {
  UNREACHABLE();
  return NULL;
}


int RelocInfo::target_address_size() {
  return Assembler::kSpecialTargetSize;
}


void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(isolate_, pc_, host_, target,
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

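// The call target is encoded in the instructions that end at the return
// address; that patchable sequence starts kCallTargetAddressOffset bytes
// before the return address.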
Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}

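// Patch the 26-bit immediate of the j/jal instruction at |pc| so that the
// encoded internal reference points at |target|. Only the low 28 bits of
// |target| are representable, so it must lie in the same 256 MB region as
// the instruction.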
void Assembler::set_target_internal_reference_encoded_at(Address pc,
                                                          Address target) {
  // Encoded internal references are j/jal instructions.
  Instr instr = Assembler::instr_at(pc + 0 * Assembler::kInstrSize);

  uint64_t imm28 =
      (reinterpret_cast<uint64_t>(target) & static_cast<uint64_t>(kImm28Mask));

  instr &= ~kImm26Mask;
  uint64_t imm26 = imm28 >> 2;
  DCHECK(is_uint26(imm26));

  instr_at_put(pc, instr | (imm26 & kImm26Mask));
  // This is currently used only by the deserializer, and all code will be
  // flushed after deserialization completes, so there is no need to flush
  // the icache for each reference.
}

void Assembler::deserialization_set_target_internal_reference_at(
    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
  if (mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    DCHECK(IsJ(instr_at(pc)));
    set_target_internal_reference_encoded_at(pc, target);
  } else {
    DCHECK(mode == RelocInfo::INTERNAL_REFERENCE);
    Memory::Address_at(pc) = target;
  }
}

Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(isolate_, pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}

Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_internal_reference() {
  if (rmode_ == INTERNAL_REFERENCE) {
    return Memory::Address_at(pc_);
  } else {
    // Encoded internal references are j/jal instructions.
    DCHECK(rmode_ == INTERNAL_REFERENCE_ENCODED);
    Instr instr = Assembler::instr_at(pc_ + 0 * Assembler::kInstrSize);
    instr &= kImm26Mask;
    uint64_t imm28 = instr << 2;
    uint64_t segment =
        (reinterpret_cast<uint64_t>(pc_) & ~static_cast<uint64_t>(kImm28Mask));
    return reinterpret_cast<Address>(segment | imm28);
  }
}


Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE || rmode_ == INTERNAL_REFERENCE_ENCODED);
  return reinterpret_cast<Address>(pc_);
}

Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}

Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550): We are passing NULL as a slot because a cell can never be
    // on an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}

static const int kNoCodeAgeSequenceLength = 9 * Assembler::kInstrSize;

Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on MIPS64.
  return Handle<Object>();
}

Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + Assembler::kInstrSize, host_));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(isolate_, pc_ + Assembler::kInstrSize, host_,
                                   stub->instruction_start());
}

Address RelocInfo::debug_call_address() {
  // The pc_ offset of 0 assumes a patched debug break slot or return
  // sequence.
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  return Assembler::target_address_at(pc_, host_);
}


void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  // The pc_ offset of 0 assumes a patched debug break slot or return
  // sequence.
  Assembler::set_target_address_at(isolate_, pc_, host_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else if (IsInternalReferenceEncoded(rmode_)) {
    Assembler::set_target_internal_reference_encoded_at(pc_, nullptr);
  } else {
    Assembler::set_target_address_at(isolate_, pc_, host_, NULL);
  }
}


bool RelocInfo::IsPatchedReturnSequence() {
  Instr instr0 = Assembler::instr_at(pc_);                              // lui.
  Instr instr1 = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize);  // ori.
  Instr instr2 = Assembler::instr_at(pc_ + 2 * Assembler::kInstrSize);  // dsll.
  Instr instr3 = Assembler::instr_at(pc_ + 3 * Assembler::kInstrSize);  // ori.
  Instr instr4 = Assembler::instr_at(pc_ + 4 * Assembler::kInstrSize);  // jalr.

  bool patched_return = ((instr0 & kOpcodeMask) == LUI &&
                         (instr1 & kOpcodeMask) == ORI &&
                         (instr2 & kFunctionFieldMask) == DSLL &&
                         (instr3 & kOpcodeMask) == ORI &&
                         (instr4 & kFunctionFieldMask) == JALR);
  return patched_return;
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}

void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}

// -----------------------------------------------------------------------------
// Assembler.

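// Grow the assembly buffer once the remaining space has shrunk to the guard
// gap, so that subsequent emits always have room.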
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
}

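// Emit the trampoline pool if the current position, plus the instructions
// about to be emitted, would reach the next pool check point.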
void Assembler::CheckTrampolinePoolQuick(int extra_instructions) {
  if (pc_offset() >= next_buffer_check_ - extra_instructions * kInstrSize) {
    CheckTrampolinePool();
  }
}

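// Raw data must not land in the forbidden slot of a preceding compact branch;
// emit a padding nop first when that would happen.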
void Assembler::CheckForEmitInForbiddenSlot() {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  if (IsPrevInstrCompactBranch()) {
    // Nop instruction to precede a CTI in forbidden slot:
    Instr nop = SPECIAL | SLL;
    *reinterpret_cast<Instr*>(pc_) = nop;
    pc_ += kInstrSize;

    ClearCompactBranchState();
  }
}

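// Emit one instruction. If the previous instruction was a compact branch and
// |x| is not allowed in its forbidden slot, a nop is inserted first.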
void Assembler::EmitHelper(Instr x, CompactBranchType is_compact_branch) {
  if (IsPrevInstrCompactBranch()) {
    if (Instruction::IsForbiddenAfterBranchInstr(x)) {
      // Nop instruction to precede a CTI in forbidden slot:
      Instr nop = SPECIAL | SLL;
      *reinterpret_cast<Instr*>(pc_) = nop;
      pc_ += kInstrSize;
    }
    ClearCompactBranchState();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  if (is_compact_branch == CompactBranchType::COMPACT_BRANCH) {
    EmittedCompactBranchInstruction();
  }
  CheckTrampolinePoolQuick();
}

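// Emit raw data of type T directly into the instruction stream (used by
// emit(uint64_t) below).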
template <typename T>
void Assembler::EmitHelper(T x) {
  *reinterpret_cast<T*>(pc_) = x;
  pc_ += sizeof(x);
  CheckTrampolinePoolQuick();
}


void Assembler::emit(Instr x, CompactBranchType is_compact_branch) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  EmitHelper(x, is_compact_branch);
}


void Assembler::emit(uint64_t data) {
  CheckForEmitInForbiddenSlot();
  EmitHelper(data);
}


}  // namespace internal
}  // namespace v8

#endif  // V8_MIPS_ASSEMBLER_MIPS_INL_H_