// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// The original source code covered by the above license has been
// modified significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.


#ifndef V8_MIPS_ASSEMBLER_MIPS_INL_H_
#define V8_MIPS_ASSEMBLER_MIPS_INL_H_

#include "src/mips64/assembler-mips64.h"

#include "src/assembler.h"
#include "src/debug/debug.h"


namespace v8 {
namespace internal {

bool CpuFeatures::SupportsCrankshaft() { return IsSupported(FPU); }

bool CpuFeatures::SupportsSimd128() { return false; }

// -----------------------------------------------------------------------------
// Operand and MemOperand.

Operand::Operand(int64_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm64_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm64_ = reinterpret_cast<int64_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm64_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


Operand::Operand(Register rm) {
  rm_ = rm;
}


bool Operand::is_reg() const {
  return rm_.is_valid();
}


// -----------------------------------------------------------------------------
// RelocInfo.

void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    byte* p = reinterpret_cast<byte*>(pc_);
    int count = Assembler::RelocateInternalReference(rmode_, p, delta);
    Assembler::FlushICache(isolate_, p, count * sizeof(uint32_t));
  }
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         rmode_ == EMBEDDED_OBJECT ||
         rmode_ == EXTERNAL_REFERENCE);
  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LUI/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written. In this case the target_address_address function should
  // return the end of the instructions to be patched, allowing the
  // deserializer to deserialize the instructions as raw bytes and put them in
  // place, ready to be patched with the target. After jump optimization,
  // that is the address of the instruction that follows the J/JAL/JR/JALR
  // instruction.
  // return reinterpret_cast<Address>(
  //     pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
  return reinterpret_cast<Address>(
      pc_ + Assembler::kInstructionsFor64BitConstant * Assembler::kInstrSize);
}
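
// Note on the constant size above: in this port a 64-bit target address is
// materialized inline with a multi-instruction sequence (a lui/ori/dsll/ori
// style load) rather than fetched from a constant pool, so the serializer is
// pointed at the first instruction past that sequence, i.e.
// kInstructionsFor64BitConstant instructions after pc_.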


Address RelocInfo::constant_pool_entry_address() {
  UNREACHABLE();
  return NULL;
}


int RelocInfo::target_address_size() {
  return Assembler::kSpecialTargetSize;
}


Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}


void Assembler::set_target_internal_reference_encoded_at(Address pc,
                                                         Address target) {
  // Encoded internal references are j/jal instructions.
  Instr instr = Assembler::instr_at(pc + 0 * Assembler::kInstrSize);

  uint64_t imm28 =
      (reinterpret_cast<uint64_t>(target) & static_cast<uint64_t>(kImm28Mask));

  instr &= ~kImm26Mask;
  uint64_t imm26 = imm28 >> 2;
  DCHECK(is_uint26(imm26));

  instr_at_put(pc, instr | (imm26 & kImm26Mask));
  // Currently used only by deserializer, and all code will be flushed
  // after complete deserialization, no need to flush on each reference.
}
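
// Encoding sketch for the function above (illustrative values, not taken from
// real generated code): a j/jal target within the current 256 MB region keeps
// only bits [27:2] of the destination address in the 26-bit immediate field.
// For a hypothetical target 0x12345678:
//   imm28 = 0x12345678 & kImm28Mask = 0x02345678
//   imm26 = imm28 >> 2              = 0x008D159E
// which is OR-ed into the instruction after the old immediate is masked out.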


void Assembler::deserialization_set_target_internal_reference_at(
    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
  if (mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    DCHECK(IsJ(instr_at(pc)));
    set_target_internal_reference_encoded_at(pc, target);
  } else {
    DCHECK(mode == RelocInfo::INTERNAL_REFERENCE);
    Memory::Address_at(pc) = target;
  }
}


Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(isolate_, pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target));
    host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
  }
}


Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_internal_reference() {
  if (rmode_ == INTERNAL_REFERENCE) {
    return Memory::Address_at(pc_);
  } else {
    // Encoded internal references are j/jal instructions.
    DCHECK(rmode_ == INTERNAL_REFERENCE_ENCODED);
    Instr instr = Assembler::instr_at(pc_ + 0 * Assembler::kInstrSize);
    instr &= kImm26Mask;
    uint64_t imm28 = instr << 2;
    uint64_t segment =
        (reinterpret_cast<uint64_t>(pc_) & ~static_cast<uint64_t>(kImm28Mask));
    return reinterpret_cast<Address>(segment | imm28);
  }
}
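
// Decoding sketch for the encoded case above (hypothetical values): if the
// j/jal immediate is 0x008D159E and pc_ is 0x0000000010020304, then
//   imm28   = 0x008D159E << 2   = 0x02345678
//   segment = pc_ & ~kImm28Mask = 0x0000000010000000
// and the reconstructed target is segment | imm28 = 0x0000000012345678.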


Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE || rmode_ == INTERNAL_REFERENCE_ENCODED);
  return reinterpret_cast<Address>(pc_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(host(), this,
                                                                  cell);
  }
}


static const int kNoCodeAgeSequenceLength = 9 * Assembler::kInstrSize;

Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on MIPS64.
  return Handle<Object>();
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + Assembler::kInstrSize, host_));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(isolate_, pc_ + Assembler::kInstrSize, host_,
                                   stub->instruction_start());
}


Address RelocInfo::debug_call_address() {
  // The pc_ offset of 0 assumes patched debug break slot or return
  // sequence.
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  return Assembler::target_address_at(pc_, host_);
}


void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  // The pc_ offset of 0 assumes patched debug break slot or return
  // sequence.
  Assembler::set_target_address_at(isolate_, pc_, host_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else if (IsInternalReferenceEncoded(rmode_)) {
    Assembler::set_target_internal_reference_encoded_at(pc_, nullptr);
  } else {
    Assembler::set_target_address_at(isolate_, pc_, host_, NULL);
  }
}

template <typename ObjectVisitor>
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Assembler.


void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
}


void Assembler::CheckTrampolinePoolQuick(int extra_instructions) {
  if (pc_offset() >= next_buffer_check_ - extra_instructions * kInstrSize) {
    CheckTrampolinePool();
  }
}


void Assembler::CheckForEmitInForbiddenSlot() {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  if (IsPrevInstrCompactBranch()) {
    // Nop instruction to precede a CTI in the forbidden slot:
    Instr nop = SPECIAL | SLL;
    *reinterpret_cast<Instr*>(pc_) = nop;
    pc_ += kInstrSize;

    ClearCompactBranchState();
  }
}
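
// Note: SPECIAL is opcode 0 and SLL is function code 0, so with all register
// and shift-amount fields left at zero the word emitted above is 0x00000000,
// i.e. the canonical MIPS nop (sll zero, zero, 0). It fills the forbidden
// slot so that a compact branch is never followed directly by another
// control-transfer instruction.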


void Assembler::EmitHelper(Instr x, CompactBranchType is_compact_branch) {
  if (IsPrevInstrCompactBranch()) {
    if (Instruction::IsForbiddenAfterBranchInstr(x)) {
      // Nop instruction to precede a CTI in the forbidden slot:
      Instr nop = SPECIAL | SLL;
      *reinterpret_cast<Instr*>(pc_) = nop;
      pc_ += kInstrSize;
    }
    ClearCompactBranchState();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  if (is_compact_branch == CompactBranchType::COMPACT_BRANCH) {
    EmittedCompactBranchInstruction();
  }
  CheckTrampolinePoolQuick();
}

template <>
inline void Assembler::EmitHelper(uint8_t x);

template <typename T>
void Assembler::EmitHelper(T x) {
  *reinterpret_cast<T*>(pc_) = x;
  pc_ += sizeof(x);
  CheckTrampolinePoolQuick();
}

template <>
void Assembler::EmitHelper(uint8_t x) {
  *reinterpret_cast<uint8_t*>(pc_) = x;
  pc_ += sizeof(x);
  if (reinterpret_cast<intptr_t>(pc_) % kInstrSize == 0) {
    CheckTrampolinePoolQuick();
  }
}
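
// The uint8_t specialization above is used when raw data bytes are written
// into the instruction stream (for example via data directives such as db());
// deferring the trampoline pool check until pc_ is instruction-aligned again
// keeps the pool from being emitted in the middle of a partially written word.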

void Assembler::emit(Instr x, CompactBranchType is_compact_branch) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  EmitHelper(x, is_compact_branch);
}


void Assembler::emit(uint64_t data) {
  CheckForEmitInForbiddenSlot();
  EmitHelper(data);
}


}  // namespace internal
}  // namespace v8

#endif  // V8_MIPS_ASSEMBLER_MIPS_INL_H_