1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions
6 // are met:
7 //
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
10 //
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the
14 // distribution.
15 //
16 // - Neither the name of Sun Microsystems or the names of contributors may
17 // be used to endorse or promote products derived from this software without
18 // specific prior written permission.
19 //
20 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
23 // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
24 // COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
25 // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
26 // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27 // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
28 // HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
29 // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30 // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
31 // OF THE POSSIBILITY OF SUCH DAMAGE.
32
33 // The original source code covered by the above license above has been modified
34 // significantly by Google Inc.
35 // Copyright 2014 the V8 project authors. All rights reserved.
36
37 #ifndef V8_PPC_ASSEMBLER_PPC_INL_H_
38 #define V8_PPC_ASSEMBLER_PPC_INL_H_
39
40 #include "src/ppc/assembler-ppc.h"
41
42 #include "src/assembler.h"
43 #include "src/debug/debug.h"
44 #include "src/objects-inl.h"
45
46 namespace v8 {
47 namespace internal {
48
49
SupportsCrankshaft()50 bool CpuFeatures::SupportsCrankshaft() { return true; }
51
SupportsSimd128()52 bool CpuFeatures::SupportsSimd128() { return false; }
53
// Relocates this entry by |delta| bytes; called when the host code object
// has been moved in memory.
void RelocInfo::apply(intptr_t delta) {
  // absolute code pointer inside code object moves with the code object.
  if (IsInternalReference(rmode_)) {
    // Jump table entry: the absolute target is stored as raw data at pc_.
    Address target = Memory::Address_at(pc_);
    Memory::Address_at(pc_) = target + delta;
  } else {
    // mov sequence: the target is encoded inside the instructions at pc_,
    // so it must be read and re-written through the Assembler.
    DCHECK(IsInternalReferenceEncoded(rmode_));
    Address target = Assembler::target_address_at(pc_, host_);
    Assembler::set_target_address_at(isolate_, pc_, host_, target + delta,
                                     SKIP_ICACHE_FLUSH);
  }
}
68
69
target_internal_reference()70 Address RelocInfo::target_internal_reference() {
71 if (IsInternalReference(rmode_)) {
72 // Jump table entry
73 return Memory::Address_at(pc_);
74 } else {
75 // mov sequence
76 DCHECK(IsInternalReferenceEncoded(rmode_));
77 return Assembler::target_address_at(pc_, host_);
78 }
79 }
80
81
// Returns the location of the internal reference itself; for both encodings
// this is simply the relocated pc.
Address RelocInfo::target_internal_reference_address() {
  DCHECK(IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
  return reinterpret_cast<Address>(pc_);
}
86
87
// Returns the call/jump target this relocation entry refers to.
Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}
92
// Returns the address of the location holding the target, for use by the
// serializer (see comments below on why this is pc_ in both cases).
Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) ||
         rmode_ == EMBEDDED_OBJECT || rmode_ == EXTERNAL_REFERENCE);

  if (FLAG_enable_embedded_constant_pool &&
      Assembler::IsConstantPoolLoadStart(pc_)) {
    // We return the PC for embedded constant pool since this function is used
    // by the serializer and expects the address to reside within the code
    // object.
    return reinterpret_cast<Address>(pc_);
  }

  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LIS/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written.
  return reinterpret_cast<Address>(pc_);
}
116
117
// Returns the address of the constant pool slot that holds the target.
// Only meaningful when the embedded constant pool is enabled and pc_ points
// at a constant pool load sequence; anything else is a caller bug.
Address RelocInfo::constant_pool_entry_address() {
  if (FLAG_enable_embedded_constant_pool) {
    Address constant_pool = host_->constant_pool();
    DCHECK(constant_pool);
    ConstantPoolEntry::Access access;
    if (Assembler::IsConstantPoolLoadStart(pc_, &access))
      return Assembler::target_constant_pool_address_at(
          pc_, constant_pool, access, ConstantPoolEntry::INTPTR);
  }
  UNREACHABLE();
  return NULL;
}
130
131
// Size in bytes of a special target; used by the serializer to step over it.
int RelocInfo::target_address_size() { return Assembler::kSpecialTargetSize; }
133
target_address_at(Address pc,Code * code)134 Address Assembler::target_address_at(Address pc, Code* code) {
135 Address constant_pool = code ? code->constant_pool() : NULL;
136 return target_address_at(pc, constant_pool);
137 }
138
set_target_address_at(Isolate * isolate,Address pc,Code * code,Address target,ICacheFlushMode icache_flush_mode)139 void Assembler::set_target_address_at(Isolate* isolate, Address pc, Code* code,
140 Address target,
141 ICacheFlushMode icache_flush_mode) {
142 Address constant_pool = code ? code->constant_pool() : NULL;
143 set_target_address_at(isolate, pc, constant_pool, target, icache_flush_mode);
144 }
145
Address Assembler::target_address_from_return_address(Address pc) {
  // Returns the address of the call target from the return address that will
  // be returned to after a call.
  // Call sequence is :
  //  mov   ip, @ call address
  //  mtlr  ip
  //  blrl
  //                      @ return address
  // |len| is the number of instructions in the "mov" part of the sequence;
  // mtlr + blrl account for the extra 2 instructions stepped back below.
  int len;
  ConstantPoolEntry::Access access;
  if (FLAG_enable_embedded_constant_pool &&
      IsConstantPoolLoadEnd(pc - 3 * kInstrSize, &access)) {
    // Constant pool load: 2 instructions for an overflowed pool entry
    // (addis + load), 1 for a regular entry (single load).
    len = (access == ConstantPoolEntry::OVERFLOWED) ? 2 : 1;
  } else {
    // Full lis/ori mov sequence.
    len = kMovInstructionsNoConstantPool;
  }
  return pc - (len + 2) * kInstrSize;
}
164
165
// Inverse of target_address_from_return_address: given the start of the call
// sequence (the mov), compute the return address after the blrl.
Address Assembler::return_address_from_call_start(Address pc) {
  int len;
  ConstantPoolEntry::Access access;
  if (FLAG_enable_embedded_constant_pool &&
      IsConstantPoolLoadStart(pc, &access)) {
    // Constant pool load: 2 instructions for an overflowed pool entry
    // (addis + load), 1 for a regular entry (single load).
    len = (access == ConstantPoolEntry::OVERFLOWED) ? 2 : 1;
  } else {
    // Full lis/ori mov sequence.
    len = kMovInstructionsNoConstantPool;
  }
  // +2 covers the mtlr and blrl instructions that follow the mov.
  return pc + (len + 2) * kInstrSize;
}
177
// Returns the embedded object this relocation refers to.
Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}
182
183
// Returns the embedded object as a Handle; the encoded address is the
// handle location itself (an Object**), not the object pointer.
Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(
      reinterpret_cast<Object**>(Assembler::target_address_at(pc_, host_)));
}
189
190
// Patches the embedded object pointer and, unless suppressed, records the
// write so incremental marking / the heap see the new code->object edge.
void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(isolate_, pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  // Write barrier only applies for heap objects embedded in live code.
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target));
    host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
  }
}
205
206
// Returns the external (C++) address this relocation refers to.
Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}
211
212
// Returns the runtime entry address; identical to target_address() but with
// a stricter mode check.
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}
217
218
set_target_runtime_entry(Address target,WriteBarrierMode write_barrier_mode,ICacheFlushMode icache_flush_mode)219 void RelocInfo::set_target_runtime_entry(Address target,
220 WriteBarrierMode write_barrier_mode,
221 ICacheFlushMode icache_flush_mode) {
222 DCHECK(IsRuntimeEntry(rmode_));
223 if (target_address() != target)
224 set_target_address(target, write_barrier_mode, icache_flush_mode);
225 }
226
227
target_cell_handle()228 Handle<Cell> RelocInfo::target_cell_handle() {
229 DCHECK(rmode_ == RelocInfo::CELL);
230 Address address = Memory::Address_at(pc_);
231 return Handle<Cell>(reinterpret_cast<Cell**>(address));
232 }
233
234
// Returns the Cell; the address stored at pc_ points at the cell's value
// slot, which FromValueAddress converts back to the Cell itself.
Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}
239
240
// Points this relocation at |cell|: stores the address of the cell's value
// slot at pc_ and records the write for incremental marking.
void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  // Store the value-slot address, mirroring target_cell()'s decoding.
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(host(), this,
                                                                  cell);
  }
}
251
252
// Code aging support: the "young" prologue and the aging (patched) sequence
// must occupy the same number of instructions, so the shorter variant is
// padded with nops up to the common length.
static const int kNoCodeAgeInstructions =
    FLAG_enable_embedded_constant_pool ? 7 : 6;
static const int kCodeAgingInstructions =
    Assembler::kMovInstructionsNoConstantPool + 3;
// Common sequence length: the larger of the two variants.
static const int kNoCodeAgeSequenceInstructions =
    ((kNoCodeAgeInstructions >= kCodeAgingInstructions)
         ? kNoCodeAgeInstructions
         : kCodeAgingInstructions);
// Padding nops each variant needs to reach the common length.
static const int kNoCodeAgeSequenceNops =
    (kNoCodeAgeSequenceInstructions - kNoCodeAgeInstructions);
static const int kCodeAgingSequenceNops =
    (kNoCodeAgeSequenceInstructions - kCodeAgingInstructions);
// Byte offset from the sequence start to where the stub target is encoded.
static const int kCodeAgingTargetDelta = 1 * Assembler::kInstrSize;
// Total byte length of the (padded) no-code-age sequence.
static const int kNoCodeAgeSequenceLength =
    (kNoCodeAgeSequenceInstructions * Assembler::kInstrSize);
268
269
// Not implemented for PPC; code age stubs are accessed via code_age_stub().
Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on PPC.
  return Handle<Object>();
}
274
275
// Returns the code-aging stub whose address is encoded one instruction
// (kCodeAgingTargetDelta bytes) into the sequence.
Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + kCodeAgingTargetDelta, host_));
}
281
282
// Patches the code-aging stub target encoded kCodeAgingTargetDelta bytes
// into the sequence.
void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(isolate_, pc_ + kCodeAgingTargetDelta, host_,
                                   stub->instruction_start(),
                                   icache_flush_mode);
}
290
291
// Returns the call target patched into a debug break slot.
Address RelocInfo::debug_call_address() {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  return Assembler::target_address_at(pc_, host_);
}
296
297
// Patches the call target of a debug break slot and records the new
// code->code edge for incremental marking.
void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  Assembler::set_target_address_at(isolate_, pc_, host_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
307
308
// Clears the target of this relocation entry (sets it to NULL), using the
// storage scheme appropriate for the relocation mode.
void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
  if (IsInternalReference(rmode_)) {
    // Jump table entry: the target is raw data at pc_.
    Memory::Address_at(pc_) = NULL;
  } else if (IsInternalReferenceEncoded(rmode_)) {
    // mov sequence
    // Currently used only by deserializer, no need to flush.
    Assembler::set_target_address_at(isolate_, pc_, host_, NULL,
                                     SKIP_ICACHE_FLUSH);
  } else {
    // All other modes: patch through the assembler with the default flush.
    Assembler::set_target_address_at(isolate_, pc_, host_, NULL);
  }
}
325
// Dispatches this relocation entry to the matching visitor callback based on
// its mode. Note the ordering: debug break slots are only visited when they
// have actually been patched with a call.
template <typename ObjectVisitor>
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}
349
350
// Static-visitor variant of Visit(); same mode dispatch, but calls the
// visitor's static member functions (with the heap where required).
template <typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
374
Operand(intptr_t immediate,RelocInfo::Mode rmode)375 Operand::Operand(intptr_t immediate, RelocInfo::Mode rmode) {
376 rm_ = no_reg;
377 imm_ = immediate;
378 rmode_ = rmode;
379 }
380
Operand(const ExternalReference & f)381 Operand::Operand(const ExternalReference& f) {
382 rm_ = no_reg;
383 imm_ = reinterpret_cast<intptr_t>(f.address());
384 rmode_ = RelocInfo::EXTERNAL_REFERENCE;
385 }
386
Operand(Smi * value)387 Operand::Operand(Smi* value) {
388 rm_ = no_reg;
389 imm_ = reinterpret_cast<intptr_t>(value);
390 rmode_ = kRelocInfo_NONEPTR;
391 }
392
Operand(Register rm)393 Operand::Operand(Register rm) {
394 rm_ = rm;
395 rmode_ = kRelocInfo_NONEPTR; // PPC -why doesn't ARM do this?
396 }
397
CheckBuffer()398 void Assembler::CheckBuffer() {
399 if (buffer_space() <= kGap) {
400 GrowBuffer();
401 }
402 }
403
// Registers one more forward branch that may need a trampoline slot and
// updates the pc offset at which the trampoline pool must be checked.
void Assembler::TrackBranch() {
  DCHECK(!trampoline_emitted_);
  int count = tracked_branch_count_++;
  if (count == 0) {
    // First tracked branch: the check point is the farthest pc a conditional
    // branch can still reach.
    // We leave space (kMaxBlockTrampolineSectionSize)
    // for BlockTrampolinePoolScope buffer.
    next_trampoline_check_ =
        pc_offset() + kMaxCondBranchReach - kMaxBlockTrampolineSectionSize;
  } else {
    // Each additional branch reserves one more trampoline slot, pulling the
    // check point closer.
    next_trampoline_check_ -= kTrampolineSlotsSize;
  }
}
416
UntrackBranch()417 void Assembler::UntrackBranch() {
418 DCHECK(!trampoline_emitted_);
419 DCHECK(tracked_branch_count_ > 0);
420 int count = --tracked_branch_count_;
421 if (count == 0) {
422 // Reset
423 next_trampoline_check_ = kMaxInt;
424 } else {
425 next_trampoline_check_ += kTrampolineSlotsSize;
426 }
427 }
428
CheckTrampolinePoolQuick()429 void Assembler::CheckTrampolinePoolQuick() {
430 if (pc_offset() >= next_trampoline_check_) {
431 CheckTrampolinePool();
432 }
433 }
434
// Appends one instruction to the buffer. Buffer growth is checked before
// the write; the trampoline pool check runs after pc_ has advanced.
void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  CheckTrampolinePoolQuick();
}
441
// True when this operand names a register (rather than an immediate).
bool Operand::is_reg() const { return rm_.is_valid(); }
443
444
// Fetch the 32bit value from the FIXED_SEQUENCE lis/ori
// (or from the constant pool when the embedded pool is in use).
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  if (FLAG_enable_embedded_constant_pool && constant_pool) {
    ConstantPoolEntry::Access access;
    if (IsConstantPoolLoadStart(pc, &access))
      // The target lives in the pool slot, not in the instruction stream.
      return Memory::Address_at(target_constant_pool_address_at(
          pc, constant_pool, access, ConstantPoolEntry::INTPTR));
  }

  Instr instr1 = instr_at(pc);
  Instr instr2 = instr_at(pc + kInstrSize);
  // Interpret 2 instructions generated by lis/ori
  if (IsLis(instr1) && IsOri(instr2)) {
#if V8_TARGET_ARCH_PPC64
    // 64-bit mov is a 5-instruction sequence; instruction 3 (a shift) carries
    // no immediate bits, so only instructions 1, 2, 4, 5 are read.
    Instr instr4 = instr_at(pc + (3 * kInstrSize));
    Instr instr5 = instr_at(pc + (4 * kInstrSize));
    // Assemble the 64 bit value from four 16-bit immediates.
    uint64_t hi = (static_cast<uint32_t>((instr1 & kImm16Mask) << 16) |
                   static_cast<uint32_t>(instr2 & kImm16Mask));
    uint64_t lo = (static_cast<uint32_t>((instr4 & kImm16Mask) << 16) |
                   static_cast<uint32_t>(instr5 & kImm16Mask));
    return reinterpret_cast<Address>((hi << 32) | lo);
#else
    // Assemble the 32 bit value from two 16-bit immediates.
    return reinterpret_cast<Address>(((instr1 & kImm16Mask) << 16) |
                                     (instr2 & kImm16Mask));
#endif
  }

  UNREACHABLE();
  return NULL;
}
477
478
// Opcode of an intptr-sized load: doubleword on PPC64, word otherwise.
#if V8_TARGET_ARCH_PPC64
const uint32_t kLoadIntptrOpcode = LD;
#else
const uint32_t kLoadIntptrOpcode = LWZ;
#endif
484
// Constant pool load sequence detection:
// 1) REGULAR access:
//    load <dst>, kConstantPoolRegister + <offset>
//
// 2) OVERFLOWED access:
//    addis <scratch>, kConstantPoolRegister, <offset_high>
//    load <dst>, <scratch> + <offset_low>
//
// Returns true when the instruction at |pc| begins one of the two sequences
// above, optionally reporting which kind through |access|.
bool Assembler::IsConstantPoolLoadStart(Address pc,
                                        ConstantPoolEntry::Access* access) {
  Instr instr = instr_at(pc);
  uint32_t opcode = instr & kOpcodeMask;
  // Both sequences start with an instruction whose RA is the pool register.
  if (!GetRA(instr).is(kConstantPoolRegister)) return false;
  bool overflowed = (opcode == ADDIS);
#ifdef DEBUG
  // Verify the (possibly second) instruction really is a pool load.
  if (overflowed) {
    opcode = instr_at(pc + kInstrSize) & kOpcodeMask;
  }
  DCHECK(opcode == kLoadIntptrOpcode || opcode == LFD);
#endif
  if (access) {
    *access = (overflowed ? ConstantPoolEntry::OVERFLOWED
                          : ConstantPoolEntry::REGULAR);
  }
  return true;
}
510
511
// Returns true when the instruction at |pc| ends a constant pool load
// sequence (see IsConstantPoolLoadStart for the two sequence shapes),
// optionally reporting the access kind through |access|.
bool Assembler::IsConstantPoolLoadEnd(Address pc,
                                      ConstantPoolEntry::Access* access) {
  Instr instr = instr_at(pc);
  uint32_t opcode = instr & kOpcodeMask;
  bool overflowed = false;
  // A sequence always ends with a pool load instruction.
  if (!(opcode == kLoadIntptrOpcode || opcode == LFD)) return false;
  if (!GetRA(instr).is(kConstantPoolRegister)) {
    // Not a REGULAR load off the pool register: check for the OVERFLOWED
    // form, whose preceding instruction is addis off the pool register.
    instr = instr_at(pc - kInstrSize);
    opcode = instr & kOpcodeMask;
    if ((opcode != ADDIS) || !GetRA(instr).is(kConstantPoolRegister)) {
      return false;
    }
    overflowed = true;
  }
  if (access) {
    *access = (overflowed ? ConstantPoolEntry::OVERFLOWED
                          : ConstantPoolEntry::REGULAR);
  }
  return true;
}
532
533
// Decodes the pool-relative offset encoded in the load sequence at |pc|.
// For OVERFLOWED access the offset is split across the addis (high 16 bits)
// and the load (sign-extended low 16 bits).
int Assembler::GetConstantPoolOffset(Address pc,
                                     ConstantPoolEntry::Access access,
                                     ConstantPoolEntry::Type type) {
  bool overflowed = (access == ConstantPoolEntry::OVERFLOWED);
#ifdef DEBUG
  // The caller-supplied access kind must match what the code actually says.
  ConstantPoolEntry::Access access_check =
      static_cast<ConstantPoolEntry::Access>(-1);
  DCHECK(IsConstantPoolLoadStart(pc, &access_check));
  DCHECK(access_check == access);
#endif
  int offset;
  if (overflowed) {
    offset = (instr_at(pc) & kImm16Mask) << 16;
    offset += SIGN_EXT_IMM16(instr_at(pc + kInstrSize) & kImm16Mask);
    // An overflowed entry must not fit in 16 bits, by definition.
    DCHECK(!is_int16(offset));
  } else {
    offset = SIGN_EXT_IMM16((instr_at(pc) & kImm16Mask));
  }
  return offset;
}
554
555
// Rewrites the immediate(s) of the constant pool load at |pc_offset| so the
// sequence accesses the pool slot at |offset| from the pool base.
void Assembler::PatchConstantPoolAccessInstruction(
    int pc_offset, int offset, ConstantPoolEntry::Access access,
    ConstantPoolEntry::Type type) {
  Address pc = buffer_ + pc_offset;
  bool overflowed = (access == ConstantPoolEntry::OVERFLOWED);
  // The access kind must agree with whether the offset fits in 16 bits.
  CHECK(overflowed != is_int16(offset));
#ifdef DEBUG
  ConstantPoolEntry::Access access_check =
      static_cast<ConstantPoolEntry::Access>(-1);
  DCHECK(IsConstantPoolLoadStart(pc, &access_check));
  DCHECK(access_check == access);
#endif
  if (overflowed) {
    int hi_word = static_cast<int>(offset >> 16);
    int lo_word = static_cast<int>(offset & 0xffff);
    // The load's displacement is sign-extended, so compensate the high
    // word when the low word would be interpreted as negative.
    if (lo_word & 0x8000) hi_word++;

    // Patch the addis (high word) and load (low word) immediates in place.
    Instr instr1 = instr_at(pc);
    Instr instr2 = instr_at(pc + kInstrSize);
    instr1 &= ~kImm16Mask;
    instr1 |= (hi_word & kImm16Mask);
    instr2 &= ~kImm16Mask;
    instr2 |= (lo_word & kImm16Mask);
    instr_at_put(pc, instr1);
    instr_at_put(pc + kInstrSize, instr2);
  } else {
    // REGULAR access: a single load instruction carries the whole offset.
    Instr instr = instr_at(pc);
    instr &= ~kImm16Mask;
    instr |= (offset & kImm16Mask);
    instr_at_put(pc, instr);
  }
}
588
589
target_constant_pool_address_at(Address pc,Address constant_pool,ConstantPoolEntry::Access access,ConstantPoolEntry::Type type)590 Address Assembler::target_constant_pool_address_at(
591 Address pc, Address constant_pool, ConstantPoolEntry::Access access,
592 ConstantPoolEntry::Type type) {
593 Address addr = constant_pool;
594 DCHECK(addr);
595 addr += GetConstantPoolOffset(pc, access, type);
596 return addr;
597 }
598
599
// This sets the branch destination (which gets loaded at the call address).
// This is for calls and branches within generated code.  The serializer
// has already deserialized the mov instructions etc.
// There is a FIXED_SEQUENCE assumption here
void Assembler::deserialization_set_special_target_at(
    Isolate* isolate, Address instruction_payload, Code* code, Address target) {
  set_target_address_at(isolate, instruction_payload, code, target);
}
608
609
deserialization_set_target_internal_reference_at(Isolate * isolate,Address pc,Address target,RelocInfo::Mode mode)610 void Assembler::deserialization_set_target_internal_reference_at(
611 Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
612 if (RelocInfo::IsInternalReferenceEncoded(mode)) {
613 Code* code = NULL;
614 set_target_address_at(isolate, pc, code, target, SKIP_ICACHE_FLUSH);
615 } else {
616 Memory::Address_at(pc) = target;
617 }
618 }
619
620
// This code assumes the FIXED_SEQUENCE of lis/ori.
// Patches the target encoded at |pc|: either by overwriting the constant
// pool slot, or by rewriting the immediates of the mov sequence in place
// (flushing the icache unless suppressed).
void Assembler::set_target_address_at(Isolate* isolate, Address pc,
                                      Address constant_pool, Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (FLAG_enable_embedded_constant_pool && constant_pool) {
    ConstantPoolEntry::Access access;
    if (IsConstantPoolLoadStart(pc, &access)) {
      // The target lives in the pool slot; no instruction bytes change,
      // so no icache flush is required.
      Memory::Address_at(target_constant_pool_address_at(
          pc, constant_pool, access, ConstantPoolEntry::INTPTR)) = target;
      return;
    }
  }

  Instr instr1 = instr_at(pc);
  Instr instr2 = instr_at(pc + kInstrSize);
  // Interpret 2 instructions generated by lis/ori
  if (IsLis(instr1) && IsOri(instr2)) {
#if V8_TARGET_ARCH_PPC64
    // 64-bit mov is 5 instructions; instruction 3 (a shift) carries no
    // immediate bits, so only instructions 1, 2, 4, 5 are rewritten.
    Instr instr4 = instr_at(pc + (3 * kInstrSize));
    Instr instr5 = instr_at(pc + (4 * kInstrSize));
    // Needs to be fixed up when mov changes to handle 64-bit values.
    uint32_t* p = reinterpret_cast<uint32_t*>(pc);
    uintptr_t itarget = reinterpret_cast<uintptr_t>(target);

    // Distribute the target 16 bits at a time, lowest bits into instr5.
    instr5 &= ~kImm16Mask;
    instr5 |= itarget & kImm16Mask;
    itarget = itarget >> 16;

    instr4 &= ~kImm16Mask;
    instr4 |= itarget & kImm16Mask;
    itarget = itarget >> 16;

    instr2 &= ~kImm16Mask;
    instr2 |= itarget & kImm16Mask;
    itarget = itarget >> 16;

    instr1 &= ~kImm16Mask;
    instr1 |= itarget & kImm16Mask;
    itarget = itarget >> 16;

    *p = instr1;
    *(p + 1) = instr2;
    *(p + 3) = instr4;
    *(p + 4) = instr5;
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(isolate, p, 5 * kInstrSize);
    }
#else
    // 32-bit mov: lis carries the high 16 bits, ori the low 16 bits.
    uint32_t* p = reinterpret_cast<uint32_t*>(pc);
    uint32_t itarget = reinterpret_cast<uint32_t>(target);
    int lo_word = itarget & kImm16Mask;
    int hi_word = itarget >> 16;
    instr1 &= ~kImm16Mask;
    instr1 |= hi_word;
    instr2 &= ~kImm16Mask;
    instr2 |= lo_word;

    *p = instr1;
    *(p + 1) = instr2;
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(isolate, p, 2 * kInstrSize);
    }
#endif
    return;
  }
  UNREACHABLE();
}
688 } // namespace internal
689 } // namespace v8
690
691 #endif // V8_PPC_ASSEMBLER_PPC_INL_H_
692