// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_X64_ASSEMBLER_X64_INL_H_
#define V8_X64_ASSEMBLER_X64_INL_H_

#include "src/x64/assembler-x64.h"

#include "src/base/cpu.h"
#include "src/debug/debug.h"
#include "src/objects-inl.h"
#include "src/v8memory.h"

namespace v8 {
namespace internal {

bool CpuFeatures::SupportsCrankshaft() { return true; }

bool CpuFeatures::SupportsSimd128() { return true; }

// -----------------------------------------------------------------------------
// Implementation of Assembler


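// 0xE8 is the opcode of the 32-bit pc-relative call instruction (call rel32);
// it is used below to recognize patched call sites in code age sequences.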
static const byte kCallOpcode = 0xE8;


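// The emit* helpers below store raw immediate values into the instruction
// stream at pc_ and advance pc_ by the size of the value written.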
void Assembler::emitl(uint32_t x) {
  Memory::uint32_at(pc_) = x;
  pc_ += sizeof(uint32_t);
}


void Assembler::emitp(void* x, RelocInfo::Mode rmode) {
  uintptr_t value = reinterpret_cast<uintptr_t>(x);
  Memory::uintptr_at(pc_) = value;
  if (!RelocInfo::IsNone(rmode)) {
    RecordRelocInfo(rmode, value);
  }
  pc_ += sizeof(uintptr_t);
}


void Assembler::emitq(uint64_t x) {
  Memory::uint64_at(pc_) = x;
  pc_ += sizeof(uint64_t);
}


void Assembler::emitw(uint16_t x) {
  Memory::uint16_at(pc_) = x;
  pc_ += sizeof(uint16_t);
}


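// Code targets are emitted as an index into the code_targets_ table rather
// than as an absolute address; repeated uses of the most recent target reuse
// its existing table entry.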
void Assembler::emit_code_target(Handle<Code> target,
                                 RelocInfo::Mode rmode,
                                 TypeFeedbackId ast_id) {
  DCHECK(RelocInfo::IsCodeTarget(rmode) ||
         rmode == RelocInfo::CODE_AGE_SEQUENCE);
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id.ToInt());
  } else {
    RecordRelocInfo(rmode);
  }
  int current = code_targets_.length();
  if (current > 0 && code_targets_.last().address() == target.address()) {
    // Optimization if we keep jumping to the same code target.
    emitl(current - 1);
  } else {
    code_targets_.Add(target);
    emitl(current);
  }
}


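// Runtime entries are emitted as a 32-bit offset from the start of the
// isolate's code range.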
void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
  DCHECK(RelocInfo::IsRuntimeEntry(rmode));
  RecordRelocInfo(rmode);
  emitl(static_cast<uint32_t>(
      entry - isolate()->heap()->memory_allocator()->code_range()->start()));
}

void Assembler::emit(Immediate x) {
  if (!RelocInfo::IsNone(x.rmode_)) {
    RecordRelocInfo(x.rmode_);
  }
  emitl(x.value_);
}

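// REX prefix layout: 0100WRXB. The emit_rex_64 helpers emit 0x48 (REX.W set,
// selecting a 64-bit operand size) with the R bit taken from the reg operand
// and the X/B bits taken from the r/m register or memory operand.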
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, const Operand& op) {
  emit(0x48 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
}


void Assembler::emit_rex_64(Register rm_reg) {
  DCHECK_EQ(rm_reg.code() & 0xf, rm_reg.code());
  emit(0x48 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(const Operand& op) {
  emit(0x48 | op.rex_);
}


void Assembler::emit_rex_32(Register reg, Register rm_reg) {
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(Register reg, const Operand& op) {
  emit(0x40 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_32(Register rm_reg) {
  emit(0x40 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(const Operand& op) {
  emit(0x40 | op.rex_);
}


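// The emit_optional_rex_32 helpers emit a REX prefix only when one of the
// operands is an extended register (r8-r15 or xmm8-xmm15); for the low eight
// registers no prefix is needed.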
void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
  byte rex_bits = reg.high_bit() << 2 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}

void Assembler::emit_optional_rex_32(XMMRegister rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}

void Assembler::emit_optional_rex_32(const Operand& op) {
  if (op.rex_ != 0) emit(0x40 | op.rex_);
}


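// VEX prefix encoding. The 3-byte form is 0xC4, then ~R ~X ~B | mmmmm
// (leading opcode map), then W | ~vvvv | L | pp. The 2-byte form is 0xC5,
// then ~R | ~vvvv | L | pp. The R/X/B and vvvv register bits are stored
// inverted, which is why the helpers below complement them before emitting.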
// byte 1 of 3-byte VEX
void Assembler::emit_vex3_byte1(XMMRegister reg, XMMRegister rm,
                                LeadingOpcode m) {
  byte rxb = ~((reg.high_bit() << 2) | rm.high_bit()) << 5;
  emit(rxb | m);
}


// byte 1 of 3-byte VEX
void Assembler::emit_vex3_byte1(XMMRegister reg, const Operand& rm,
                                LeadingOpcode m) {
  byte rxb = ~((reg.high_bit() << 2) | rm.rex_) << 5;
  emit(rxb | m);
}


// byte 1 of 2-byte VEX
void Assembler::emit_vex2_byte1(XMMRegister reg, XMMRegister v, VectorLength l,
                                SIMDPrefix pp) {
  byte rv = ~((reg.high_bit() << 4) | v.code()) << 3;
  emit(rv | l | pp);
}


// byte 2 of 3-byte VEX
void Assembler::emit_vex3_byte2(VexW w, XMMRegister v, VectorLength l,
                                SIMDPrefix pp) {
  emit(w | ((~v.code() & 0xf) << 3) | l | pp);
}


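// Prefer the shorter 2-byte VEX form; the 3-byte form is only required when
// the X or B bit is needed (extended r/m, index or base register), when the
// leading opcode map is not 0F, or when VEX.W must be 1.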
void Assembler::emit_vex_prefix(XMMRegister reg, XMMRegister vreg,
                                XMMRegister rm, VectorLength l, SIMDPrefix pp,
                                LeadingOpcode mm, VexW w) {
  if (rm.high_bit() || mm != k0F || w != kW0) {
    emit_vex3_byte0();
    emit_vex3_byte1(reg, rm, mm);
    emit_vex3_byte2(w, vreg, l, pp);
  } else {
    emit_vex2_byte0();
    emit_vex2_byte1(reg, vreg, l, pp);
  }
}


void Assembler::emit_vex_prefix(Register reg, Register vreg, Register rm,
                                VectorLength l, SIMDPrefix pp, LeadingOpcode mm,
                                VexW w) {
  XMMRegister ireg = {reg.code()};
  XMMRegister ivreg = {vreg.code()};
  XMMRegister irm = {rm.code()};
  emit_vex_prefix(ireg, ivreg, irm, l, pp, mm, w);
}


void Assembler::emit_vex_prefix(XMMRegister reg, XMMRegister vreg,
                                const Operand& rm, VectorLength l,
                                SIMDPrefix pp, LeadingOpcode mm, VexW w) {
  if (rm.rex_ || mm != k0F || w != kW0) {
    emit_vex3_byte0();
    emit_vex3_byte1(reg, rm, mm);
    emit_vex3_byte2(w, vreg, l, pp);
  } else {
    emit_vex2_byte0();
    emit_vex2_byte1(reg, vreg, l, pp);
  }
}


void Assembler::emit_vex_prefix(Register reg, Register vreg, const Operand& rm,
                                VectorLength l, SIMDPrefix pp, LeadingOpcode mm,
                                VexW w) {
  XMMRegister ireg = {reg.code()};
  XMMRegister ivreg = {vreg.code()};
  emit_vex_prefix(ireg, ivreg, rm, l, pp, mm, w);
}


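// pc points at the 32-bit displacement field of a pc-relative call or jump.
// The displacement is relative to the end of that field, which is why the
// target is pc + disp + 4 here and disp is target - pc - 4 when patching.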
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  return Memory::int32_at(pc) + pc + 4;
}


void Assembler::set_target_address_at(Isolate* isolate, Address pc,
                                      Address constant_pool, Address target,
                                      ICacheFlushMode icache_flush_mode) {
  Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
    Assembler::FlushICache(isolate, pc, sizeof(int32_t));
  }
}

Address Assembler::target_address_at(Address pc, Code* code) {
  Address constant_pool = code ? code->constant_pool() : NULL;
  return target_address_at(pc, constant_pool);
}

void Assembler::set_target_address_at(Isolate* isolate, Address pc, Code* code,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  Address constant_pool = code ? code->constant_pool() : NULL;
  set_target_address_at(isolate, pc, constant_pool, target, icache_flush_mode);
}

void Assembler::deserialization_set_target_internal_reference_at(
    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
  Memory::Address_at(pc) = target;
}


Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}

void Assembler::deserialization_set_special_target_at(
    Isolate* isolate, Address instruction_payload, Code* code, Address target) {
  set_target_address_at(isolate, instruction_payload, code, target);
}

Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
  return code_targets_[Memory::int32_at(pc)];
}


Address Assembler::runtime_entry_at(Address pc) {
  return Memory::int32_at(pc) +
         isolate()->heap()->memory_allocator()->code_range()->start();
}

// -----------------------------------------------------------------------------
// Implementation of RelocInfo

// The modes possibly affected by apply must be in kApplyMask.
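// apply() is called when generated code moves by delta bytes: code targets,
// runtime entries and patched code age calls have their 32-bit values
// re-biased by -delta, while absolute internal references point into the code
// object itself and therefore move with it (+delta).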
void RelocInfo::apply(intptr_t delta) {
  if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
  } else if (IsCodeAgeSequence(rmode_)) {
    if (*pc_ == kCallOpcode) {
      int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
      *p -= static_cast<int32_t>(delta);  // Relocate entry.
    }
  } else if (IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    Memory::Address_at(pc_) += delta;
  }
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
         || rmode_ == EMBEDDED_OBJECT
         || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


Address RelocInfo::constant_pool_entry_address() {
  UNREACHABLE();
  return NULL;
}


int RelocInfo::target_address_size() {
  if (IsCodedSpecially()) {
    return Assembler::kSpecialTargetSize;
  } else {
    return kPointerSize;
  }
}


Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  if (rmode_ == EMBEDDED_OBJECT) {
    return Memory::Object_Handle_at(pc_);
  } else {
    return origin->code_target_object_handle_at(pc_);
  }
}


Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return Memory::Address_at(pc_);
}


Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return Memory::Address_at(pc_);
}


Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Memory::Object_at(pc_) = target;
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
    Assembler::FlushICache(isolate_, pc_, sizeof(Address));
  }
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target));
    host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
  }
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return origin->runtime_entry_at(pc_);
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target) {
    set_target_address(target, write_barrier_mode, icache_flush_mode);
  }
}


Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
    Assembler::FlushICache(isolate_, pc_, sizeof(Address));
  }
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(host(), this,
                                                                  cell);
  }
}


void RelocInfo::WipeOut() {
  if (IsEmbeddedObject(rmode_) || IsExternalReference(rmode_) ||
      IsInternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    // Effectively write zero into the relocation.
    Assembler::set_target_address_at(isolate_, pc_, host_,
                                     pc_ + sizeof(int32_t));
  } else {
    UNREACHABLE();
  }
}


Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  DCHECK(*pc_ == kCallOpcode);
  return origin->code_target_object_handle_at(pc_ + 1);
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  DCHECK(*pc_ == kCallOpcode);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + 1, host_));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(*pc_ == kCallOpcode);
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(
      isolate_, pc_ + 1, host_, stub->instruction_start(), icache_flush_mode);
}


Address RelocInfo::debug_call_address() {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  return Memory::Address_at(pc_ + Assembler::kPatchDebugBreakSlotAddressOffset);
}


void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  Memory::Address_at(pc_ + Assembler::kPatchDebugBreakSlotAddressOffset) =
      target;
  Assembler::FlushICache(isolate_,
                         pc_ + Assembler::kPatchDebugBreakSlotAddressOffset,
                         sizeof(Address));
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

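// The Visit functions below dispatch on the relocation mode and hand the slot
// to the matching visitor callback; they are used when iterating over the
// relocation entries of a code object.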
template <typename ObjectVisitor>
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
    Assembler::FlushICache(isolate, pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template <typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
    Assembler::FlushICache(heap->isolate(), pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Implementation of Operand

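// ModRM byte layout: mod (2 bits) | reg (3 bits) | r/m (3 bits). set_modrm
// fills in only the mod and r/m fields; the reg field is ORed in later, when
// the instruction that uses this operand is emitted.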
void Operand::set_modrm(int mod, Register rm_reg) {
  DCHECK(is_uint2(mod));
  buf_[0] = mod << 6 | rm_reg.low_bits();
  // Set REX.B to the high bit of rm.code().
  rex_ |= rm_reg.high_bit();
}


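// SIB byte layout: scale (2 bits) | index (3 bits) | base (3 bits).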
void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  DCHECK(len_ == 1);
  DCHECK(is_uint2(scale));
  // Use SIB with no index register only for base rsp or r12. Otherwise we
  // would skip the SIB byte entirely.
  DCHECK(!index.is(rsp) || base.is(rsp) || base.is(r12));
  buf_[1] = (scale << 6) | (index.low_bits() << 3) | base.low_bits();
  rex_ |= index.high_bit() << 1 | base.high_bit();
  len_ = 2;
}

void Operand::set_disp8(int disp) {
  DCHECK(is_int8(disp));
  DCHECK(len_ == 1 || len_ == 2);
  int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int8_t);
}

void Operand::set_disp32(int disp) {
  DCHECK(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
}

void Operand::set_disp64(int64_t disp) {
  DCHECK_EQ(1, len_);
  int64_t* p = reinterpret_cast<int64_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(disp);
}
}  // namespace internal
}  // namespace v8

#endif  // V8_X64_ASSEMBLER_X64_INL_H_