// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_X64_ASSEMBLER_X64_INL_H_
#define V8_X64_ASSEMBLER_X64_INL_H_

#include "src/x64/assembler-x64.h"

#include "src/base/cpu.h"
#include "src/debug/debug.h"
#include "src/v8memory.h"

namespace v8 {
namespace internal {

bool CpuFeatures::SupportsCrankshaft() { return true; }


// -----------------------------------------------------------------------------
// Implementation of Assembler


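// 0xE8 is the opcode of a near call with a 32-bit pc-relative displacement.
// It is used below to recognize patched call and code-age sequences.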
static const byte kCallOpcode = 0xE8;
// The length of pushq(rbp), movp(rbp, rsp), Push(rsi) and Push(rdi).
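// With 64-bit pointers this is pushq rbp (1 byte), REX.W movq rbp, rsp
// (3 bytes), push rsi (1 byte) and push rdi (1 byte), i.e. 6 bytes in total.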
static const int kNoCodeAgeSequenceLength = kPointerSize == kInt64Size ? 6 : 17;


void Assembler::emitl(uint32_t x) {
  Memory::uint32_at(pc_) = x;
  pc_ += sizeof(uint32_t);
}


void Assembler::emitp(void* x, RelocInfo::Mode rmode) {
  uintptr_t value = reinterpret_cast<uintptr_t>(x);
  Memory::uintptr_at(pc_) = value;
  if (!RelocInfo::IsNone(rmode)) {
    RecordRelocInfo(rmode, value);
  }
  pc_ += sizeof(uintptr_t);
}


void Assembler::emitq(uint64_t x) {
  Memory::uint64_at(pc_) = x;
  pc_ += sizeof(uint64_t);
}


void Assembler::emitw(uint16_t x) {
  Memory::uint16_at(pc_) = x;
  pc_ += sizeof(uint16_t);
}


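// Code targets are emitted as 32-bit indices into code_targets_ rather than
// as absolute addresses; consecutive references to the same target reuse the
// previously recorded index.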
void Assembler::emit_code_target(Handle<Code> target,
                                 RelocInfo::Mode rmode,
                                 TypeFeedbackId ast_id) {
  DCHECK(RelocInfo::IsCodeTarget(rmode) ||
         rmode == RelocInfo::CODE_AGE_SEQUENCE);
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id.ToInt());
  } else {
    RecordRelocInfo(rmode);
  }
  int current = code_targets_.length();
  if (current > 0 && code_targets_.last().is_identical_to(target)) {
    // Optimization if we keep jumping to the same code target.
    emitl(current - 1);
  } else {
    code_targets_.Add(target);
    emitl(current);
  }
}


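// Runtime entries are emitted as a 32-bit offset from the start of the
// isolate's code range; the full address is reconstructed in
// runtime_entry_at() below.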
void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
  DCHECK(RelocInfo::IsRuntimeEntry(rmode));
  RecordRelocInfo(rmode);
  emitl(static_cast<uint32_t>(
      entry - isolate()->heap()->memory_allocator()->code_range()->start()));
}


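// REX prefix layout: 0100WRXB. 0x48 sets W (64-bit operand size); the R, X
// and B bits extend the ModRM reg field, the SIB index and the ModRM rm/SIB
// base fields respectively, to reach registers r8-r15 and xmm8-xmm15.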
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, const Operand& op) {
  emit(0x48 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
}


void Assembler::emit_rex_64(Register rm_reg) {
  DCHECK_EQ(rm_reg.code() & 0xf, rm_reg.code());
  emit(0x48 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(const Operand& op) {
  emit(0x48 | op.rex_);
}


void Assembler::emit_rex_32(Register reg, Register rm_reg) {
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(Register reg, const Operand& op) {
  emit(0x40 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_32(Register rm_reg) {
  emit(0x40 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(const Operand& op) {
  emit(0x40 | op.rex_);
}


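// The emit_optional_rex_32 variants emit a REX prefix (without the W bit)
// only when at least one of the R/X/B extension bits is actually needed.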
void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
  byte rex_bits = reg.high_bit() << 2 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}

void Assembler::emit_optional_rex_32(XMMRegister rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}

void Assembler::emit_optional_rex_32(const Operand& op) {
  if (op.rex_ != 0) emit(0x40 | op.rex_);
}


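// VEX prefixes: the 3-byte form starts with 0xC4, the 2-byte form with 0xC5.
// The R, X, B and vvvv register fields are stored inverted in the prefix,
// which is why the bytes below are built with bitwise complements.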
// byte 1 of 3-byte VEX
void Assembler::emit_vex3_byte1(XMMRegister reg, XMMRegister rm,
                                LeadingOpcode m) {
  byte rxb = ~((reg.high_bit() << 2) | rm.high_bit()) << 5;
  emit(rxb | m);
}


// byte 1 of 3-byte VEX
void Assembler::emit_vex3_byte1(XMMRegister reg, const Operand& rm,
                                LeadingOpcode m) {
  byte rxb = ~((reg.high_bit() << 2) | rm.rex_) << 5;
  emit(rxb | m);
}


// byte 1 of 2-byte VEX
void Assembler::emit_vex2_byte1(XMMRegister reg, XMMRegister v, VectorLength l,
                                SIMDPrefix pp) {
  byte rv = ~((reg.high_bit() << 4) | v.code()) << 3;
  emit(rv | l | pp);
}


// byte 2 of 3-byte VEX
void Assembler::emit_vex3_byte2(VexW w, XMMRegister v, VectorLength l,
                                SIMDPrefix pp) {
  emit(w | ((~v.code() & 0xf) << 3) | l | pp);
}


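// The 2-byte VEX form cannot encode the X/B extension bits, and it implies
// the 0F leading-opcode map and W = 0; the shorter encoding is therefore
// used only when those conditions hold.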
void Assembler::emit_vex_prefix(XMMRegister reg, XMMRegister vreg,
                                XMMRegister rm, VectorLength l, SIMDPrefix pp,
                                LeadingOpcode mm, VexW w) {
  if (rm.high_bit() || mm != k0F || w != kW0) {
    emit_vex3_byte0();
    emit_vex3_byte1(reg, rm, mm);
    emit_vex3_byte2(w, vreg, l, pp);
  } else {
    emit_vex2_byte0();
    emit_vex2_byte1(reg, vreg, l, pp);
  }
}


void Assembler::emit_vex_prefix(Register reg, Register vreg, Register rm,
                                VectorLength l, SIMDPrefix pp, LeadingOpcode mm,
                                VexW w) {
  XMMRegister ireg = {reg.code()};
  XMMRegister ivreg = {vreg.code()};
  XMMRegister irm = {rm.code()};
  emit_vex_prefix(ireg, ivreg, irm, l, pp, mm, w);
}


void Assembler::emit_vex_prefix(XMMRegister reg, XMMRegister vreg,
                                const Operand& rm, VectorLength l,
                                SIMDPrefix pp, LeadingOpcode mm, VexW w) {
  if (rm.rex_ || mm != k0F || w != kW0) {
    emit_vex3_byte0();
    emit_vex3_byte1(reg, rm, mm);
    emit_vex3_byte2(w, vreg, l, pp);
  } else {
    emit_vex2_byte0();
    emit_vex2_byte1(reg, vreg, l, pp);
  }
}


void Assembler::emit_vex_prefix(Register reg, Register vreg, const Operand& rm,
                                VectorLength l, SIMDPrefix pp, LeadingOpcode mm,
                                VexW w) {
  XMMRegister ireg = {reg.code()};
  XMMRegister ivreg = {vreg.code()};
  emit_vex_prefix(ireg, ivreg, rm, l, pp, mm, w);
}


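// Call and jump targets are 32-bit pc-relative displacements, measured from
// the end of the 4-byte displacement field itself (hence the +/- 4 below).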
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  return Memory::int32_at(pc) + pc + 4;
}


void Assembler::set_target_address_at(Isolate* isolate, Address pc,
                                      Address constant_pool, Address target,
                                      ICacheFlushMode icache_flush_mode) {
  Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
    Assembler::FlushICache(isolate, pc, sizeof(int32_t));
  }
}


void Assembler::deserialization_set_target_internal_reference_at(
    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
  Memory::Address_at(pc) = target;
}


Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}


Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
  return code_targets_[Memory::int32_at(pc)];
}


Address Assembler::runtime_entry_at(Address pc) {
  return Memory::int32_at(pc) +
         isolate()->heap()->memory_allocator()->code_range()->start();
}

// -----------------------------------------------------------------------------
// Implementation of RelocInfo

// The modes possibly affected by apply must be in kApplyMask.
void RelocInfo::apply(intptr_t delta) {
  if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
  } else if (IsCodeAgeSequence(rmode_)) {
    if (*pc_ == kCallOpcode) {
      int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
      *p -= static_cast<int32_t>(delta);  // Relocate entry.
    }
  } else if (IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    Memory::Address_at(pc_) += delta;
  }
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
         || rmode_ == EMBEDDED_OBJECT
         || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


Address RelocInfo::constant_pool_entry_address() {
  UNREACHABLE();
  return NULL;
}


int RelocInfo::target_address_size() {
  if (IsCodedSpecially()) {
    return Assembler::kSpecialTargetSize;
  } else {
    return kPointerSize;
  }
}


void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(isolate_, pc_, host_, target,
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL &&
      IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  if (rmode_ == EMBEDDED_OBJECT) {
    return Memory::Object_Handle_at(pc_);
  } else {
    return origin->code_target_object_handle_at(pc_);
  }
}


Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return Memory::Address_at(pc_);
}


Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return Memory::Address_at(pc_);
}


Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Memory::Object_at(pc_) = target;
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
    Assembler::FlushICache(isolate_, pc_, sizeof(Address));
  }
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target));
  }
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return origin->runtime_entry_at(pc_);
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target) {
    set_target_address(target, write_barrier_mode, icache_flush_mode);
  }
}


Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
    Assembler::FlushICache(isolate_, pc_, sizeof(Address));
  }
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(host(), this,
                                                                  cell);
  }
}


void RelocInfo::WipeOut() {
  if (IsEmbeddedObject(rmode_) || IsExternalReference(rmode_) ||
      IsInternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    // Effectively write zero into the relocation.
    Assembler::set_target_address_at(isolate_, pc_, host_,
                                     pc_ + sizeof(int32_t));
  } else {
    UNREACHABLE();
  }
}


Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  DCHECK(*pc_ == kCallOpcode);
  return origin->code_target_object_handle_at(pc_ + 1);
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  DCHECK(*pc_ == kCallOpcode);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + 1, host_));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(*pc_ == kCallOpcode);
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(
      isolate_, pc_ + 1, host_, stub->instruction_start(), icache_flush_mode);
}


Address RelocInfo::debug_call_address() {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  return Memory::Address_at(pc_ + Assembler::kPatchDebugBreakSlotAddressOffset);
}


void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  Memory::Address_at(pc_ + Assembler::kPatchDebugBreakSlotAddressOffset) =
      target;
  Assembler::FlushICache(isolate_,
                         pc_ + Assembler::kPatchDebugBreakSlotAddressOffset,
                         sizeof(Address));
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

template <typename ObjectVisitor>
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
    Assembler::FlushICache(isolate, pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
    Assembler::FlushICache(heap->isolate(), pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Implementation of Operand

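// ModRM byte layout: mod in bits 7..6, reg in bits 5..3, rm in bits 2..0.
// Only mod and rm are set here; the reg field is filled in later by the
// instruction emitter.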
void Operand::set_modrm(int mod, Register rm_reg) {
  DCHECK(is_uint2(mod));
  buf_[0] = mod << 6 | rm_reg.low_bits();
  // Set REX.B to the high bit of rm.code().
  rex_ |= rm_reg.high_bit();
}


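// SIB byte layout: scale in bits 7..6, index in bits 5..3, base in bits 2..0.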
void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  DCHECK(len_ == 1);
  DCHECK(is_uint2(scale));
  // Use SIB with no index register only for base rsp or r12. Otherwise we
  // would skip the SIB byte entirely.
  DCHECK(!index.is(rsp) || base.is(rsp) || base.is(r12));
  buf_[1] = (scale << 6) | (index.low_bits() << 3) | base.low_bits();
  rex_ |= index.high_bit() << 1 | base.high_bit();
  len_ = 2;
}

void Operand::set_disp8(int disp) {
  DCHECK(is_int8(disp));
  DCHECK(len_ == 1 || len_ == 2);
  int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int8_t);
}

void Operand::set_disp32(int disp) {
  DCHECK(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
}

void Operand::set_disp64(int64_t disp) {
  DCHECK_EQ(1, len_);
  int64_t* p = reinterpret_cast<int64_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(disp);
}
}  // namespace internal
}  // namespace v8

#endif  // V8_X64_ASSEMBLER_X64_INL_H_