// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_X64_ASSEMBLER_X64_INL_H_
#define V8_X64_ASSEMBLER_X64_INL_H_

#include "src/x64/assembler-x64.h"

#include "src/cpu.h"
#include "src/debug.h"
#include "src/v8memory.h"

namespace v8 {
namespace internal {
16
SupportsCrankshaft()17 bool CpuFeatures::SupportsCrankshaft() { return true; }
18
19
// -----------------------------------------------------------------------------
// Implementation of Assembler
22
23
24 static const byte kCallOpcode = 0xE8;
25 // The length of pushq(rbp), movp(rbp, rsp), Push(rsi) and Push(rdi).
26 static const int kNoCodeAgeSequenceLength = kPointerSize == kInt64Size ? 6 : 17;
27
28
emitl(uint32_t x)29 void Assembler::emitl(uint32_t x) {
30 Memory::uint32_at(pc_) = x;
31 pc_ += sizeof(uint32_t);
32 }
33
34
emitp(void * x,RelocInfo::Mode rmode)35 void Assembler::emitp(void* x, RelocInfo::Mode rmode) {
36 uintptr_t value = reinterpret_cast<uintptr_t>(x);
37 Memory::uintptr_at(pc_) = value;
38 if (!RelocInfo::IsNone(rmode)) {
39 RecordRelocInfo(rmode, value);
40 }
41 pc_ += sizeof(uintptr_t);
42 }
43
44
emitq(uint64_t x)45 void Assembler::emitq(uint64_t x) {
46 Memory::uint64_at(pc_) = x;
47 pc_ += sizeof(uint64_t);
48 }
49
50
emitw(uint16_t x)51 void Assembler::emitw(uint16_t x) {
52 Memory::uint16_at(pc_) = x;
53 pc_ += sizeof(uint16_t);
54 }
55
56
emit_code_target(Handle<Code> target,RelocInfo::Mode rmode,TypeFeedbackId ast_id)57 void Assembler::emit_code_target(Handle<Code> target,
58 RelocInfo::Mode rmode,
59 TypeFeedbackId ast_id) {
60 ASSERT(RelocInfo::IsCodeTarget(rmode) ||
61 rmode == RelocInfo::CODE_AGE_SEQUENCE);
62 if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
63 RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id.ToInt());
64 } else {
65 RecordRelocInfo(rmode);
66 }
67 int current = code_targets_.length();
68 if (current > 0 && code_targets_.last().is_identical_to(target)) {
69 // Optimization if we keep jumping to the same code target.
70 emitl(current - 1);
71 } else {
72 code_targets_.Add(target);
73 emitl(current);
74 }
75 }
76
77
emit_runtime_entry(Address entry,RelocInfo::Mode rmode)78 void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
79 ASSERT(RelocInfo::IsRuntimeEntry(rmode));
80 RecordRelocInfo(rmode);
81 emitl(static_cast<uint32_t>(entry - isolate()->code_range()->start()));
82 }
83
84
emit_rex_64(Register reg,Register rm_reg)85 void Assembler::emit_rex_64(Register reg, Register rm_reg) {
86 emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
87 }
88
89
emit_rex_64(XMMRegister reg,Register rm_reg)90 void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
91 emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
92 }
93
94
emit_rex_64(Register reg,XMMRegister rm_reg)95 void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
96 emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
97 }
98
99
emit_rex_64(Register reg,const Operand & op)100 void Assembler::emit_rex_64(Register reg, const Operand& op) {
101 emit(0x48 | reg.high_bit() << 2 | op.rex_);
102 }
103
104
emit_rex_64(XMMRegister reg,const Operand & op)105 void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
106 emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
107 }
108
109
emit_rex_64(Register rm_reg)110 void Assembler::emit_rex_64(Register rm_reg) {
111 ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code());
112 emit(0x48 | rm_reg.high_bit());
113 }
114
115
emit_rex_64(const Operand & op)116 void Assembler::emit_rex_64(const Operand& op) {
117 emit(0x48 | op.rex_);
118 }
119
120
emit_rex_32(Register reg,Register rm_reg)121 void Assembler::emit_rex_32(Register reg, Register rm_reg) {
122 emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
123 }
124
125
emit_rex_32(Register reg,const Operand & op)126 void Assembler::emit_rex_32(Register reg, const Operand& op) {
127 emit(0x40 | reg.high_bit() << 2 | op.rex_);
128 }
129
130
emit_rex_32(Register rm_reg)131 void Assembler::emit_rex_32(Register rm_reg) {
132 emit(0x40 | rm_reg.high_bit());
133 }
134
135
emit_rex_32(const Operand & op)136 void Assembler::emit_rex_32(const Operand& op) {
137 emit(0x40 | op.rex_);
138 }
139
140
emit_optional_rex_32(Register reg,Register rm_reg)141 void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
142 byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
143 if (rex_bits != 0) emit(0x40 | rex_bits);
144 }
145
146
emit_optional_rex_32(Register reg,const Operand & op)147 void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
148 byte rex_bits = reg.high_bit() << 2 | op.rex_;
149 if (rex_bits != 0) emit(0x40 | rex_bits);
150 }
151
152
emit_optional_rex_32(XMMRegister reg,const Operand & op)153 void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
154 byte rex_bits = (reg.code() & 0x8) >> 1 | op.rex_;
155 if (rex_bits != 0) emit(0x40 | rex_bits);
156 }
157
158
emit_optional_rex_32(XMMRegister reg,XMMRegister base)159 void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
160 byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
161 if (rex_bits != 0) emit(0x40 | rex_bits);
162 }
163
164
emit_optional_rex_32(XMMRegister reg,Register base)165 void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
166 byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
167 if (rex_bits != 0) emit(0x40 | rex_bits);
168 }
169
170
emit_optional_rex_32(Register reg,XMMRegister base)171 void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
172 byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
173 if (rex_bits != 0) emit(0x40 | rex_bits);
174 }
175
176
emit_optional_rex_32(Register rm_reg)177 void Assembler::emit_optional_rex_32(Register rm_reg) {
178 if (rm_reg.high_bit()) emit(0x41);
179 }
180
181
emit_optional_rex_32(const Operand & op)182 void Assembler::emit_optional_rex_32(const Operand& op) {
183 if (op.rex_ != 0) emit(0x40 | op.rex_);
184 }
185
186
target_address_at(Address pc,ConstantPoolArray * constant_pool)187 Address Assembler::target_address_at(Address pc,
188 ConstantPoolArray* constant_pool) {
189 return Memory::int32_at(pc) + pc + 4;
190 }
191
192
set_target_address_at(Address pc,ConstantPoolArray * constant_pool,Address target,ICacheFlushMode icache_flush_mode)193 void Assembler::set_target_address_at(Address pc,
194 ConstantPoolArray* constant_pool,
195 Address target,
196 ICacheFlushMode icache_flush_mode) {
197 Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
198 if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
199 CPU::FlushICache(pc, sizeof(int32_t));
200 }
201 }
202
203
target_address_from_return_address(Address pc)204 Address Assembler::target_address_from_return_address(Address pc) {
205 return pc - kCallTargetAddressOffset;
206 }
207
208
code_target_object_handle_at(Address pc)209 Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
210 return code_targets_[Memory::int32_at(pc)];
211 }
212
213
runtime_entry_at(Address pc)214 Address Assembler::runtime_entry_at(Address pc) {
215 return Memory::int32_at(pc) + isolate()->code_range()->start();
216 }
217
// -----------------------------------------------------------------------------
// Implementation of RelocInfo

// The modes possibly affected by apply must be in kApplyMask.
apply(intptr_t delta,ICacheFlushMode icache_flush_mode)222 void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
223 bool flush_icache = icache_flush_mode != SKIP_ICACHE_FLUSH;
224 if (IsInternalReference(rmode_)) {
225 // absolute code pointer inside code object moves with the code object.
226 Memory::Address_at(pc_) += static_cast<int32_t>(delta);
227 if (flush_icache) CPU::FlushICache(pc_, sizeof(Address));
228 } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
229 Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
230 if (flush_icache) CPU::FlushICache(pc_, sizeof(int32_t));
231 } else if (rmode_ == CODE_AGE_SEQUENCE) {
232 if (*pc_ == kCallOpcode) {
233 int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
234 *p -= static_cast<int32_t>(delta); // Relocate entry.
235 if (flush_icache) CPU::FlushICache(p, sizeof(uint32_t));
236 }
237 }
238 }
239
240
target_address()241 Address RelocInfo::target_address() {
242 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
243 return Assembler::target_address_at(pc_, host_);
244 }
245
246
target_address_address()247 Address RelocInfo::target_address_address() {
248 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
249 || rmode_ == EMBEDDED_OBJECT
250 || rmode_ == EXTERNAL_REFERENCE);
251 return reinterpret_cast<Address>(pc_);
252 }
253
254
constant_pool_entry_address()255 Address RelocInfo::constant_pool_entry_address() {
256 UNREACHABLE();
257 return NULL;
258 }
259
260
target_address_size()261 int RelocInfo::target_address_size() {
262 if (IsCodedSpecially()) {
263 return Assembler::kSpecialTargetSize;
264 } else {
265 return kPointerSize;
266 }
267 }
268
269
set_target_address(Address target,WriteBarrierMode write_barrier_mode,ICacheFlushMode icache_flush_mode)270 void RelocInfo::set_target_address(Address target,
271 WriteBarrierMode write_barrier_mode,
272 ICacheFlushMode icache_flush_mode) {
273 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
274 Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode);
275 if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL &&
276 IsCodeTarget(rmode_)) {
277 Object* target_code = Code::GetCodeFromTargetAddress(target);
278 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
279 host(), this, HeapObject::cast(target_code));
280 }
281 }
282
283
target_object()284 Object* RelocInfo::target_object() {
285 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
286 return Memory::Object_at(pc_);
287 }
288
289
target_object_handle(Assembler * origin)290 Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
291 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
292 if (rmode_ == EMBEDDED_OBJECT) {
293 return Memory::Object_Handle_at(pc_);
294 } else {
295 return origin->code_target_object_handle_at(pc_);
296 }
297 }
298
299
target_reference()300 Address RelocInfo::target_reference() {
301 ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
302 return Memory::Address_at(pc_);
303 }
304
305
set_target_object(Object * target,WriteBarrierMode write_barrier_mode,ICacheFlushMode icache_flush_mode)306 void RelocInfo::set_target_object(Object* target,
307 WriteBarrierMode write_barrier_mode,
308 ICacheFlushMode icache_flush_mode) {
309 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
310 ASSERT(!target->IsConsString());
311 Memory::Object_at(pc_) = target;
312 if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
313 CPU::FlushICache(pc_, sizeof(Address));
314 }
315 if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
316 host() != NULL &&
317 target->IsHeapObject()) {
318 host()->GetHeap()->incremental_marking()->RecordWrite(
319 host(), &Memory::Object_at(pc_), HeapObject::cast(target));
320 }
321 }
322
323
target_runtime_entry(Assembler * origin)324 Address RelocInfo::target_runtime_entry(Assembler* origin) {
325 ASSERT(IsRuntimeEntry(rmode_));
326 return origin->runtime_entry_at(pc_);
327 }
328
329
set_target_runtime_entry(Address target,WriteBarrierMode write_barrier_mode,ICacheFlushMode icache_flush_mode)330 void RelocInfo::set_target_runtime_entry(Address target,
331 WriteBarrierMode write_barrier_mode,
332 ICacheFlushMode icache_flush_mode) {
333 ASSERT(IsRuntimeEntry(rmode_));
334 if (target_address() != target) {
335 set_target_address(target, write_barrier_mode, icache_flush_mode);
336 }
337 }
338
339
target_cell_handle()340 Handle<Cell> RelocInfo::target_cell_handle() {
341 ASSERT(rmode_ == RelocInfo::CELL);
342 Address address = Memory::Address_at(pc_);
343 return Handle<Cell>(reinterpret_cast<Cell**>(address));
344 }
345
346
target_cell()347 Cell* RelocInfo::target_cell() {
348 ASSERT(rmode_ == RelocInfo::CELL);
349 return Cell::FromValueAddress(Memory::Address_at(pc_));
350 }
351
352
set_target_cell(Cell * cell,WriteBarrierMode write_barrier_mode,ICacheFlushMode icache_flush_mode)353 void RelocInfo::set_target_cell(Cell* cell,
354 WriteBarrierMode write_barrier_mode,
355 ICacheFlushMode icache_flush_mode) {
356 ASSERT(rmode_ == RelocInfo::CELL);
357 Address address = cell->address() + Cell::kValueOffset;
358 Memory::Address_at(pc_) = address;
359 if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
360 CPU::FlushICache(pc_, sizeof(Address));
361 }
362 if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
363 host() != NULL) {
364 // TODO(1550) We are passing NULL as a slot because cell can never be on
365 // evacuation candidate.
366 host()->GetHeap()->incremental_marking()->RecordWrite(
367 host(), NULL, cell);
368 }
369 }
370
371
WipeOut()372 void RelocInfo::WipeOut() {
373 if (IsEmbeddedObject(rmode_) || IsExternalReference(rmode_)) {
374 Memory::Address_at(pc_) = NULL;
375 } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
376 // Effectively write zero into the relocation.
377 Assembler::set_target_address_at(pc_, host_, pc_ + sizeof(int32_t));
378 } else {
379 UNREACHABLE();
380 }
381 }
382
383
IsPatchedReturnSequence()384 bool RelocInfo::IsPatchedReturnSequence() {
385 // The recognized call sequence is:
386 // movq(kScratchRegister, address); call(kScratchRegister);
387 // It only needs to be distinguished from a return sequence
388 // movq(rsp, rbp); pop(rbp); ret(n); int3 *6
389 // The 11th byte is int3 (0xCC) in the return sequence and
390 // REX.WB (0x48+register bit) for the call sequence.
391 return pc_[Assembler::kMoveAddressIntoScratchRegisterInstructionLength] !=
392 0xCC;
393 }
394
395
IsPatchedDebugBreakSlotSequence()396 bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
397 return !Assembler::IsNop(pc());
398 }
399
400
code_age_stub_handle(Assembler * origin)401 Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
402 ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
403 ASSERT(*pc_ == kCallOpcode);
404 return origin->code_target_object_handle_at(pc_ + 1);
405 }
406
407
code_age_stub()408 Code* RelocInfo::code_age_stub() {
409 ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
410 ASSERT(*pc_ == kCallOpcode);
411 return Code::GetCodeFromTargetAddress(
412 Assembler::target_address_at(pc_ + 1, host_));
413 }
414
415
set_code_age_stub(Code * stub,ICacheFlushMode icache_flush_mode)416 void RelocInfo::set_code_age_stub(Code* stub,
417 ICacheFlushMode icache_flush_mode) {
418 ASSERT(*pc_ == kCallOpcode);
419 ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
420 Assembler::set_target_address_at(pc_ + 1, host_, stub->instruction_start(),
421 icache_flush_mode);
422 }
423
424
call_address()425 Address RelocInfo::call_address() {
426 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
427 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
428 return Memory::Address_at(
429 pc_ + Assembler::kRealPatchReturnSequenceAddressOffset);
430 }
431
432
set_call_address(Address target)433 void RelocInfo::set_call_address(Address target) {
434 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
435 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
436 Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) =
437 target;
438 CPU::FlushICache(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset,
439 sizeof(Address));
440 if (host() != NULL) {
441 Object* target_code = Code::GetCodeFromTargetAddress(target);
442 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
443 host(), this, HeapObject::cast(target_code));
444 }
445 }
446
447
call_object()448 Object* RelocInfo::call_object() {
449 return *call_object_address();
450 }
451
452
set_call_object(Object * target)453 void RelocInfo::set_call_object(Object* target) {
454 *call_object_address() = target;
455 }
456
457
call_object_address()458 Object** RelocInfo::call_object_address() {
459 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
460 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
461 return reinterpret_cast<Object**>(
462 pc_ + Assembler::kPatchReturnSequenceAddressOffset);
463 }
464
465
Visit(Isolate * isolate,ObjectVisitor * visitor)466 void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
467 RelocInfo::Mode mode = rmode();
468 if (mode == RelocInfo::EMBEDDED_OBJECT) {
469 visitor->VisitEmbeddedPointer(this);
470 CPU::FlushICache(pc_, sizeof(Address));
471 } else if (RelocInfo::IsCodeTarget(mode)) {
472 visitor->VisitCodeTarget(this);
473 } else if (mode == RelocInfo::CELL) {
474 visitor->VisitCell(this);
475 } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
476 visitor->VisitExternalReference(this);
477 CPU::FlushICache(pc_, sizeof(Address));
478 } else if (RelocInfo::IsCodeAgeSequence(mode)) {
479 visitor->VisitCodeAgeSequence(this);
480 } else if (((RelocInfo::IsJSReturn(mode) &&
481 IsPatchedReturnSequence()) ||
482 (RelocInfo::IsDebugBreakSlot(mode) &&
483 IsPatchedDebugBreakSlotSequence())) &&
484 isolate->debug()->has_break_points()) {
485 visitor->VisitDebugTarget(this);
486 } else if (RelocInfo::IsRuntimeEntry(mode)) {
487 visitor->VisitRuntimeEntry(this);
488 }
489 }
490
491
492 template<typename StaticVisitor>
Visit(Heap * heap)493 void RelocInfo::Visit(Heap* heap) {
494 RelocInfo::Mode mode = rmode();
495 if (mode == RelocInfo::EMBEDDED_OBJECT) {
496 StaticVisitor::VisitEmbeddedPointer(heap, this);
497 CPU::FlushICache(pc_, sizeof(Address));
498 } else if (RelocInfo::IsCodeTarget(mode)) {
499 StaticVisitor::VisitCodeTarget(heap, this);
500 } else if (mode == RelocInfo::CELL) {
501 StaticVisitor::VisitCell(heap, this);
502 } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
503 StaticVisitor::VisitExternalReference(this);
504 CPU::FlushICache(pc_, sizeof(Address));
505 } else if (RelocInfo::IsCodeAgeSequence(mode)) {
506 StaticVisitor::VisitCodeAgeSequence(heap, this);
507 } else if (heap->isolate()->debug()->has_break_points() &&
508 ((RelocInfo::IsJSReturn(mode) &&
509 IsPatchedReturnSequence()) ||
510 (RelocInfo::IsDebugBreakSlot(mode) &&
511 IsPatchedDebugBreakSlotSequence()))) {
512 StaticVisitor::VisitDebugTarget(heap, this);
513 } else if (RelocInfo::IsRuntimeEntry(mode)) {
514 StaticVisitor::VisitRuntimeEntry(this);
515 }
516 }
517
518
// -----------------------------------------------------------------------------
// Implementation of Operand
521
set_modrm(int mod,Register rm_reg)522 void Operand::set_modrm(int mod, Register rm_reg) {
523 ASSERT(is_uint2(mod));
524 buf_[0] = mod << 6 | rm_reg.low_bits();
525 // Set REX.B to the high bit of rm.code().
526 rex_ |= rm_reg.high_bit();
527 }
528
529
set_sib(ScaleFactor scale,Register index,Register base)530 void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
531 ASSERT(len_ == 1);
532 ASSERT(is_uint2(scale));
533 // Use SIB with no index register only for base rsp or r12. Otherwise we
534 // would skip the SIB byte entirely.
535 ASSERT(!index.is(rsp) || base.is(rsp) || base.is(r12));
536 buf_[1] = (scale << 6) | (index.low_bits() << 3) | base.low_bits();
537 rex_ |= index.high_bit() << 1 | base.high_bit();
538 len_ = 2;
539 }
540
set_disp8(int disp)541 void Operand::set_disp8(int disp) {
542 ASSERT(is_int8(disp));
543 ASSERT(len_ == 1 || len_ == 2);
544 int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
545 *p = disp;
546 len_ += sizeof(int8_t);
547 }
548
set_disp32(int disp)549 void Operand::set_disp32(int disp) {
550 ASSERT(len_ == 1 || len_ == 2);
551 int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
552 *p = disp;
553 len_ += sizeof(int32_t);
554 }
555

} }  // namespace v8::internal

#endif  // V8_X64_ASSEMBLER_X64_INL_H_