1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #if V8_TARGET_ARCH_X64
31
32 #include "macro-assembler.h"
33 #include "serialize.h"
34
35 namespace v8 {
36 namespace internal {
37
38 // -----------------------------------------------------------------------------
39 // Implementation of CpuFeatures
40
41
42 #ifdef DEBUG
43 bool CpuFeatures::initialized_ = false;
44 #endif
45 uint64_t CpuFeatures::supported_ = CpuFeatures::kDefaultCpuFeatures;
46 uint64_t CpuFeatures::found_by_runtime_probing_only_ = 0;
47 uint64_t CpuFeatures::cross_compile_ = 0;
48
cpu_features()49 ExternalReference ExternalReference::cpu_features() {
50 ASSERT(CpuFeatures::initialized_);
51 return ExternalReference(&CpuFeatures::supported_);
52 }
53
54
Probe()55 void CpuFeatures::Probe() {
56 ASSERT(supported_ == CpuFeatures::kDefaultCpuFeatures);
57 #ifdef DEBUG
58 initialized_ = true;
59 #endif
60 supported_ = kDefaultCpuFeatures;
61 if (Serializer::enabled()) {
62 supported_ |= OS::CpuFeaturesImpliedByPlatform();
63 return; // No features if we might serialize.
64 }
65
66 uint64_t probed_features = 0;
67 CPU cpu;
68 if (cpu.has_sse41()) {
69 probed_features |= static_cast<uint64_t>(1) << SSE4_1;
70 }
71 if (cpu.has_sse3()) {
72 probed_features |= static_cast<uint64_t>(1) << SSE3;
73 }
74
75 // SSE2 must be available on every x64 CPU.
76 ASSERT(cpu.has_sse2());
77 probed_features |= static_cast<uint64_t>(1) << SSE2;
78
79 // CMOV must be available on every x64 CPU.
80 ASSERT(cpu.has_cmov());
81 probed_features |= static_cast<uint64_t>(1) << CMOV;
82
83 // SAHF is not generally available in long mode.
84 if (cpu.has_sahf()) {
85 probed_features |= static_cast<uint64_t>(1) << SAHF;
86 }
87
88 uint64_t platform_features = OS::CpuFeaturesImpliedByPlatform();
89 supported_ = probed_features | platform_features;
90 found_by_runtime_probing_only_
91 = probed_features & ~kDefaultCpuFeatures & ~platform_features;
92 }
93
94
95 // -----------------------------------------------------------------------------
96 // Implementation of RelocInfo
97
98 // Patch the code at the current PC with a call to the target address.
99 // Additional guard int3 instructions can be added if required.
PatchCodeWithCall(Address target,int guard_bytes)100 void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
101 int code_size = Assembler::kCallSequenceLength + guard_bytes;
102
103 // Create a code patcher.
104 CodePatcher patcher(pc_, code_size);
105
106 // Add a label for checking the size of the code used for returning.
107 #ifdef DEBUG
108 Label check_codesize;
109 patcher.masm()->bind(&check_codesize);
110 #endif
111
112 // Patch the code.
113 patcher.masm()->movq(kScratchRegister, target, RelocInfo::NONE64);
114 patcher.masm()->call(kScratchRegister);
115
116 // Check that the size of the code generated is as expected.
117 ASSERT_EQ(Assembler::kCallSequenceLength,
118 patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize));
119
120 // Add the requested number of int3 instructions after the call.
121 for (int i = 0; i < guard_bytes; i++) {
122 patcher.masm()->int3();
123 }
124 }
125
126
PatchCode(byte * instructions,int instruction_count)127 void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
128 // Patch the code at the current address with the supplied instructions.
129 for (int i = 0; i < instruction_count; i++) {
130 *(pc_ + i) = *(instructions + i);
131 }
132
133 // Indicate that code has changed.
134 CPU::FlushICache(pc_, instruction_count);
135 }
136
137
138 // -----------------------------------------------------------------------------
139 // Register constants.
140
141 const int
142 Register::kRegisterCodeByAllocationIndex[kMaxNumAllocatableRegisters] = {
143 // rax, rbx, rdx, rcx, rsi, rdi, r8, r9, r11, r14, r15
144 0, 3, 2, 1, 6, 7, 8, 9, 11, 14, 15
145 };
146
147 const int Register::kAllocationIndexByRegisterCode[kNumRegisters] = {
148 0, 3, 2, 1, -1, -1, 4, 5, 6, 7, -1, 8, -1, -1, 9, 10
149 };
150
151
152 // -----------------------------------------------------------------------------
153 // Implementation of Operand
154
Operand(Register base,int32_t disp)155 Operand::Operand(Register base, int32_t disp) : rex_(0) {
156 len_ = 1;
157 if (base.is(rsp) || base.is(r12)) {
158 // SIB byte is needed to encode (rsp + offset) or (r12 + offset).
159 set_sib(times_1, rsp, base);
160 }
161
162 if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
163 set_modrm(0, base);
164 } else if (is_int8(disp)) {
165 set_modrm(1, base);
166 set_disp8(disp);
167 } else {
168 set_modrm(2, base);
169 set_disp32(disp);
170 }
171 }
172
173
Operand(Register base,Register index,ScaleFactor scale,int32_t disp)174 Operand::Operand(Register base,
175 Register index,
176 ScaleFactor scale,
177 int32_t disp) : rex_(0) {
178 ASSERT(!index.is(rsp));
179 len_ = 1;
180 set_sib(scale, index, base);
181 if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
182 // This call to set_modrm doesn't overwrite the REX.B (or REX.X) bits
183 // possibly set by set_sib.
184 set_modrm(0, rsp);
185 } else if (is_int8(disp)) {
186 set_modrm(1, rsp);
187 set_disp8(disp);
188 } else {
189 set_modrm(2, rsp);
190 set_disp32(disp);
191 }
192 }
193
194
Operand(Register index,ScaleFactor scale,int32_t disp)195 Operand::Operand(Register index,
196 ScaleFactor scale,
197 int32_t disp) : rex_(0) {
198 ASSERT(!index.is(rsp));
199 len_ = 1;
200 set_modrm(0, rsp);
201 set_sib(scale, index, rbp);
202 set_disp32(disp);
203 }
204
205
Operand(const Operand & operand,int32_t offset)206 Operand::Operand(const Operand& operand, int32_t offset) {
207 ASSERT(operand.len_ >= 1);
208 // Operand encodes REX ModR/M [SIB] [Disp].
209 byte modrm = operand.buf_[0];
210 ASSERT(modrm < 0xC0); // Disallow mode 3 (register target).
211 bool has_sib = ((modrm & 0x07) == 0x04);
212 byte mode = modrm & 0xC0;
213 int disp_offset = has_sib ? 2 : 1;
214 int base_reg = (has_sib ? operand.buf_[1] : modrm) & 0x07;
215 // Mode 0 with rbp/r13 as ModR/M or SIB base register always has a 32-bit
216 // displacement.
217 bool is_baseless = (mode == 0) && (base_reg == 0x05); // No base or RIP base.
218 int32_t disp_value = 0;
219 if (mode == 0x80 || is_baseless) {
220 // Mode 2 or mode 0 with rbp/r13 as base: Word displacement.
221 disp_value = *BitCast<const int32_t*>(&operand.buf_[disp_offset]);
222 } else if (mode == 0x40) {
223 // Mode 1: Byte displacement.
224 disp_value = static_cast<signed char>(operand.buf_[disp_offset]);
225 }
226
227 // Write new operand with same registers, but with modified displacement.
228 ASSERT(offset >= 0 ? disp_value + offset > disp_value
229 : disp_value + offset < disp_value); // No overflow.
230 disp_value += offset;
231 rex_ = operand.rex_;
232 if (!is_int8(disp_value) || is_baseless) {
233 // Need 32 bits of displacement, mode 2 or mode 1 with register rbp/r13.
234 buf_[0] = (modrm & 0x3f) | (is_baseless ? 0x00 : 0x80);
235 len_ = disp_offset + 4;
236 Memory::int32_at(&buf_[disp_offset]) = disp_value;
237 } else if (disp_value != 0 || (base_reg == 0x05)) {
238 // Need 8 bits of displacement.
239 buf_[0] = (modrm & 0x3f) | 0x40; // Mode 1.
240 len_ = disp_offset + 1;
241 buf_[disp_offset] = static_cast<byte>(disp_value);
242 } else {
243 // Need no displacement.
244 buf_[0] = (modrm & 0x3f); // Mode 0.
245 len_ = disp_offset;
246 }
247 if (has_sib) {
248 buf_[1] = operand.buf_[1];
249 }
250 }
251
252
AddressUsesRegister(Register reg) const253 bool Operand::AddressUsesRegister(Register reg) const {
254 int code = reg.code();
255 ASSERT((buf_[0] & 0xC0) != 0xC0); // Always a memory operand.
256 // Start with only low three bits of base register. Initial decoding doesn't
257 // distinguish on the REX.B bit.
258 int base_code = buf_[0] & 0x07;
259 if (base_code == rsp.code()) {
260 // SIB byte present in buf_[1].
261 // Check the index register from the SIB byte + REX.X prefix.
262 int index_code = ((buf_[1] >> 3) & 0x07) | ((rex_ & 0x02) << 2);
263 // Index code (including REX.X) of 0x04 (rsp) means no index register.
264 if (index_code != rsp.code() && index_code == code) return true;
265 // Add REX.B to get the full base register code.
266 base_code = (buf_[1] & 0x07) | ((rex_ & 0x01) << 3);
267 // A base register of 0x05 (rbp) with mod = 0 means no base register.
268 if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
269 return code == base_code;
270 } else {
271 // A base register with low bits of 0x05 (rbp or r13) and mod = 0 means
272 // no base register.
273 if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
274 base_code |= ((rex_ & 0x01) << 3);
275 return code == base_code;
276 }
277 }
278
279
280 // -----------------------------------------------------------------------------
281 // Implementation of Assembler.
282
283 #ifdef GENERATED_CODE_COVERAGE
284 static void InitCoverageLog();
285 #endif
286
Assembler(Isolate * isolate,void * buffer,int buffer_size)287 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
288 : AssemblerBase(isolate, buffer, buffer_size),
289 code_targets_(100),
290 positions_recorder_(this) {
291 // Clear the buffer in debug mode unless it was provided by the
292 // caller in which case we can't be sure it's okay to overwrite
293 // existing code in it.
294 #ifdef DEBUG
295 if (own_buffer_) {
296 memset(buffer_, 0xCC, buffer_size_); // int3
297 }
298 #endif
299
300 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
301
302
303 #ifdef GENERATED_CODE_COVERAGE
304 InitCoverageLog();
305 #endif
306 }
307
308
GetCode(CodeDesc * desc)309 void Assembler::GetCode(CodeDesc* desc) {
310 // Finalize code (at this point overflow() may be true, but the gap ensures
311 // that we are still not overlapping instructions and relocation info).
312 ASSERT(pc_ <= reloc_info_writer.pos()); // No overlap.
313 // Set up code descriptor.
314 desc->buffer = buffer_;
315 desc->buffer_size = buffer_size_;
316 desc->instr_size = pc_offset();
317 ASSERT(desc->instr_size > 0); // Zero-size code objects upset the system.
318 desc->reloc_size =
319 static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos());
320 desc->origin = this;
321 }
322
323
Align(int m)324 void Assembler::Align(int m) {
325 ASSERT(IsPowerOf2(m));
326 int delta = (m - (pc_offset() & (m - 1))) & (m - 1);
327 Nop(delta);
328 }
329
330
CodeTargetAlign()331 void Assembler::CodeTargetAlign() {
332 Align(16); // Preferred alignment of jump targets on x64.
333 }
334
335
IsNop(Address addr)336 bool Assembler::IsNop(Address addr) {
337 Address a = addr;
338 while (*a == 0x66) a++;
339 if (*a == 0x90) return true;
340 if (a[0] == 0xf && a[1] == 0x1f) return true;
341 return false;
342 }
343
344
bind_to(Label * L,int pos)345 void Assembler::bind_to(Label* L, int pos) {
346 ASSERT(!L->is_bound()); // Label may only be bound once.
347 ASSERT(0 <= pos && pos <= pc_offset()); // Position must be valid.
348 if (L->is_linked()) {
349 int current = L->pos();
350 int next = long_at(current);
351 while (next != current) {
352 // Relative address, relative to point after address.
353 int imm32 = pos - (current + sizeof(int32_t));
354 long_at_put(current, imm32);
355 current = next;
356 next = long_at(next);
357 }
358 // Fix up last fixup on linked list.
359 int last_imm32 = pos - (current + sizeof(int32_t));
360 long_at_put(current, last_imm32);
361 }
362 while (L->is_near_linked()) {
363 int fixup_pos = L->near_link_pos();
364 int offset_to_next =
365 static_cast<int>(*reinterpret_cast<int8_t*>(addr_at(fixup_pos)));
366 ASSERT(offset_to_next <= 0);
367 int disp = pos - (fixup_pos + sizeof(int8_t));
368 CHECK(is_int8(disp));
369 set_byte_at(fixup_pos, disp);
370 if (offset_to_next < 0) {
371 L->link_to(fixup_pos + offset_to_next, Label::kNear);
372 } else {
373 L->UnuseNear();
374 }
375 }
376 L->bind_to(pos);
377 }
378
379
bind(Label * L)380 void Assembler::bind(Label* L) {
381 bind_to(L, pc_offset());
382 }
383
384
GrowBuffer()385 void Assembler::GrowBuffer() {
386 ASSERT(buffer_overflow());
387 if (!own_buffer_) FATAL("external code buffer is too small");
388
389 // Compute new buffer size.
390 CodeDesc desc; // the new buffer
391 if (buffer_size_ < 4*KB) {
392 desc.buffer_size = 4*KB;
393 } else {
394 desc.buffer_size = 2*buffer_size_;
395 }
396 // Some internal data structures overflow for very large buffers,
397 // they must ensure that kMaximalBufferSize is not too large.
398 if ((desc.buffer_size > kMaximalBufferSize) ||
399 (desc.buffer_size > isolate()->heap()->MaxOldGenerationSize())) {
400 V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
401 }
402
403 // Set up new buffer.
404 desc.buffer = NewArray<byte>(desc.buffer_size);
405 desc.instr_size = pc_offset();
406 desc.reloc_size =
407 static_cast<int>((buffer_ + buffer_size_) - (reloc_info_writer.pos()));
408
409 // Clear the buffer in debug mode. Use 'int3' instructions to make
410 // sure to get into problems if we ever run uninitialized code.
411 #ifdef DEBUG
412 memset(desc.buffer, 0xCC, desc.buffer_size);
413 #endif
414
415 // Copy the data.
416 intptr_t pc_delta = desc.buffer - buffer_;
417 intptr_t rc_delta = (desc.buffer + desc.buffer_size) -
418 (buffer_ + buffer_size_);
419 OS::MemMove(desc.buffer, buffer_, desc.instr_size);
420 OS::MemMove(rc_delta + reloc_info_writer.pos(),
421 reloc_info_writer.pos(), desc.reloc_size);
422
423 // Switch buffers.
424 if (isolate() != NULL &&
425 isolate()->assembler_spare_buffer() == NULL &&
426 buffer_size_ == kMinimalBufferSize) {
427 isolate()->set_assembler_spare_buffer(buffer_);
428 } else {
429 DeleteArray(buffer_);
430 }
431 buffer_ = desc.buffer;
432 buffer_size_ = desc.buffer_size;
433 pc_ += pc_delta;
434 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
435 reloc_info_writer.last_pc() + pc_delta);
436
437 // Relocate runtime entries.
438 for (RelocIterator it(desc); !it.done(); it.next()) {
439 RelocInfo::Mode rmode = it.rinfo()->rmode();
440 if (rmode == RelocInfo::INTERNAL_REFERENCE) {
441 intptr_t* p = reinterpret_cast<intptr_t*>(it.rinfo()->pc());
442 if (*p != 0) { // 0 means uninitialized.
443 *p += pc_delta;
444 }
445 }
446 }
447
448 ASSERT(!buffer_overflow());
449 }
450
451
emit_operand(int code,const Operand & adr)452 void Assembler::emit_operand(int code, const Operand& adr) {
453 ASSERT(is_uint3(code));
454 const unsigned length = adr.len_;
455 ASSERT(length > 0);
456
457 // Emit updated ModR/M byte containing the given register.
458 ASSERT((adr.buf_[0] & 0x38) == 0);
459 pc_[0] = adr.buf_[0] | code << 3;
460
461 // Emit the rest of the encoded operand.
462 for (unsigned i = 1; i < length; i++) pc_[i] = adr.buf_[i];
463 pc_ += length;
464 }
465
466
467 // Assembler Instruction implementations.
468
arithmetic_op(byte opcode,Register reg,const Operand & op)469 void Assembler::arithmetic_op(byte opcode, Register reg, const Operand& op) {
470 EnsureSpace ensure_space(this);
471 emit_rex_64(reg, op);
472 emit(opcode);
473 emit_operand(reg, op);
474 }
475
476
arithmetic_op(byte opcode,Register reg,Register rm_reg)477 void Assembler::arithmetic_op(byte opcode, Register reg, Register rm_reg) {
478 EnsureSpace ensure_space(this);
479 ASSERT((opcode & 0xC6) == 2);
480 if (rm_reg.low_bits() == 4) { // Forces SIB byte.
481 // Swap reg and rm_reg and change opcode operand order.
482 emit_rex_64(rm_reg, reg);
483 emit(opcode ^ 0x02);
484 emit_modrm(rm_reg, reg);
485 } else {
486 emit_rex_64(reg, rm_reg);
487 emit(opcode);
488 emit_modrm(reg, rm_reg);
489 }
490 }
491
492
arithmetic_op_16(byte opcode,Register reg,Register rm_reg)493 void Assembler::arithmetic_op_16(byte opcode, Register reg, Register rm_reg) {
494 EnsureSpace ensure_space(this);
495 ASSERT((opcode & 0xC6) == 2);
496 if (rm_reg.low_bits() == 4) { // Forces SIB byte.
497 // Swap reg and rm_reg and change opcode operand order.
498 emit(0x66);
499 emit_optional_rex_32(rm_reg, reg);
500 emit(opcode ^ 0x02);
501 emit_modrm(rm_reg, reg);
502 } else {
503 emit(0x66);
504 emit_optional_rex_32(reg, rm_reg);
505 emit(opcode);
506 emit_modrm(reg, rm_reg);
507 }
508 }
509
510
arithmetic_op_16(byte opcode,Register reg,const Operand & rm_reg)511 void Assembler::arithmetic_op_16(byte opcode,
512 Register reg,
513 const Operand& rm_reg) {
514 EnsureSpace ensure_space(this);
515 emit(0x66);
516 emit_optional_rex_32(reg, rm_reg);
517 emit(opcode);
518 emit_operand(reg, rm_reg);
519 }
520
521
arithmetic_op_32(byte opcode,Register reg,Register rm_reg)522 void Assembler::arithmetic_op_32(byte opcode, Register reg, Register rm_reg) {
523 EnsureSpace ensure_space(this);
524 ASSERT((opcode & 0xC6) == 2);
525 if (rm_reg.low_bits() == 4) { // Forces SIB byte.
526 // Swap reg and rm_reg and change opcode operand order.
527 emit_optional_rex_32(rm_reg, reg);
528 emit(opcode ^ 0x02); // E.g. 0x03 -> 0x01 for ADD.
529 emit_modrm(rm_reg, reg);
530 } else {
531 emit_optional_rex_32(reg, rm_reg);
532 emit(opcode);
533 emit_modrm(reg, rm_reg);
534 }
535 }
536
537
arithmetic_op_32(byte opcode,Register reg,const Operand & rm_reg)538 void Assembler::arithmetic_op_32(byte opcode,
539 Register reg,
540 const Operand& rm_reg) {
541 EnsureSpace ensure_space(this);
542 emit_optional_rex_32(reg, rm_reg);
543 emit(opcode);
544 emit_operand(reg, rm_reg);
545 }
546
547
immediate_arithmetic_op(byte subcode,Register dst,Immediate src)548 void Assembler::immediate_arithmetic_op(byte subcode,
549 Register dst,
550 Immediate src) {
551 EnsureSpace ensure_space(this);
552 emit_rex_64(dst);
553 if (is_int8(src.value_)) {
554 emit(0x83);
555 emit_modrm(subcode, dst);
556 emit(src.value_);
557 } else if (dst.is(rax)) {
558 emit(0x05 | (subcode << 3));
559 emitl(src.value_);
560 } else {
561 emit(0x81);
562 emit_modrm(subcode, dst);
563 emitl(src.value_);
564 }
565 }
566
immediate_arithmetic_op(byte subcode,const Operand & dst,Immediate src)567 void Assembler::immediate_arithmetic_op(byte subcode,
568 const Operand& dst,
569 Immediate src) {
570 EnsureSpace ensure_space(this);
571 emit_rex_64(dst);
572 if (is_int8(src.value_)) {
573 emit(0x83);
574 emit_operand(subcode, dst);
575 emit(src.value_);
576 } else {
577 emit(0x81);
578 emit_operand(subcode, dst);
579 emitl(src.value_);
580 }
581 }
582
583
immediate_arithmetic_op_16(byte subcode,Register dst,Immediate src)584 void Assembler::immediate_arithmetic_op_16(byte subcode,
585 Register dst,
586 Immediate src) {
587 EnsureSpace ensure_space(this);
588 emit(0x66); // Operand size override prefix.
589 emit_optional_rex_32(dst);
590 if (is_int8(src.value_)) {
591 emit(0x83);
592 emit_modrm(subcode, dst);
593 emit(src.value_);
594 } else if (dst.is(rax)) {
595 emit(0x05 | (subcode << 3));
596 emitw(src.value_);
597 } else {
598 emit(0x81);
599 emit_modrm(subcode, dst);
600 emitw(src.value_);
601 }
602 }
603
604
immediate_arithmetic_op_16(byte subcode,const Operand & dst,Immediate src)605 void Assembler::immediate_arithmetic_op_16(byte subcode,
606 const Operand& dst,
607 Immediate src) {
608 EnsureSpace ensure_space(this);
609 emit(0x66); // Operand size override prefix.
610 emit_optional_rex_32(dst);
611 if (is_int8(src.value_)) {
612 emit(0x83);
613 emit_operand(subcode, dst);
614 emit(src.value_);
615 } else {
616 emit(0x81);
617 emit_operand(subcode, dst);
618 emitw(src.value_);
619 }
620 }
621
622
immediate_arithmetic_op_32(byte subcode,Register dst,Immediate src)623 void Assembler::immediate_arithmetic_op_32(byte subcode,
624 Register dst,
625 Immediate src) {
626 EnsureSpace ensure_space(this);
627 emit_optional_rex_32(dst);
628 if (is_int8(src.value_)) {
629 emit(0x83);
630 emit_modrm(subcode, dst);
631 emit(src.value_);
632 } else if (dst.is(rax)) {
633 emit(0x05 | (subcode << 3));
634 emitl(src.value_);
635 } else {
636 emit(0x81);
637 emit_modrm(subcode, dst);
638 emitl(src.value_);
639 }
640 }
641
642
immediate_arithmetic_op_32(byte subcode,const Operand & dst,Immediate src)643 void Assembler::immediate_arithmetic_op_32(byte subcode,
644 const Operand& dst,
645 Immediate src) {
646 EnsureSpace ensure_space(this);
647 emit_optional_rex_32(dst);
648 if (is_int8(src.value_)) {
649 emit(0x83);
650 emit_operand(subcode, dst);
651 emit(src.value_);
652 } else {
653 emit(0x81);
654 emit_operand(subcode, dst);
655 emitl(src.value_);
656 }
657 }
658
659
immediate_arithmetic_op_8(byte subcode,const Operand & dst,Immediate src)660 void Assembler::immediate_arithmetic_op_8(byte subcode,
661 const Operand& dst,
662 Immediate src) {
663 EnsureSpace ensure_space(this);
664 emit_optional_rex_32(dst);
665 ASSERT(is_int8(src.value_) || is_uint8(src.value_));
666 emit(0x80);
667 emit_operand(subcode, dst);
668 emit(src.value_);
669 }
670
671
immediate_arithmetic_op_8(byte subcode,Register dst,Immediate src)672 void Assembler::immediate_arithmetic_op_8(byte subcode,
673 Register dst,
674 Immediate src) {
675 EnsureSpace ensure_space(this);
676 if (!dst.is_byte_register()) {
677 // Use 64-bit mode byte registers.
678 emit_rex_64(dst);
679 }
680 ASSERT(is_int8(src.value_) || is_uint8(src.value_));
681 emit(0x80);
682 emit_modrm(subcode, dst);
683 emit(src.value_);
684 }
685
686
shift(Register dst,Immediate shift_amount,int subcode)687 void Assembler::shift(Register dst, Immediate shift_amount, int subcode) {
688 EnsureSpace ensure_space(this);
689 ASSERT(is_uint6(shift_amount.value_)); // illegal shift count
690 if (shift_amount.value_ == 1) {
691 emit_rex_64(dst);
692 emit(0xD1);
693 emit_modrm(subcode, dst);
694 } else {
695 emit_rex_64(dst);
696 emit(0xC1);
697 emit_modrm(subcode, dst);
698 emit(shift_amount.value_);
699 }
700 }
701
702
shift(Register dst,int subcode)703 void Assembler::shift(Register dst, int subcode) {
704 EnsureSpace ensure_space(this);
705 emit_rex_64(dst);
706 emit(0xD3);
707 emit_modrm(subcode, dst);
708 }
709
710
shift_32(Register dst,int subcode)711 void Assembler::shift_32(Register dst, int subcode) {
712 EnsureSpace ensure_space(this);
713 emit_optional_rex_32(dst);
714 emit(0xD3);
715 emit_modrm(subcode, dst);
716 }
717
718
shift_32(Register dst,Immediate shift_amount,int subcode)719 void Assembler::shift_32(Register dst, Immediate shift_amount, int subcode) {
720 EnsureSpace ensure_space(this);
721 ASSERT(is_uint5(shift_amount.value_)); // illegal shift count
722 if (shift_amount.value_ == 1) {
723 emit_optional_rex_32(dst);
724 emit(0xD1);
725 emit_modrm(subcode, dst);
726 } else {
727 emit_optional_rex_32(dst);
728 emit(0xC1);
729 emit_modrm(subcode, dst);
730 emit(shift_amount.value_);
731 }
732 }
733
734
bt(const Operand & dst,Register src)735 void Assembler::bt(const Operand& dst, Register src) {
736 EnsureSpace ensure_space(this);
737 emit_rex_64(src, dst);
738 emit(0x0F);
739 emit(0xA3);
740 emit_operand(src, dst);
741 }
742
743
bts(const Operand & dst,Register src)744 void Assembler::bts(const Operand& dst, Register src) {
745 EnsureSpace ensure_space(this);
746 emit_rex_64(src, dst);
747 emit(0x0F);
748 emit(0xAB);
749 emit_operand(src, dst);
750 }
751
752
call(Label * L)753 void Assembler::call(Label* L) {
754 positions_recorder()->WriteRecordedPositions();
755 EnsureSpace ensure_space(this);
756 // 1110 1000 #32-bit disp.
757 emit(0xE8);
758 if (L->is_bound()) {
759 int offset = L->pos() - pc_offset() - sizeof(int32_t);
760 ASSERT(offset <= 0);
761 emitl(offset);
762 } else if (L->is_linked()) {
763 emitl(L->pos());
764 L->link_to(pc_offset() - sizeof(int32_t));
765 } else {
766 ASSERT(L->is_unused());
767 int32_t current = pc_offset();
768 emitl(current);
769 L->link_to(current);
770 }
771 }
772
773
call(Address entry,RelocInfo::Mode rmode)774 void Assembler::call(Address entry, RelocInfo::Mode rmode) {
775 ASSERT(RelocInfo::IsRuntimeEntry(rmode));
776 positions_recorder()->WriteRecordedPositions();
777 EnsureSpace ensure_space(this);
778 // 1110 1000 #32-bit disp.
779 emit(0xE8);
780 emit_runtime_entry(entry, rmode);
781 }
782
783
call(Handle<Code> target,RelocInfo::Mode rmode,TypeFeedbackId ast_id)784 void Assembler::call(Handle<Code> target,
785 RelocInfo::Mode rmode,
786 TypeFeedbackId ast_id) {
787 positions_recorder()->WriteRecordedPositions();
788 EnsureSpace ensure_space(this);
789 // 1110 1000 #32-bit disp.
790 emit(0xE8);
791 emit_code_target(target, rmode, ast_id);
792 }
793
794
call(Register adr)795 void Assembler::call(Register adr) {
796 positions_recorder()->WriteRecordedPositions();
797 EnsureSpace ensure_space(this);
798 // Opcode: FF /2 r64.
799 emit_optional_rex_32(adr);
800 emit(0xFF);
801 emit_modrm(0x2, adr);
802 }
803
804
call(const Operand & op)805 void Assembler::call(const Operand& op) {
806 positions_recorder()->WriteRecordedPositions();
807 EnsureSpace ensure_space(this);
808 // Opcode: FF /2 m64.
809 emit_optional_rex_32(op);
810 emit(0xFF);
811 emit_operand(0x2, op);
812 }
813
814
815 // Calls directly to the given address using a relative offset.
816 // Should only ever be used in Code objects for calls within the
817 // same Code object. Should not be used when generating new code (use labels),
818 // but only when patching existing code.
call(Address target)819 void Assembler::call(Address target) {
820 positions_recorder()->WriteRecordedPositions();
821 EnsureSpace ensure_space(this);
822 // 1110 1000 #32-bit disp.
823 emit(0xE8);
824 Address source = pc_ + 4;
825 intptr_t displacement = target - source;
826 ASSERT(is_int32(displacement));
827 emitl(static_cast<int32_t>(displacement));
828 }
829
830
clc()831 void Assembler::clc() {
832 EnsureSpace ensure_space(this);
833 emit(0xF8);
834 }
835
836
cld()837 void Assembler::cld() {
838 EnsureSpace ensure_space(this);
839 emit(0xFC);
840 }
841
842
cdq()843 void Assembler::cdq() {
844 EnsureSpace ensure_space(this);
845 emit(0x99);
846 }
847
848
cmovq(Condition cc,Register dst,Register src)849 void Assembler::cmovq(Condition cc, Register dst, Register src) {
850 if (cc == always) {
851 movq(dst, src);
852 } else if (cc == never) {
853 return;
854 }
855 // No need to check CpuInfo for CMOV support, it's a required part of the
856 // 64-bit architecture.
857 ASSERT(cc >= 0); // Use mov for unconditional moves.
858 EnsureSpace ensure_space(this);
859 // Opcode: REX.W 0f 40 + cc /r.
860 emit_rex_64(dst, src);
861 emit(0x0f);
862 emit(0x40 + cc);
863 emit_modrm(dst, src);
864 }
865
866
cmovq(Condition cc,Register dst,const Operand & src)867 void Assembler::cmovq(Condition cc, Register dst, const Operand& src) {
868 if (cc == always) {
869 movq(dst, src);
870 } else if (cc == never) {
871 return;
872 }
873 ASSERT(cc >= 0);
874 EnsureSpace ensure_space(this);
875 // Opcode: REX.W 0f 40 + cc /r.
876 emit_rex_64(dst, src);
877 emit(0x0f);
878 emit(0x40 + cc);
879 emit_operand(dst, src);
880 }
881
882
cmovl(Condition cc,Register dst,Register src)883 void Assembler::cmovl(Condition cc, Register dst, Register src) {
884 if (cc == always) {
885 movl(dst, src);
886 } else if (cc == never) {
887 return;
888 }
889 ASSERT(cc >= 0);
890 EnsureSpace ensure_space(this);
891 // Opcode: 0f 40 + cc /r.
892 emit_optional_rex_32(dst, src);
893 emit(0x0f);
894 emit(0x40 + cc);
895 emit_modrm(dst, src);
896 }
897
898
cmovl(Condition cc,Register dst,const Operand & src)899 void Assembler::cmovl(Condition cc, Register dst, const Operand& src) {
900 if (cc == always) {
901 movl(dst, src);
902 } else if (cc == never) {
903 return;
904 }
905 ASSERT(cc >= 0);
906 EnsureSpace ensure_space(this);
907 // Opcode: 0f 40 + cc /r.
908 emit_optional_rex_32(dst, src);
909 emit(0x0f);
910 emit(0x40 + cc);
911 emit_operand(dst, src);
912 }
913
914
cmpb_al(Immediate imm8)915 void Assembler::cmpb_al(Immediate imm8) {
916 ASSERT(is_int8(imm8.value_) || is_uint8(imm8.value_));
917 EnsureSpace ensure_space(this);
918 emit(0x3c);
919 emit(imm8.value_);
920 }
921
922
cpuid()923 void Assembler::cpuid() {
924 EnsureSpace ensure_space(this);
925 emit(0x0F);
926 emit(0xA2);
927 }
928
929
cqo()930 void Assembler::cqo() {
931 EnsureSpace ensure_space(this);
932 emit_rex_64();
933 emit(0x99);
934 }
935
936
decq(Register dst)937 void Assembler::decq(Register dst) {
938 EnsureSpace ensure_space(this);
939 emit_rex_64(dst);
940 emit(0xFF);
941 emit_modrm(0x1, dst);
942 }
943
944
decq(const Operand & dst)945 void Assembler::decq(const Operand& dst) {
946 EnsureSpace ensure_space(this);
947 emit_rex_64(dst);
948 emit(0xFF);
949 emit_operand(1, dst);
950 }
951
952
decl(Register dst)953 void Assembler::decl(Register dst) {
954 EnsureSpace ensure_space(this);
955 emit_optional_rex_32(dst);
956 emit(0xFF);
957 emit_modrm(0x1, dst);
958 }
959
960
decl(const Operand & dst)961 void Assembler::decl(const Operand& dst) {
962 EnsureSpace ensure_space(this);
963 emit_optional_rex_32(dst);
964 emit(0xFF);
965 emit_operand(1, dst);
966 }
967
968
decb(Register dst)969 void Assembler::decb(Register dst) {
970 EnsureSpace ensure_space(this);
971 if (!dst.is_byte_register()) {
972 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
973 emit_rex_32(dst);
974 }
975 emit(0xFE);
976 emit_modrm(0x1, dst);
977 }
978
979
decb(const Operand & dst)980 void Assembler::decb(const Operand& dst) {
981 EnsureSpace ensure_space(this);
982 emit_optional_rex_32(dst);
983 emit(0xFE);
984 emit_operand(1, dst);
985 }
986
987
enter(Immediate size)988 void Assembler::enter(Immediate size) {
989 EnsureSpace ensure_space(this);
990 emit(0xC8);
991 emitw(size.value_); // 16 bit operand, always.
992 emit(0);
993 }
994
995
hlt()996 void Assembler::hlt() {
997 EnsureSpace ensure_space(this);
998 emit(0xF4);
999 }
1000
1001
idivq(Register src)1002 void Assembler::idivq(Register src) {
1003 EnsureSpace ensure_space(this);
1004 emit_rex_64(src);
1005 emit(0xF7);
1006 emit_modrm(0x7, src);
1007 }
1008
1009
idivl(Register src)1010 void Assembler::idivl(Register src) {
1011 EnsureSpace ensure_space(this);
1012 emit_optional_rex_32(src);
1013 emit(0xF7);
1014 emit_modrm(0x7, src);
1015 }
1016
1017
imul(Register src)1018 void Assembler::imul(Register src) {
1019 EnsureSpace ensure_space(this);
1020 emit_rex_64(src);
1021 emit(0xF7);
1022 emit_modrm(0x5, src);
1023 }
1024
1025
imul(Register dst,Register src)1026 void Assembler::imul(Register dst, Register src) {
1027 EnsureSpace ensure_space(this);
1028 emit_rex_64(dst, src);
1029 emit(0x0F);
1030 emit(0xAF);
1031 emit_modrm(dst, src);
1032 }
1033
1034
imul(Register dst,const Operand & src)1035 void Assembler::imul(Register dst, const Operand& src) {
1036 EnsureSpace ensure_space(this);
1037 emit_rex_64(dst, src);
1038 emit(0x0F);
1039 emit(0xAF);
1040 emit_operand(dst, src);
1041 }
1042
1043
imul(Register dst,Register src,Immediate imm)1044 void Assembler::imul(Register dst, Register src, Immediate imm) {
1045 EnsureSpace ensure_space(this);
1046 emit_rex_64(dst, src);
1047 if (is_int8(imm.value_)) {
1048 emit(0x6B);
1049 emit_modrm(dst, src);
1050 emit(imm.value_);
1051 } else {
1052 emit(0x69);
1053 emit_modrm(dst, src);
1054 emitl(imm.value_);
1055 }
1056 }
1057
1058
imull(Register dst,Register src)1059 void Assembler::imull(Register dst, Register src) {
1060 EnsureSpace ensure_space(this);
1061 emit_optional_rex_32(dst, src);
1062 emit(0x0F);
1063 emit(0xAF);
1064 emit_modrm(dst, src);
1065 }
1066
1067
imull(Register dst,const Operand & src)1068 void Assembler::imull(Register dst, const Operand& src) {
1069 EnsureSpace ensure_space(this);
1070 emit_optional_rex_32(dst, src);
1071 emit(0x0F);
1072 emit(0xAF);
1073 emit_operand(dst, src);
1074 }
1075
1076
imull(Register dst,Register src,Immediate imm)1077 void Assembler::imull(Register dst, Register src, Immediate imm) {
1078 EnsureSpace ensure_space(this);
1079 emit_optional_rex_32(dst, src);
1080 if (is_int8(imm.value_)) {
1081 emit(0x6B);
1082 emit_modrm(dst, src);
1083 emit(imm.value_);
1084 } else {
1085 emit(0x69);
1086 emit_modrm(dst, src);
1087 emitl(imm.value_);
1088 }
1089 }
1090
1091
incq(Register dst)1092 void Assembler::incq(Register dst) {
1093 EnsureSpace ensure_space(this);
1094 emit_rex_64(dst);
1095 emit(0xFF);
1096 emit_modrm(0x0, dst);
1097 }
1098
1099
incq(const Operand & dst)1100 void Assembler::incq(const Operand& dst) {
1101 EnsureSpace ensure_space(this);
1102 emit_rex_64(dst);
1103 emit(0xFF);
1104 emit_operand(0, dst);
1105 }
1106
1107
incl(const Operand & dst)1108 void Assembler::incl(const Operand& dst) {
1109 EnsureSpace ensure_space(this);
1110 emit_optional_rex_32(dst);
1111 emit(0xFF);
1112 emit_operand(0, dst);
1113 }
1114
1115
incl(Register dst)1116 void Assembler::incl(Register dst) {
1117 EnsureSpace ensure_space(this);
1118 emit_optional_rex_32(dst);
1119 emit(0xFF);
1120 emit_modrm(0, dst);
1121 }
1122
1123
int3()1124 void Assembler::int3() {
1125 EnsureSpace ensure_space(this);
1126 emit(0xCC);
1127 }
1128
1129
j(Condition cc,Label * L,Label::Distance distance)1130 void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
1131 if (cc == always) {
1132 jmp(L);
1133 return;
1134 } else if (cc == never) {
1135 return;
1136 }
1137 EnsureSpace ensure_space(this);
1138 ASSERT(is_uint4(cc));
1139 if (L->is_bound()) {
1140 const int short_size = 2;
1141 const int long_size = 6;
1142 int offs = L->pos() - pc_offset();
1143 ASSERT(offs <= 0);
1144 // Determine whether we can use 1-byte offsets for backwards branches,
1145 // which have a max range of 128 bytes.
1146
1147 // We also need to check predictable_code_size() flag here, because on x64,
1148 // when the full code generator recompiles code for debugging, some places
1149 // need to be padded out to a certain size. The debugger is keeping track of
1150 // how often it did this so that it can adjust return addresses on the
1151 // stack, but if the size of jump instructions can also change, that's not
1152 // enough and the calculated offsets would be incorrect.
1153 if (is_int8(offs - short_size) && !predictable_code_size()) {
1154 // 0111 tttn #8-bit disp.
1155 emit(0x70 | cc);
1156 emit((offs - short_size) & 0xFF);
1157 } else {
1158 // 0000 1111 1000 tttn #32-bit disp.
1159 emit(0x0F);
1160 emit(0x80 | cc);
1161 emitl(offs - long_size);
1162 }
1163 } else if (distance == Label::kNear) {
1164 // 0111 tttn #8-bit disp
1165 emit(0x70 | cc);
1166 byte disp = 0x00;
1167 if (L->is_near_linked()) {
1168 int offset = L->near_link_pos() - pc_offset();
1169 ASSERT(is_int8(offset));
1170 disp = static_cast<byte>(offset & 0xFF);
1171 }
1172 L->link_to(pc_offset(), Label::kNear);
1173 emit(disp);
1174 } else if (L->is_linked()) {
1175 // 0000 1111 1000 tttn #32-bit disp.
1176 emit(0x0F);
1177 emit(0x80 | cc);
1178 emitl(L->pos());
1179 L->link_to(pc_offset() - sizeof(int32_t));
1180 } else {
1181 ASSERT(L->is_unused());
1182 emit(0x0F);
1183 emit(0x80 | cc);
1184 int32_t current = pc_offset();
1185 emitl(current);
1186 L->link_to(current);
1187 }
1188 }
1189
1190
j(Condition cc,Address entry,RelocInfo::Mode rmode)1191 void Assembler::j(Condition cc, Address entry, RelocInfo::Mode rmode) {
1192 ASSERT(RelocInfo::IsRuntimeEntry(rmode));
1193 EnsureSpace ensure_space(this);
1194 ASSERT(is_uint4(cc));
1195 emit(0x0F);
1196 emit(0x80 | cc);
1197 emit_runtime_entry(entry, rmode);
1198 }
1199
1200
j(Condition cc,Handle<Code> target,RelocInfo::Mode rmode)1201 void Assembler::j(Condition cc,
1202 Handle<Code> target,
1203 RelocInfo::Mode rmode) {
1204 EnsureSpace ensure_space(this);
1205 ASSERT(is_uint4(cc));
1206 // 0000 1111 1000 tttn #32-bit disp.
1207 emit(0x0F);
1208 emit(0x80 | cc);
1209 emit_code_target(target, rmode);
1210 }
1211
1212
jmp(Label * L,Label::Distance distance)1213 void Assembler::jmp(Label* L, Label::Distance distance) {
1214 EnsureSpace ensure_space(this);
1215 const int short_size = sizeof(int8_t);
1216 const int long_size = sizeof(int32_t);
1217 if (L->is_bound()) {
1218 int offs = L->pos() - pc_offset() - 1;
1219 ASSERT(offs <= 0);
1220 if (is_int8(offs - short_size) && !predictable_code_size()) {
1221 // 1110 1011 #8-bit disp.
1222 emit(0xEB);
1223 emit((offs - short_size) & 0xFF);
1224 } else {
1225 // 1110 1001 #32-bit disp.
1226 emit(0xE9);
1227 emitl(offs - long_size);
1228 }
1229 } else if (distance == Label::kNear) {
1230 emit(0xEB);
1231 byte disp = 0x00;
1232 if (L->is_near_linked()) {
1233 int offset = L->near_link_pos() - pc_offset();
1234 ASSERT(is_int8(offset));
1235 disp = static_cast<byte>(offset & 0xFF);
1236 }
1237 L->link_to(pc_offset(), Label::kNear);
1238 emit(disp);
1239 } else if (L->is_linked()) {
1240 // 1110 1001 #32-bit disp.
1241 emit(0xE9);
1242 emitl(L->pos());
1243 L->link_to(pc_offset() - long_size);
1244 } else {
1245 // 1110 1001 #32-bit disp.
1246 ASSERT(L->is_unused());
1247 emit(0xE9);
1248 int32_t current = pc_offset();
1249 emitl(current);
1250 L->link_to(current);
1251 }
1252 }
1253
1254
jmp(Handle<Code> target,RelocInfo::Mode rmode)1255 void Assembler::jmp(Handle<Code> target, RelocInfo::Mode rmode) {
1256 EnsureSpace ensure_space(this);
1257 // 1110 1001 #32-bit disp.
1258 emit(0xE9);
1259 emit_code_target(target, rmode);
1260 }
1261
1262
jmp(Address entry,RelocInfo::Mode rmode)1263 void Assembler::jmp(Address entry, RelocInfo::Mode rmode) {
1264 ASSERT(RelocInfo::IsRuntimeEntry(rmode));
1265 EnsureSpace ensure_space(this);
1266 ASSERT(RelocInfo::IsRuntimeEntry(rmode));
1267 emit(0xE9);
1268 emit_runtime_entry(entry, rmode);
1269 }
1270
1271
jmp(Register target)1272 void Assembler::jmp(Register target) {
1273 EnsureSpace ensure_space(this);
1274 // Opcode FF/4 r64.
1275 emit_optional_rex_32(target);
1276 emit(0xFF);
1277 emit_modrm(0x4, target);
1278 }
1279
1280
jmp(const Operand & src)1281 void Assembler::jmp(const Operand& src) {
1282 EnsureSpace ensure_space(this);
1283 // Opcode FF/4 m64.
1284 emit_optional_rex_32(src);
1285 emit(0xFF);
1286 emit_operand(0x4, src);
1287 }
1288
1289
lea(Register dst,const Operand & src)1290 void Assembler::lea(Register dst, const Operand& src) {
1291 EnsureSpace ensure_space(this);
1292 emit_rex_64(dst, src);
1293 emit(0x8D);
1294 emit_operand(dst, src);
1295 }
1296
1297
leal(Register dst,const Operand & src)1298 void Assembler::leal(Register dst, const Operand& src) {
1299 EnsureSpace ensure_space(this);
1300 emit_optional_rex_32(dst, src);
1301 emit(0x8D);
1302 emit_operand(dst, src);
1303 }
1304
1305
load_rax(void * value,RelocInfo::Mode mode)1306 void Assembler::load_rax(void* value, RelocInfo::Mode mode) {
1307 EnsureSpace ensure_space(this);
1308 emit(0x48); // REX.W
1309 emit(0xA1);
1310 emitp(value, mode);
1311 }
1312
1313
load_rax(ExternalReference ref)1314 void Assembler::load_rax(ExternalReference ref) {
1315 load_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
1316 }
1317
1318
leave()1319 void Assembler::leave() {
1320 EnsureSpace ensure_space(this);
1321 emit(0xC9);
1322 }
1323
1324
movb(Register dst,const Operand & src)1325 void Assembler::movb(Register dst, const Operand& src) {
1326 EnsureSpace ensure_space(this);
1327 if (!dst.is_byte_register()) {
1328 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1329 emit_rex_32(dst, src);
1330 } else {
1331 emit_optional_rex_32(dst, src);
1332 }
1333 emit(0x8A);
1334 emit_operand(dst, src);
1335 }
1336
1337
movb(Register dst,Immediate imm)1338 void Assembler::movb(Register dst, Immediate imm) {
1339 EnsureSpace ensure_space(this);
1340 if (!dst.is_byte_register()) {
1341 emit_rex_32(dst);
1342 }
1343 emit(0xB0 + dst.low_bits());
1344 emit(imm.value_);
1345 }
1346
1347
movb(const Operand & dst,Register src)1348 void Assembler::movb(const Operand& dst, Register src) {
1349 EnsureSpace ensure_space(this);
1350 if (!src.is_byte_register()) {
1351 emit_rex_32(src, dst);
1352 } else {
1353 emit_optional_rex_32(src, dst);
1354 }
1355 emit(0x88);
1356 emit_operand(src, dst);
1357 }
1358
1359
movb(const Operand & dst,Immediate imm)1360 void Assembler::movb(const Operand& dst, Immediate imm) {
1361 EnsureSpace ensure_space(this);
1362 emit_optional_rex_32(dst);
1363 emit(0xC6);
1364 emit_operand(0x0, dst);
1365 emit(static_cast<byte>(imm.value_));
1366 }
1367
1368
movw(Register dst,const Operand & src)1369 void Assembler::movw(Register dst, const Operand& src) {
1370 EnsureSpace ensure_space(this);
1371 emit(0x66);
1372 emit_optional_rex_32(dst, src);
1373 emit(0x8B);
1374 emit_operand(dst, src);
1375 }
1376
1377
movw(const Operand & dst,Register src)1378 void Assembler::movw(const Operand& dst, Register src) {
1379 EnsureSpace ensure_space(this);
1380 emit(0x66);
1381 emit_optional_rex_32(src, dst);
1382 emit(0x89);
1383 emit_operand(src, dst);
1384 }
1385
1386
movw(const Operand & dst,Immediate imm)1387 void Assembler::movw(const Operand& dst, Immediate imm) {
1388 EnsureSpace ensure_space(this);
1389 emit(0x66);
1390 emit_optional_rex_32(dst);
1391 emit(0xC7);
1392 emit_operand(0x0, dst);
1393 emit(static_cast<byte>(imm.value_ & 0xff));
1394 emit(static_cast<byte>(imm.value_ >> 8));
1395 }
1396
1397
emit_mov(Register dst,const Operand & src,int size)1398 void Assembler::emit_mov(Register dst, const Operand& src, int size) {
1399 EnsureSpace ensure_space(this);
1400 emit_rex(dst, src, size);
1401 emit(0x8B);
1402 emit_operand(dst, src);
1403 }
1404
1405
emit_mov(Register dst,Register src,int size)1406 void Assembler::emit_mov(Register dst, Register src, int size) {
1407 EnsureSpace ensure_space(this);
1408 if (src.low_bits() == 4) {
1409 emit_rex(src, dst, size);
1410 emit(0x89);
1411 emit_modrm(src, dst);
1412 } else {
1413 emit_rex(dst, src, size);
1414 emit(0x8B);
1415 emit_modrm(dst, src);
1416 }
1417 }
1418
1419
emit_mov(const Operand & dst,Register src,int size)1420 void Assembler::emit_mov(const Operand& dst, Register src, int size) {
1421 EnsureSpace ensure_space(this);
1422 emit_rex(src, dst, size);
1423 emit(0x89);
1424 emit_operand(src, dst);
1425 }
1426
1427
emit_mov(Register dst,Immediate value,int size)1428 void Assembler::emit_mov(Register dst, Immediate value, int size) {
1429 EnsureSpace ensure_space(this);
1430 emit_rex(dst, size);
1431 if (size == kInt64Size) {
1432 emit(0xC7);
1433 emit_modrm(0x0, dst);
1434 } else {
1435 ASSERT(size == kInt32Size);
1436 emit(0xB8 + dst.low_bits());
1437 }
1438 emit(value);
1439 }
1440
1441
emit_mov(const Operand & dst,Immediate value,int size)1442 void Assembler::emit_mov(const Operand& dst, Immediate value, int size) {
1443 EnsureSpace ensure_space(this);
1444 emit_rex(dst, size);
1445 emit(0xC7);
1446 emit_operand(0x0, dst);
1447 emit(value);
1448 }
1449
1450
movq(Register dst,void * value,RelocInfo::Mode rmode)1451 void Assembler::movq(Register dst, void* value, RelocInfo::Mode rmode) {
1452 // This method must not be used with heap object references. The stored
1453 // address is not GC safe. Use the handle version instead.
1454 ASSERT(rmode > RelocInfo::LAST_GCED_ENUM);
1455 if (RelocInfo::IsNone(rmode)) {
1456 movq(dst, reinterpret_cast<int64_t>(value));
1457 } else {
1458 EnsureSpace ensure_space(this);
1459 emit_rex_64(dst);
1460 emit(0xB8 | dst.low_bits());
1461 emitp(value, rmode);
1462 }
1463 }
1464
1465
movq(Register dst,int64_t value)1466 void Assembler::movq(Register dst, int64_t value) {
1467 EnsureSpace ensure_space(this);
1468 emit_rex_64(dst);
1469 emit(0xB8 | dst.low_bits());
1470 emitq(value);
1471 }
1472
1473
movq(Register dst,uint64_t value)1474 void Assembler::movq(Register dst, uint64_t value) {
1475 movq(dst, static_cast<int64_t>(value));
1476 }
1477
1478
1479 // Loads the ip-relative location of the src label into the target location
1480 // (as a 32-bit offset sign extended to 64-bit).
movl(const Operand & dst,Label * src)1481 void Assembler::movl(const Operand& dst, Label* src) {
1482 EnsureSpace ensure_space(this);
1483 emit_optional_rex_32(dst);
1484 emit(0xC7);
1485 emit_operand(0, dst);
1486 if (src->is_bound()) {
1487 int offset = src->pos() - pc_offset() - sizeof(int32_t);
1488 ASSERT(offset <= 0);
1489 emitl(offset);
1490 } else if (src->is_linked()) {
1491 emitl(src->pos());
1492 src->link_to(pc_offset() - sizeof(int32_t));
1493 } else {
1494 ASSERT(src->is_unused());
1495 int32_t current = pc_offset();
1496 emitl(current);
1497 src->link_to(current);
1498 }
1499 }
1500
1501
movq(Register dst,Handle<Object> value,RelocInfo::Mode mode)1502 void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) {
1503 AllowDeferredHandleDereference using_raw_address;
1504 ASSERT(!RelocInfo::IsNone(mode));
1505 EnsureSpace ensure_space(this);
1506 ASSERT(value->IsHeapObject());
1507 ASSERT(!isolate()->heap()->InNewSpace(*value));
1508 emit_rex_64(dst);
1509 emit(0xB8 | dst.low_bits());
1510 emitp(value.location(), mode);
1511 }
1512
1513
movsxbq(Register dst,const Operand & src)1514 void Assembler::movsxbq(Register dst, const Operand& src) {
1515 EnsureSpace ensure_space(this);
1516 emit_rex_64(dst, src);
1517 emit(0x0F);
1518 emit(0xBE);
1519 emit_operand(dst, src);
1520 }
1521
1522
movsxwq(Register dst,const Operand & src)1523 void Assembler::movsxwq(Register dst, const Operand& src) {
1524 EnsureSpace ensure_space(this);
1525 emit_rex_64(dst, src);
1526 emit(0x0F);
1527 emit(0xBF);
1528 emit_operand(dst, src);
1529 }
1530
1531
movsxlq(Register dst,Register src)1532 void Assembler::movsxlq(Register dst, Register src) {
1533 EnsureSpace ensure_space(this);
1534 emit_rex_64(dst, src);
1535 emit(0x63);
1536 emit_modrm(dst, src);
1537 }
1538
1539
movsxlq(Register dst,const Operand & src)1540 void Assembler::movsxlq(Register dst, const Operand& src) {
1541 EnsureSpace ensure_space(this);
1542 emit_rex_64(dst, src);
1543 emit(0x63);
1544 emit_operand(dst, src);
1545 }
1546
1547
movzxbq(Register dst,const Operand & src)1548 void Assembler::movzxbq(Register dst, const Operand& src) {
1549 EnsureSpace ensure_space(this);
1550 // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1551 // there is no need to make this a 64 bit operation.
1552 emit_optional_rex_32(dst, src);
1553 emit(0x0F);
1554 emit(0xB6);
1555 emit_operand(dst, src);
1556 }
1557
1558
movzxbl(Register dst,const Operand & src)1559 void Assembler::movzxbl(Register dst, const Operand& src) {
1560 EnsureSpace ensure_space(this);
1561 emit_optional_rex_32(dst, src);
1562 emit(0x0F);
1563 emit(0xB6);
1564 emit_operand(dst, src);
1565 }
1566
1567
movzxwq(Register dst,const Operand & src)1568 void Assembler::movzxwq(Register dst, const Operand& src) {
1569 EnsureSpace ensure_space(this);
1570 emit_optional_rex_32(dst, src);
1571 emit(0x0F);
1572 emit(0xB7);
1573 emit_operand(dst, src);
1574 }
1575
1576
movzxwl(Register dst,const Operand & src)1577 void Assembler::movzxwl(Register dst, const Operand& src) {
1578 EnsureSpace ensure_space(this);
1579 emit_optional_rex_32(dst, src);
1580 emit(0x0F);
1581 emit(0xB7);
1582 emit_operand(dst, src);
1583 }
1584
1585
movzxwl(Register dst,Register src)1586 void Assembler::movzxwl(Register dst, Register src) {
1587 EnsureSpace ensure_space(this);
1588 emit_optional_rex_32(dst, src);
1589 emit(0x0F);
1590 emit(0xB7);
1591 emit_modrm(dst, src);
1592 }
1593
1594
repmovsb()1595 void Assembler::repmovsb() {
1596 EnsureSpace ensure_space(this);
1597 emit(0xF3);
1598 emit(0xA4);
1599 }
1600
1601
repmovsw()1602 void Assembler::repmovsw() {
1603 EnsureSpace ensure_space(this);
1604 emit(0x66); // Operand size override.
1605 emit(0xF3);
1606 emit(0xA4);
1607 }
1608
1609
repmovsl()1610 void Assembler::repmovsl() {
1611 EnsureSpace ensure_space(this);
1612 emit(0xF3);
1613 emit(0xA5);
1614 }
1615
1616
repmovsq()1617 void Assembler::repmovsq() {
1618 EnsureSpace ensure_space(this);
1619 emit(0xF3);
1620 emit_rex_64();
1621 emit(0xA5);
1622 }
1623
1624
mul(Register src)1625 void Assembler::mul(Register src) {
1626 EnsureSpace ensure_space(this);
1627 emit_rex_64(src);
1628 emit(0xF7);
1629 emit_modrm(0x4, src);
1630 }
1631
1632
neg(Register dst)1633 void Assembler::neg(Register dst) {
1634 EnsureSpace ensure_space(this);
1635 emit_rex_64(dst);
1636 emit(0xF7);
1637 emit_modrm(0x3, dst);
1638 }
1639
1640
negl(Register dst)1641 void Assembler::negl(Register dst) {
1642 EnsureSpace ensure_space(this);
1643 emit_optional_rex_32(dst);
1644 emit(0xF7);
1645 emit_modrm(0x3, dst);
1646 }
1647
1648
neg(const Operand & dst)1649 void Assembler::neg(const Operand& dst) {
1650 EnsureSpace ensure_space(this);
1651 emit_rex_64(dst);
1652 emit(0xF7);
1653 emit_operand(3, dst);
1654 }
1655
1656
nop()1657 void Assembler::nop() {
1658 EnsureSpace ensure_space(this);
1659 emit(0x90);
1660 }
1661
1662
not_(Register dst)1663 void Assembler::not_(Register dst) {
1664 EnsureSpace ensure_space(this);
1665 emit_rex_64(dst);
1666 emit(0xF7);
1667 emit_modrm(0x2, dst);
1668 }
1669
1670
not_(const Operand & dst)1671 void Assembler::not_(const Operand& dst) {
1672 EnsureSpace ensure_space(this);
1673 emit_rex_64(dst);
1674 emit(0xF7);
1675 emit_operand(2, dst);
1676 }
1677
1678
notl(Register dst)1679 void Assembler::notl(Register dst) {
1680 EnsureSpace ensure_space(this);
1681 emit_optional_rex_32(dst);
1682 emit(0xF7);
1683 emit_modrm(0x2, dst);
1684 }
1685
1686
Nop(int n)1687 void Assembler::Nop(int n) {
1688 // The recommended muti-byte sequences of NOP instructions from the Intel 64
1689 // and IA-32 Architectures Software Developer's Manual.
1690 //
1691 // Length Assembly Byte Sequence
1692 // 2 bytes 66 NOP 66 90H
1693 // 3 bytes NOP DWORD ptr [EAX] 0F 1F 00H
1694 // 4 bytes NOP DWORD ptr [EAX + 00H] 0F 1F 40 00H
1695 // 5 bytes NOP DWORD ptr [EAX + EAX*1 + 00H] 0F 1F 44 00 00H
1696 // 6 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 00H] 66 0F 1F 44 00 00H
1697 // 7 bytes NOP DWORD ptr [EAX + 00000000H] 0F 1F 80 00 00 00 00H
1698 // 8 bytes NOP DWORD ptr [EAX + EAX*1 + 00000000H] 0F 1F 84 00 00 00 00 00H
1699 // 9 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 66 0F 1F 84 00 00 00 00
1700 // 00000000H] 00H
1701
1702 EnsureSpace ensure_space(this);
1703 while (n > 0) {
1704 switch (n) {
1705 case 2:
1706 emit(0x66);
1707 case 1:
1708 emit(0x90);
1709 return;
1710 case 3:
1711 emit(0x0f);
1712 emit(0x1f);
1713 emit(0x00);
1714 return;
1715 case 4:
1716 emit(0x0f);
1717 emit(0x1f);
1718 emit(0x40);
1719 emit(0x00);
1720 return;
1721 case 6:
1722 emit(0x66);
1723 case 5:
1724 emit(0x0f);
1725 emit(0x1f);
1726 emit(0x44);
1727 emit(0x00);
1728 emit(0x00);
1729 return;
1730 case 7:
1731 emit(0x0f);
1732 emit(0x1f);
1733 emit(0x80);
1734 emit(0x00);
1735 emit(0x00);
1736 emit(0x00);
1737 emit(0x00);
1738 return;
1739 default:
1740 case 11:
1741 emit(0x66);
1742 n--;
1743 case 10:
1744 emit(0x66);
1745 n--;
1746 case 9:
1747 emit(0x66);
1748 n--;
1749 case 8:
1750 emit(0x0f);
1751 emit(0x1f);
1752 emit(0x84);
1753 emit(0x00);
1754 emit(0x00);
1755 emit(0x00);
1756 emit(0x00);
1757 emit(0x00);
1758 n -= 8;
1759 }
1760 }
1761 }
1762
1763
pop(Register dst)1764 void Assembler::pop(Register dst) {
1765 EnsureSpace ensure_space(this);
1766 emit_optional_rex_32(dst);
1767 emit(0x58 | dst.low_bits());
1768 }
1769
1770
pop(const Operand & dst)1771 void Assembler::pop(const Operand& dst) {
1772 EnsureSpace ensure_space(this);
1773 emit_optional_rex_32(dst);
1774 emit(0x8F);
1775 emit_operand(0, dst);
1776 }
1777
1778
popfq()1779 void Assembler::popfq() {
1780 EnsureSpace ensure_space(this);
1781 emit(0x9D);
1782 }
1783
1784
push(Register src)1785 void Assembler::push(Register src) {
1786 EnsureSpace ensure_space(this);
1787 emit_optional_rex_32(src);
1788 emit(0x50 | src.low_bits());
1789 }
1790
1791
push(const Operand & src)1792 void Assembler::push(const Operand& src) {
1793 EnsureSpace ensure_space(this);
1794 emit_optional_rex_32(src);
1795 emit(0xFF);
1796 emit_operand(6, src);
1797 }
1798
1799
push(Immediate value)1800 void Assembler::push(Immediate value) {
1801 EnsureSpace ensure_space(this);
1802 if (is_int8(value.value_)) {
1803 emit(0x6A);
1804 emit(value.value_); // Emit low byte of value.
1805 } else {
1806 emit(0x68);
1807 emitl(value.value_);
1808 }
1809 }
1810
1811
push_imm32(int32_t imm32)1812 void Assembler::push_imm32(int32_t imm32) {
1813 EnsureSpace ensure_space(this);
1814 emit(0x68);
1815 emitl(imm32);
1816 }
1817
1818
pushfq()1819 void Assembler::pushfq() {
1820 EnsureSpace ensure_space(this);
1821 emit(0x9C);
1822 }
1823
1824
ret(int imm16)1825 void Assembler::ret(int imm16) {
1826 EnsureSpace ensure_space(this);
1827 ASSERT(is_uint16(imm16));
1828 if (imm16 == 0) {
1829 emit(0xC3);
1830 } else {
1831 emit(0xC2);
1832 emit(imm16 & 0xFF);
1833 emit((imm16 >> 8) & 0xFF);
1834 }
1835 }
1836
1837
setcc(Condition cc,Register reg)1838 void Assembler::setcc(Condition cc, Register reg) {
1839 if (cc > last_condition) {
1840 movb(reg, Immediate(cc == always ? 1 : 0));
1841 return;
1842 }
1843 EnsureSpace ensure_space(this);
1844 ASSERT(is_uint4(cc));
1845 if (!reg.is_byte_register()) { // Use x64 byte registers, where different.
1846 emit_rex_32(reg);
1847 }
1848 emit(0x0F);
1849 emit(0x90 | cc);
1850 emit_modrm(0x0, reg);
1851 }
1852
1853
shld(Register dst,Register src)1854 void Assembler::shld(Register dst, Register src) {
1855 EnsureSpace ensure_space(this);
1856 emit_rex_64(src, dst);
1857 emit(0x0F);
1858 emit(0xA5);
1859 emit_modrm(src, dst);
1860 }
1861
1862
shrd(Register dst,Register src)1863 void Assembler::shrd(Register dst, Register src) {
1864 EnsureSpace ensure_space(this);
1865 emit_rex_64(src, dst);
1866 emit(0x0F);
1867 emit(0xAD);
1868 emit_modrm(src, dst);
1869 }
1870
1871
xchgq(Register dst,Register src)1872 void Assembler::xchgq(Register dst, Register src) {
1873 EnsureSpace ensure_space(this);
1874 if (src.is(rax) || dst.is(rax)) { // Single-byte encoding
1875 Register other = src.is(rax) ? dst : src;
1876 emit_rex_64(other);
1877 emit(0x90 | other.low_bits());
1878 } else if (dst.low_bits() == 4) {
1879 emit_rex_64(dst, src);
1880 emit(0x87);
1881 emit_modrm(dst, src);
1882 } else {
1883 emit_rex_64(src, dst);
1884 emit(0x87);
1885 emit_modrm(src, dst);
1886 }
1887 }
1888
1889
xchgl(Register dst,Register src)1890 void Assembler::xchgl(Register dst, Register src) {
1891 EnsureSpace ensure_space(this);
1892 if (src.is(rax) || dst.is(rax)) { // Single-byte encoding
1893 Register other = src.is(rax) ? dst : src;
1894 emit_optional_rex_32(other);
1895 emit(0x90 | other.low_bits());
1896 } else if (dst.low_bits() == 4) {
1897 emit_optional_rex_32(dst, src);
1898 emit(0x87);
1899 emit_modrm(dst, src);
1900 } else {
1901 emit_optional_rex_32(src, dst);
1902 emit(0x87);
1903 emit_modrm(src, dst);
1904 }
1905 }
1906
1907
store_rax(void * dst,RelocInfo::Mode mode)1908 void Assembler::store_rax(void* dst, RelocInfo::Mode mode) {
1909 EnsureSpace ensure_space(this);
1910 emit(0x48); // REX.W
1911 emit(0xA3);
1912 emitp(dst, mode);
1913 }
1914
1915
store_rax(ExternalReference ref)1916 void Assembler::store_rax(ExternalReference ref) {
1917 store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
1918 }
1919
1920
testb(Register dst,Register src)1921 void Assembler::testb(Register dst, Register src) {
1922 EnsureSpace ensure_space(this);
1923 if (src.low_bits() == 4) {
1924 emit_rex_32(src, dst);
1925 emit(0x84);
1926 emit_modrm(src, dst);
1927 } else {
1928 if (!dst.is_byte_register() || !src.is_byte_register()) {
1929 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1930 emit_rex_32(dst, src);
1931 }
1932 emit(0x84);
1933 emit_modrm(dst, src);
1934 }
1935 }
1936
1937
testb(Register reg,Immediate mask)1938 void Assembler::testb(Register reg, Immediate mask) {
1939 ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
1940 EnsureSpace ensure_space(this);
1941 if (reg.is(rax)) {
1942 emit(0xA8);
1943 emit(mask.value_); // Low byte emitted.
1944 } else {
1945 if (!reg.is_byte_register()) {
1946 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1947 emit_rex_32(reg);
1948 }
1949 emit(0xF6);
1950 emit_modrm(0x0, reg);
1951 emit(mask.value_); // Low byte emitted.
1952 }
1953 }
1954
1955
1956 void Assembler::testb(const Operand& op, Immediate mask) {
1957 ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
1958 EnsureSpace ensure_space(this);
1959 emit_optional_rex_32(rax, op);
1960 emit(0xF6);
1961 emit_operand(rax, op); // Operation code 0
1962 emit(mask.value_); // Low byte emitted.
1963 }
1964
1965
1966 void Assembler::testb(const Operand& op, Register reg) {
1967 EnsureSpace ensure_space(this);
1968 if (!reg.is_byte_register()) {
1969 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
1970 emit_rex_32(reg, op);
1971 } else {
1972 emit_optional_rex_32(reg, op);
1973 }
1974 emit(0x84);
1975 emit_operand(reg, op);
1976 }
1977
1978
1979 void Assembler::testl(Register dst, Register src) {
1980 EnsureSpace ensure_space(this);
1981 if (src.low_bits() == 4) {
1982 emit_optional_rex_32(src, dst);
1983 emit(0x85);
1984 emit_modrm(src, dst);
1985 } else {
1986 emit_optional_rex_32(dst, src);
1987 emit(0x85);
1988 emit_modrm(dst, src);
1989 }
1990 }
1991
1992
1993 void Assembler::testl(Register reg, Immediate mask) {
1994 // testl with a mask that fits in the low byte is exactly testb.
1995 if (is_uint8(mask.value_)) {
1996 testb(reg, mask);
1997 return;
1998 }
1999 EnsureSpace ensure_space(this);
2000 if (reg.is(rax)) {
2001 emit(0xA9);
2002 emit(mask);
2003 } else {
2004 emit_optional_rex_32(rax, reg);
2005 emit(0xF7);
2006 emit_modrm(0x0, reg);
2007 emit(mask);
2008 }
2009 }
2010
2011
2012 void Assembler::testl(const Operand& op, Immediate mask) {
2013 // testl with a mask that fits in the low byte is exactly testb.
2014 if (is_uint8(mask.value_)) {
2015 testb(op, mask);
2016 return;
2017 }
2018 EnsureSpace ensure_space(this);
2019 emit_optional_rex_32(rax, op);
2020 emit(0xF7);
2021 emit_operand(rax, op); // Operation code 0
2022 emit(mask);
2023 }
2024
2025
2026 void Assembler::testl(const Operand& op, Register reg) {
2027 EnsureSpace ensure_space(this);
2028 emit_optional_rex_32(reg, op);
2029 emit(0x85);
2030 emit_operand(reg, op);
2031 }
2032
2033
2034 void Assembler::testq(const Operand& op, Register reg) {
2035 EnsureSpace ensure_space(this);
2036 emit_rex_64(reg, op);
2037 emit(0x85);
2038 emit_operand(reg, op);
2039 }
2040
2041
2042 void Assembler::testq(Register dst, Register src) {
2043 EnsureSpace ensure_space(this);
2044 if (src.low_bits() == 4) {
2045 emit_rex_64(src, dst);
2046 emit(0x85);
2047 emit_modrm(src, dst);
2048 } else {
2049 emit_rex_64(dst, src);
2050 emit(0x85);
2051 emit_modrm(dst, src);
2052 }
2053 }
2054
2055
2056 void Assembler::testq(Register dst, Immediate mask) {
2057 if (is_uint8(mask.value_)) {
2058 testb(dst, mask);
2059 return;
2060 }
2061 EnsureSpace ensure_space(this);
2062 if (dst.is(rax)) {
2063 emit_rex_64();
2064 emit(0xA9);
2065 emit(mask);
2066 } else {
2067 emit_rex_64(dst);
2068 emit(0xF7);
2069 emit_modrm(0, dst);
2070 emit(mask);
2071 }
2072 }
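// Encoding note for the test family above: register-register forms use
// opcode 84 (byte) or 85 (dword/qword), immediate forms use F6 /0 ib or
// F7 /0 id, and tests against rax take the short forms A8 ib / A9 id.
// The 64-bit variants differ only by the REX.W prefix; the 32-bit
// immediate of testq is sign-extended to 64 bits by the CPU.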
2073
2074
2075 // FPU instructions.
2076
2077
2078 void Assembler::fld(int i) {
2079 EnsureSpace ensure_space(this);
2080 emit_farith(0xD9, 0xC0, i);
2081 }
2082
2083
2084 void Assembler::fld1() {
2085 EnsureSpace ensure_space(this);
2086 emit(0xD9);
2087 emit(0xE8);
2088 }
2089
2090
2091 void Assembler::fldz() {
2092 EnsureSpace ensure_space(this);
2093 emit(0xD9);
2094 emit(0xEE);
2095 }
2096
2097
2098 void Assembler::fldpi() {
2099 EnsureSpace ensure_space(this);
2100 emit(0xD9);
2101 emit(0xEB);
2102 }
2103
2104
2105 void Assembler::fldln2() {
2106 EnsureSpace ensure_space(this);
2107 emit(0xD9);
2108 emit(0xED);
2109 }
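// Note: the x87 constant loads above are two-byte D9 Ex opcodes that push
// onto the FPU stack: D9 E8 = fld1, D9 EB = fldpi, D9 ED = fldln2,
// D9 EE = fldz.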
2110
2111
2112 void Assembler::fld_s(const Operand& adr) {
2113 EnsureSpace ensure_space(this);
2114 emit_optional_rex_32(adr);
2115 emit(0xD9);
2116 emit_operand(0, adr);
2117 }
2118
2119
2120 void Assembler::fld_d(const Operand& adr) {
2121 EnsureSpace ensure_space(this);
2122 emit_optional_rex_32(adr);
2123 emit(0xDD);
2124 emit_operand(0, adr);
2125 }
2126
2127
2128 void Assembler::fstp_s(const Operand& adr) {
2129 EnsureSpace ensure_space(this);
2130 emit_optional_rex_32(adr);
2131 emit(0xD9);
2132 emit_operand(3, adr);
2133 }
2134
2135
2136 void Assembler::fstp_d(const Operand& adr) {
2137 EnsureSpace ensure_space(this);
2138 emit_optional_rex_32(adr);
2139 emit(0xDD);
2140 emit_operand(3, adr);
2141 }
2142
2143
2144 void Assembler::fstp(int index) {
2145 ASSERT(is_uint3(index));
2146 EnsureSpace ensure_space(this);
2147 emit_farith(0xDD, 0xD8, index);
2148 }
2149
2150
2151 void Assembler::fild_s(const Operand& adr) {
2152 EnsureSpace ensure_space(this);
2153 emit_optional_rex_32(adr);
2154 emit(0xDB);
2155 emit_operand(0, adr);
2156 }
2157
2158
2159 void Assembler::fild_d(const Operand& adr) {
2160 EnsureSpace ensure_space(this);
2161 emit_optional_rex_32(adr);
2162 emit(0xDF);
2163 emit_operand(5, adr);
2164 }
2165
2166
2167 void Assembler::fistp_s(const Operand& adr) {
2168 EnsureSpace ensure_space(this);
2169 emit_optional_rex_32(adr);
2170 emit(0xDB);
2171 emit_operand(3, adr);
2172 }
2173
2174
2175 void Assembler::fisttp_s(const Operand& adr) {
2176 ASSERT(IsEnabled(SSE3));
2177 EnsureSpace ensure_space(this);
2178 emit_optional_rex_32(adr);
2179 emit(0xDB);
2180 emit_operand(1, adr);
2181 }
2182
2183
2184 void Assembler::fisttp_d(const Operand& adr) {
2185 ASSERT(IsEnabled(SSE3));
2186 EnsureSpace ensure_space(this);
2187 emit_optional_rex_32(adr);
2188 emit(0xDD);
2189 emit_operand(1, adr);
2190 }
2191
2192
2193 void Assembler::fist_s(const Operand& adr) {
2194 EnsureSpace ensure_space(this);
2195 emit_optional_rex_32(adr);
2196 emit(0xDB);
2197 emit_operand(2, adr);
2198 }
2199
2200
2201 void Assembler::fistp_d(const Operand& adr) {
2202 EnsureSpace ensure_space(this);
2203 emit_optional_rex_32(adr);
2204 emit(0xDF);
2205 emit_operand(7, adr);
2206 }
2207
2208
2209 void Assembler::fabs() {
2210 EnsureSpace ensure_space(this);
2211 emit(0xD9);
2212 emit(0xE1);
2213 }
2214
2215
2216 void Assembler::fchs() {
2217 EnsureSpace ensure_space(this);
2218 emit(0xD9);
2219 emit(0xE0);
2220 }
2221
2222
2223 void Assembler::fcos() {
2224 EnsureSpace ensure_space(this);
2225 emit(0xD9);
2226 emit(0xFF);
2227 }
2228
2229
2230 void Assembler::fsin() {
2231 EnsureSpace ensure_space(this);
2232 emit(0xD9);
2233 emit(0xFE);
2234 }
2235
2236
2237 void Assembler::fptan() {
2238 EnsureSpace ensure_space(this);
2239 emit(0xD9);
2240 emit(0xF2);
2241 }
2242
2243
2244 void Assembler::fyl2x() {
2245 EnsureSpace ensure_space(this);
2246 emit(0xD9);
2247 emit(0xF1);
2248 }
2249
2250
2251 void Assembler::f2xm1() {
2252 EnsureSpace ensure_space(this);
2253 emit(0xD9);
2254 emit(0xF0);
2255 }
2256
2257
2258 void Assembler::fscale() {
2259 EnsureSpace ensure_space(this);
2260 emit(0xD9);
2261 emit(0xFD);
2262 }
2263
2264
2265 void Assembler::fninit() {
2266 EnsureSpace ensure_space(this);
2267 emit(0xDB);
2268 emit(0xE3);
2269 }
2270
2271
2272 void Assembler::fadd(int i) {
2273 EnsureSpace ensure_space(this);
2274 emit_farith(0xDC, 0xC0, i);
2275 }
2276
2277
2278 void Assembler::fsub(int i) {
2279 EnsureSpace ensure_space(this);
2280 emit_farith(0xDC, 0xE8, i);
2281 }
2282
2283
2284 void Assembler::fisub_s(const Operand& adr) {
2285 EnsureSpace ensure_space(this);
2286 emit_optional_rex_32(adr);
2287 emit(0xDA);
2288 emit_operand(4, adr);
2289 }
2290
2291
2292 void Assembler::fmul(int i) {
2293 EnsureSpace ensure_space(this);
2294 emit_farith(0xDC, 0xC8, i);
2295 }
2296
2297
2298 void Assembler::fdiv(int i) {
2299 EnsureSpace ensure_space(this);
2300 emit_farith(0xDC, 0xF8, i);
2301 }
2302
2303
2304 void Assembler::faddp(int i) {
2305 EnsureSpace ensure_space(this);
2306 emit_farith(0xDE, 0xC0, i);
2307 }
2308
2309
2310 void Assembler::fsubp(int i) {
2311 EnsureSpace ensure_space(this);
2312 emit_farith(0xDE, 0xE8, i);
2313 }
2314
2315
2316 void Assembler::fsubrp(int i) {
2317 EnsureSpace ensure_space(this);
2318 emit_farith(0xDE, 0xE0, i);
2319 }
2320
2321
2322 void Assembler::fmulp(int i) {
2323 EnsureSpace ensure_space(this);
2324 emit_farith(0xDE, 0xC8, i);
2325 }
2326
2327
2328 void Assembler::fdivp(int i) {
2329 EnsureSpace ensure_space(this);
2330 emit_farith(0xDE, 0xF8, i);
2331 }
2332
2333
2334 void Assembler::fprem() {
2335 EnsureSpace ensure_space(this);
2336 emit(0xD9);
2337 emit(0xF8);
2338 }
2339
2340
2341 void Assembler::fprem1() {
2342 EnsureSpace ensure_space(this);
2343 emit(0xD9);
2344 emit(0xF5);
2345 }
2346
2347
2348 void Assembler::fxch(int i) {
2349 EnsureSpace ensure_space(this);
2350 emit_farith(0xD9, 0xC8, i);
2351 }
2352
2353
2354 void Assembler::fincstp() {
2355 EnsureSpace ensure_space(this);
2356 emit(0xD9);
2357 emit(0xF7);
2358 }
2359
2360
2361 void Assembler::ffree(int i) {
2362 EnsureSpace ensure_space(this);
2363 emit_farith(0xDD, 0xC0, i);
2364 }
2365
2366
2367 void Assembler::ftst() {
2368 EnsureSpace ensure_space(this);
2369 emit(0xD9);
2370 emit(0xE4);
2371 }
2372
2373
2374 void Assembler::fucomp(int i) {
2375 EnsureSpace ensure_space(this);
2376 emit_farith(0xDD, 0xE8, i);
2377 }
2378
2379
2380 void Assembler::fucompp() {
2381 EnsureSpace ensure_space(this);
2382 emit(0xDA);
2383 emit(0xE9);
2384 }
2385
2386
2387 void Assembler::fucomi(int i) {
2388 EnsureSpace ensure_space(this);
2389 emit(0xDB);
2390 emit(0xE8 + i);
2391 }
2392
2393
2394 void Assembler::fucomip() {
2395 EnsureSpace ensure_space(this);
2396 emit(0xDF);
2397 emit(0xE9);
2398 }
2399
2400
2401 void Assembler::fcompp() {
2402 EnsureSpace ensure_space(this);
2403 emit(0xDE);
2404 emit(0xD9);
2405 }
2406
2407
2408 void Assembler::fnstsw_ax() {
2409 EnsureSpace ensure_space(this);
2410 emit(0xDF);
2411 emit(0xE0);
2412 }
2413
2414
2415 void Assembler::fwait() {
2416 EnsureSpace ensure_space(this);
2417 emit(0x9B);
2418 }
2419
2420
2421 void Assembler::frndint() {
2422 EnsureSpace ensure_space(this);
2423 emit(0xD9);
2424 emit(0xFC);
2425 }
2426
2427
2428 void Assembler::fnclex() {
2429 EnsureSpace ensure_space(this);
2430 emit(0xDB);
2431 emit(0xE2);
2432 }
2433
2434
2435 void Assembler::sahf() {
2436 // TODO(X64): Test for presence. Not all 64-bit Intel CPUs support SAHF
2437 // in 64-bit mode. Test CPUID.
2438 EnsureSpace ensure_space(this);
2439 emit(0x9E);
2440 }
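// Note on the TODO above: SAHF (9E) copies AH into the low flag bits
// (SF, ZF, AF, PF, CF). In 64-bit mode its presence is indicated by
// CPUID.80000001H:ECX bit 0 (LAHF/SAHF in long mode), so a runtime check
// is needed before relying on it.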
2441
2442
2443 void Assembler::emit_farith(int b1, int b2, int i) {
2444 ASSERT(is_uint8(b1) && is_uint8(b2)); // wrong opcode
2445 ASSERT(is_uint3(i)); // illegal stack offset
2446 emit(b1);
2447 emit(b2 + i);
2448 }
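// emit_farith encodes an x87 stack operand directly in the second opcode
// byte: the instruction is b1, b2 + i, with i selecting ST(i). For example,
// fadd(2) emits DC C2, i.e. fadd st(2), st(0).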
2449
2450
2451 // SSE operations.
2452
2453 void Assembler::andps(XMMRegister dst, XMMRegister src) {
2454 EnsureSpace ensure_space(this);
2455 emit_optional_rex_32(dst, src);
2456 emit(0x0F);
2457 emit(0x54);
2458 emit_sse_operand(dst, src);
2459 }
2460
2461
2462 void Assembler::andps(XMMRegister dst, const Operand& src) {
2463 EnsureSpace ensure_space(this);
2464 emit_optional_rex_32(dst, src);
2465 emit(0x0F);
2466 emit(0x54);
2467 emit_sse_operand(dst, src);
2468 }
2469
2470
2471 void Assembler::orps(XMMRegister dst, XMMRegister src) {
2472 EnsureSpace ensure_space(this);
2473 emit_optional_rex_32(dst, src);
2474 emit(0x0F);
2475 emit(0x56);
2476 emit_sse_operand(dst, src);
2477 }
2478
2479
2480 void Assembler::orps(XMMRegister dst, const Operand& src) {
2481 EnsureSpace ensure_space(this);
2482 emit_optional_rex_32(dst, src);
2483 emit(0x0F);
2484 emit(0x56);
2485 emit_sse_operand(dst, src);
2486 }
2487
2488
2489 void Assembler::xorps(XMMRegister dst, XMMRegister src) {
2490 EnsureSpace ensure_space(this);
2491 emit_optional_rex_32(dst, src);
2492 emit(0x0F);
2493 emit(0x57);
2494 emit_sse_operand(dst, src);
2495 }
2496
2497
2498 void Assembler::xorps(XMMRegister dst, const Operand& src) {
2499 EnsureSpace ensure_space(this);
2500 emit_optional_rex_32(dst, src);
2501 emit(0x0F);
2502 emit(0x57);
2503 emit_sse_operand(dst, src);
2504 }
2505
2506
2507 void Assembler::addps(XMMRegister dst, XMMRegister src) {
2508 EnsureSpace ensure_space(this);
2509 emit_optional_rex_32(dst, src);
2510 emit(0x0F);
2511 emit(0x58);
2512 emit_sse_operand(dst, src);
2513 }
2514
2515
2516 void Assembler::addps(XMMRegister dst, const Operand& src) {
2517 EnsureSpace ensure_space(this);
2518 emit_optional_rex_32(dst, src);
2519 emit(0x0F);
2520 emit(0x58);
2521 emit_sse_operand(dst, src);
2522 }
2523
2524
2525 void Assembler::subps(XMMRegister dst, XMMRegister src) {
2526 EnsureSpace ensure_space(this);
2527 emit_optional_rex_32(dst, src);
2528 emit(0x0F);
2529 emit(0x5C);
2530 emit_sse_operand(dst, src);
2531 }
2532
2533
2534 void Assembler::subps(XMMRegister dst, const Operand& src) {
2535 EnsureSpace ensure_space(this);
2536 emit_optional_rex_32(dst, src);
2537 emit(0x0F);
2538 emit(0x5C);
2539 emit_sse_operand(dst, src);
2540 }
2541
2542
2543 void Assembler::mulps(XMMRegister dst, XMMRegister src) {
2544 EnsureSpace ensure_space(this);
2545 emit_optional_rex_32(dst, src);
2546 emit(0x0F);
2547 emit(0x59);
2548 emit_sse_operand(dst, src);
2549 }
2550
2551
2552 void Assembler::mulps(XMMRegister dst, const Operand& src) {
2553 EnsureSpace ensure_space(this);
2554 emit_optional_rex_32(dst, src);
2555 emit(0x0F);
2556 emit(0x59);
2557 emit_sse_operand(dst, src);
2558 }
2559
2560
2561 void Assembler::divps(XMMRegister dst, XMMRegister src) {
2562 EnsureSpace ensure_space(this);
2563 emit_optional_rex_32(dst, src);
2564 emit(0x0F);
2565 emit(0x5E);
2566 emit_sse_operand(dst, src);
2567 }
2568
2569
2570 void Assembler::divps(XMMRegister dst, const Operand& src) {
2571 EnsureSpace ensure_space(this);
2572 emit_optional_rex_32(dst, src);
2573 emit(0x0F);
2574 emit(0x5E);
2575 emit_sse_operand(dst, src);
2576 }
2577
2578
2579 // SSE 2 operations.
2580
2581 void Assembler::movd(XMMRegister dst, Register src) {
2582 EnsureSpace ensure_space(this);
2583 emit(0x66);
2584 emit_optional_rex_32(dst, src);
2585 emit(0x0F);
2586 emit(0x6E);
2587 emit_sse_operand(dst, src);
2588 }
2589
2590
2591 void Assembler::movd(Register dst, XMMRegister src) {
2592 EnsureSpace ensure_space(this);
2593 emit(0x66);
2594 emit_optional_rex_32(src, dst);
2595 emit(0x0F);
2596 emit(0x7E);
2597 emit_sse_operand(src, dst);
2598 }
2599
2600
2601 void Assembler::movq(XMMRegister dst, Register src) {
2602 EnsureSpace ensure_space(this);
2603 emit(0x66);
2604 emit_rex_64(dst, src);
2605 emit(0x0F);
2606 emit(0x6E);
2607 emit_sse_operand(dst, src);
2608 }
2609
2610
2611 void Assembler::movq(Register dst, XMMRegister src) {
2612 EnsureSpace ensure_space(this);
2613 emit(0x66);
2614 emit_rex_64(src, dst);
2615 emit(0x0F);
2616 emit(0x7E);
2617 emit_sse_operand(src, dst);
2618 }
2619
2620
2621 void Assembler::movq(XMMRegister dst, XMMRegister src) {
2622 EnsureSpace ensure_space(this);
2623 if (dst.low_bits() == 4) {
2624 // Avoid unnecessary SIB byte.
2625 emit(0xf3);
2626 emit_optional_rex_32(dst, src);
2627 emit(0x0F);
2628 emit(0x7e);
2629 emit_sse_operand(dst, src);
2630 } else {
2631 emit(0x66);
2632 emit_optional_rex_32(src, dst);
2633 emit(0x0F);
2634 emit(0xD6);
2635 emit_sse_operand(src, dst);
2636 }
2637 }
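// Note: both encodings above move the low 64 bits and clear the upper half
// of the destination register; F3 0F 7E is the load form (destination in
// the ModRM reg field) and 66 0F D6 is the store form (source in the reg
// field).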
2638
2639
2640 void Assembler::movdqa(const Operand& dst, XMMRegister src) {
2641 EnsureSpace ensure_space(this);
2642 emit(0x66);
2643 emit_rex_64(src, dst);
2644 emit(0x0F);
2645 emit(0x7F);
2646 emit_sse_operand(src, dst);
2647 }
2648
2649
2650 void Assembler::movdqa(XMMRegister dst, const Operand& src) {
2651 EnsureSpace ensure_space(this);
2652 emit(0x66);
2653 emit_rex_64(dst, src);
2654 emit(0x0F);
2655 emit(0x6F);
2656 emit_sse_operand(dst, src);
2657 }
2658
2659
2660 void Assembler::movdqu(const Operand& dst, XMMRegister src) {
2661 EnsureSpace ensure_space(this);
2662 emit(0xF3);
2663 emit_rex_64(src, dst);
2664 emit(0x0F);
2665 emit(0x7F);
2666 emit_sse_operand(src, dst);
2667 }
2668
2669
2670 void Assembler::movdqu(XMMRegister dst, const Operand& src) {
2671 EnsureSpace ensure_space(this);
2672 emit(0xF3);
2673 emit_rex_64(dst, src);
2674 emit(0x0F);
2675 emit(0x6F);
2676 emit_sse_operand(dst, src);
2677 }
2678
2679
2680 void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
2681 ASSERT(IsEnabled(SSE4_1));
2682 ASSERT(is_uint8(imm8));
2683 EnsureSpace ensure_space(this);
2684 emit(0x66);
2685 emit_optional_rex_32(src, dst);
2686 emit(0x0F);
2687 emit(0x3A);
2688 emit(0x17);
2689 emit_sse_operand(src, dst);
2690 emit(imm8);
2691 }
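// extractps is encoded as 66 0F 3A 17 /r ib; the XMM source sits in the
// ModRM reg field (hence emit_sse_operand(src, dst)) and the low two bits
// of imm8 select which 32-bit lane is written to the destination register.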
2692
2693
2694 void Assembler::movsd(const Operand& dst, XMMRegister src) {
2695 EnsureSpace ensure_space(this);
2696 emit(0xF2); // double
2697 emit_optional_rex_32(src, dst);
2698 emit(0x0F);
2699 emit(0x11); // store
2700 emit_sse_operand(src, dst);
2701 }
2702
2703
2704 void Assembler::movsd(XMMRegister dst, XMMRegister src) {
2705 EnsureSpace ensure_space(this);
2706 emit(0xF2); // double
2707 emit_optional_rex_32(dst, src);
2708 emit(0x0F);
2709 emit(0x10); // load
2710 emit_sse_operand(dst, src);
2711 }
2712
2713
2714 void Assembler::movsd(XMMRegister dst, const Operand& src) {
2715 EnsureSpace ensure_space(this);
2716 emit(0xF2); // double
2717 emit_optional_rex_32(dst, src);
2718 emit(0x0F);
2719 emit(0x10); // load
2720 emit_sse_operand(dst, src);
2721 }
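// The movsd overloads above use the scalar-double prefix F2 with 0F 10 for
// the load form (register destination) and 0F 11 for the store form
// (memory destination).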
2722
2723
2724 void Assembler::movaps(XMMRegister dst, XMMRegister src) {
2725 EnsureSpace ensure_space(this);
2726 if (src.low_bits() == 4) {
2727 // Try to avoid an unnecessary SIB byte.
2728 emit_optional_rex_32(src, dst);
2729 emit(0x0F);
2730 emit(0x29);
2731 emit_sse_operand(src, dst);
2732 } else {
2733 emit_optional_rex_32(dst, src);
2734 emit(0x0F);
2735 emit(0x28);
2736 emit_sse_operand(dst, src);
2737 }
2738 }
2739
2740
2741 void Assembler::shufps(XMMRegister dst, XMMRegister src, byte imm8) {
2742 ASSERT(is_uint8(imm8));
2743 EnsureSpace ensure_space(this);
2744 emit_optional_rex_32(src, dst);
2745 emit(0x0F);
2746 emit(0xC6);
2747 emit_sse_operand(dst, src);
2748 emit(imm8);
2749 }
2750
2751
2752 void Assembler::movapd(XMMRegister dst, XMMRegister src) {
2753 EnsureSpace ensure_space(this);
2754 if (src.low_bits() == 4) {
2755 // Try to avoid an unnecessary SIB byte.
2756 emit(0x66);
2757 emit_optional_rex_32(src, dst);
2758 emit(0x0F);
2759 emit(0x29);
2760 emit_sse_operand(src, dst);
2761 } else {
2762 emit(0x66);
2763 emit_optional_rex_32(dst, src);
2764 emit(0x0F);
2765 emit(0x28);
2766 emit_sse_operand(dst, src);
2767 }
2768 }
2769
2770
2771 void Assembler::movss(XMMRegister dst, const Operand& src) {
2772 EnsureSpace ensure_space(this);
2773 emit(0xF3); // single
2774 emit_optional_rex_32(dst, src);
2775 emit(0x0F);
2776 emit(0x10); // load
2777 emit_sse_operand(dst, src);
2778 }
2779
2780
2781 void Assembler::movss(const Operand& src, XMMRegister dst) {
2782 EnsureSpace ensure_space(this);
2783 emit(0xF3); // single
2784 emit_optional_rex_32(dst, src);
2785 emit(0x0F);
2786 emit(0x11); // store
2787 emit_sse_operand(dst, src);
2788 }
2789
2790
2791 void Assembler::cvttss2si(Register dst, const Operand& src) {
2792 EnsureSpace ensure_space(this);
2793 emit(0xF3);
2794 emit_optional_rex_32(dst, src);
2795 emit(0x0F);
2796 emit(0x2C);
2797 emit_operand(dst, src);
2798 }
2799
2800
2801 void Assembler::cvttss2si(Register dst, XMMRegister src) {
2802 EnsureSpace ensure_space(this);
2803 emit(0xF3);
2804 emit_optional_rex_32(dst, src);
2805 emit(0x0F);
2806 emit(0x2C);
2807 emit_sse_operand(dst, src);
2808 }
2809
2810
2811 void Assembler::cvttsd2si(Register dst, const Operand& src) {
2812 EnsureSpace ensure_space(this);
2813 emit(0xF2);
2814 emit_optional_rex_32(dst, src);
2815 emit(0x0F);
2816 emit(0x2C);
2817 emit_operand(dst, src);
2818 }
2819
2820
2821 void Assembler::cvttsd2si(Register dst, XMMRegister src) {
2822 EnsureSpace ensure_space(this);
2823 emit(0xF2);
2824 emit_optional_rex_32(dst, src);
2825 emit(0x0F);
2826 emit(0x2C);
2827 emit_sse_operand(dst, src);
2828 }
2829
2830
2831 void Assembler::cvttsd2siq(Register dst, XMMRegister src) {
2832 EnsureSpace ensure_space(this);
2833 emit(0xF2);
2834 emit_rex_64(dst, src);
2835 emit(0x0F);
2836 emit(0x2C);
2837 emit_sse_operand(dst, src);
2838 }
2839
2840
2841 void Assembler::cvtlsi2sd(XMMRegister dst, const Operand& src) {
2842 EnsureSpace ensure_space(this);
2843 emit(0xF2);
2844 emit_optional_rex_32(dst, src);
2845 emit(0x0F);
2846 emit(0x2A);
2847 emit_sse_operand(dst, src);
2848 }
2849
2850
2851 void Assembler::cvtlsi2sd(XMMRegister dst, Register src) {
2852 EnsureSpace ensure_space(this);
2853 emit(0xF2);
2854 emit_optional_rex_32(dst, src);
2855 emit(0x0F);
2856 emit(0x2A);
2857 emit_sse_operand(dst, src);
2858 }
2859
2860
2861 void Assembler::cvtlsi2ss(XMMRegister dst, Register src) {
2862 EnsureSpace ensure_space(this);
2863 emit(0xF3);
2864 emit_optional_rex_32(dst, src);
2865 emit(0x0F);
2866 emit(0x2A);
2867 emit_sse_operand(dst, src);
2868 }
2869
2870
2871 void Assembler::cvtqsi2sd(XMMRegister dst, Register src) {
2872 EnsureSpace ensure_space(this);
2873 emit(0xF2);
2874 emit_rex_64(dst, src);
2875 emit(0x0F);
2876 emit(0x2A);
2877 emit_sse_operand(dst, src);
2878 }
2879
2880
2881 void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) {
2882 EnsureSpace ensure_space(this);
2883 emit(0xF3);
2884 emit_optional_rex_32(dst, src);
2885 emit(0x0F);
2886 emit(0x5A);
2887 emit_sse_operand(dst, src);
2888 }
2889
2890
2891 void Assembler::cvtss2sd(XMMRegister dst, const Operand& src) {
2892 EnsureSpace ensure_space(this);
2893 emit(0xF3);
2894 emit_optional_rex_32(dst, src);
2895 emit(0x0F);
2896 emit(0x5A);
2897 emit_sse_operand(dst, src);
2898 }
2899
2900
2901 void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
2902 EnsureSpace ensure_space(this);
2903 emit(0xF2);
2904 emit_optional_rex_32(dst, src);
2905 emit(0x0F);
2906 emit(0x5A);
2907 emit_sse_operand(dst, src);
2908 }
2909
2910
2911 void Assembler::cvtsd2si(Register dst, XMMRegister src) {
2912 EnsureSpace ensure_space(this);
2913 emit(0xF2);
2914 emit_optional_rex_32(dst, src);
2915 emit(0x0F);
2916 emit(0x2D);
2917 emit_sse_operand(dst, src);
2918 }
2919
2920
2921 void Assembler::cvtsd2siq(Register dst, XMMRegister src) {
2922 EnsureSpace ensure_space(this);
2923 emit(0xF2);
2924 emit_rex_64(dst, src);
2925 emit(0x0F);
2926 emit(0x2D);
2927 emit_sse_operand(dst, src);
2928 }
2929
2930
2931 void Assembler::addsd(XMMRegister dst, XMMRegister src) {
2932 EnsureSpace ensure_space(this);
2933 emit(0xF2);
2934 emit_optional_rex_32(dst, src);
2935 emit(0x0F);
2936 emit(0x58);
2937 emit_sse_operand(dst, src);
2938 }
2939
2940
2941 void Assembler::addsd(XMMRegister dst, const Operand& src) {
2942 EnsureSpace ensure_space(this);
2943 emit(0xF2);
2944 emit_optional_rex_32(dst, src);
2945 emit(0x0F);
2946 emit(0x58);
2947 emit_sse_operand(dst, src);
2948 }
2949
2950
2951 void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
2952 EnsureSpace ensure_space(this);
2953 emit(0xF2);
2954 emit_optional_rex_32(dst, src);
2955 emit(0x0F);
2956 emit(0x59);
2957 emit_sse_operand(dst, src);
2958 }
2959
2960
2961 void Assembler::mulsd(XMMRegister dst, const Operand& src) {
2962 EnsureSpace ensure_space(this);
2963 emit(0xF2);
2964 emit_optional_rex_32(dst, src);
2965 emit(0x0F);
2966 emit(0x59);
2967 emit_sse_operand(dst, src);
2968 }
2969
2970
2971 void Assembler::subsd(XMMRegister dst, XMMRegister src) {
2972 EnsureSpace ensure_space(this);
2973 emit(0xF2);
2974 emit_optional_rex_32(dst, src);
2975 emit(0x0F);
2976 emit(0x5C);
2977 emit_sse_operand(dst, src);
2978 }
2979
2980
2981 void Assembler::divsd(XMMRegister dst, XMMRegister src) {
2982 EnsureSpace ensure_space(this);
2983 emit(0xF2);
2984 emit_optional_rex_32(dst, src);
2985 emit(0x0F);
2986 emit(0x5E);
2987 emit_sse_operand(dst, src);
2988 }
2989
2990
2991 void Assembler::andpd(XMMRegister dst, XMMRegister src) {
2992 EnsureSpace ensure_space(this);
2993 emit(0x66);
2994 emit_optional_rex_32(dst, src);
2995 emit(0x0F);
2996 emit(0x54);
2997 emit_sse_operand(dst, src);
2998 }
2999
3000
3001 void Assembler::orpd(XMMRegister dst, XMMRegister src) {
3002 EnsureSpace ensure_space(this);
3003 emit(0x66);
3004 emit_optional_rex_32(dst, src);
3005 emit(0x0F);
3006 emit(0x56);
3007 emit_sse_operand(dst, src);
3008 }
3009
3010
3011 void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
3012 EnsureSpace ensure_space(this);
3013 emit(0x66);
3014 emit_optional_rex_32(dst, src);
3015 emit(0x0F);
3016 emit(0x57);
3017 emit_sse_operand(dst, src);
3018 }
3019
3020
3021 void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
3022 EnsureSpace ensure_space(this);
3023 emit(0xF2);
3024 emit_optional_rex_32(dst, src);
3025 emit(0x0F);
3026 emit(0x51);
3027 emit_sse_operand(dst, src);
3028 }
3029
3030
3031 void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
3032 EnsureSpace ensure_space(this);
3033 emit(0x66);
3034 emit_optional_rex_32(dst, src);
3035 emit(0x0f);
3036 emit(0x2e);
3037 emit_sse_operand(dst, src);
3038 }
3039
3040
3041 void Assembler::ucomisd(XMMRegister dst, const Operand& src) {
3042 EnsureSpace ensure_space(this);
3043 emit(0x66);
3044 emit_optional_rex_32(dst, src);
3045 emit(0x0f);
3046 emit(0x2e);
3047 emit_sse_operand(dst, src);
3048 }
3049
3050
3051 void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) {
3052 EnsureSpace ensure_space(this);
3053 emit(0xF2);
3054 emit_optional_rex_32(dst, src);
3055 emit(0x0F);
3056 emit(0xC2);
3057 emit_sse_operand(dst, src);
3058 emit(0x01); // LT == 1
3059 }
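// cmpltsd is the CMPSD instruction (F2 0F C2 /r ib) with predicate 1
// (less-than); it writes an all-ones or all-zeros mask into the low 64 bits
// of dst.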
3060
3061
3062 void Assembler::roundsd(XMMRegister dst, XMMRegister src,
3063 Assembler::RoundingMode mode) {
3064 ASSERT(IsEnabled(SSE4_1));
3065 EnsureSpace ensure_space(this);
3066 emit(0x66);
3067 emit_optional_rex_32(dst, src);
3068 emit(0x0f);
3069 emit(0x3a);
3070 emit(0x0b);
3071 emit_sse_operand(dst, src);
3072   // Mask precision exception.
3073 emit(static_cast<byte>(mode) | 0x8);
3074 }
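// The imm8 of roundsd carries the rounding mode in its low two bits; the
// 0x8 bit ORed in above suppresses the precision (inexact) exception, per
// the SSE4.1 ROUNDSD immediate layout.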
3075
3076
3077 void Assembler::movmskpd(Register dst, XMMRegister src) {
3078 EnsureSpace ensure_space(this);
3079 emit(0x66);
3080 emit_optional_rex_32(dst, src);
3081 emit(0x0f);
3082 emit(0x50);
3083 emit_sse_operand(dst, src);
3084 }
3085
3086
3087 void Assembler::movmskps(Register dst, XMMRegister src) {
3088 EnsureSpace ensure_space(this);
3089 emit_optional_rex_32(dst, src);
3090 emit(0x0f);
3091 emit(0x50);
3092 emit_sse_operand(dst, src);
3093 }
3094
3095
3096 void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
3097 Register ireg = { reg.code() };
3098 emit_operand(ireg, adr);
3099 }
3100
3101
3102 void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) {
3103 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
3104 }
3105
3106
3107 void Assembler::emit_sse_operand(XMMRegister dst, Register src) {
3108 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
3109 }
3110
3111
3112 void Assembler::emit_sse_operand(Register dst, XMMRegister src) {
3113 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
3114 }
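// The three register-register overloads above build a ModRM byte with
// mod = 11, the first operand's low three bits in the reg field and the
// second operand's low three bits in the r/m field; the fourth register
// bit, when needed, comes from the REX prefix emitted beforehand.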
3115
3116
3117 void Assembler::db(uint8_t data) {
3118 EnsureSpace ensure_space(this);
3119 emit(data);
3120 }
3121
3122
3123 void Assembler::dd(uint32_t data) {
3124 EnsureSpace ensure_space(this);
3125 emitl(data);
3126 }
3127
3128
3129 // Relocation information implementations.
3130
3131 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
3132 ASSERT(!RelocInfo::IsNone(rmode));
3133 if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
3134 // Don't record external references unless the heap will be serialized.
3135 #ifdef DEBUG
3136 if (!Serializer::enabled()) {
3137 Serializer::TooLateToEnableNow();
3138 }
3139 #endif
3140 if (!Serializer::enabled() && !emit_debug_code()) {
3141 return;
3142 }
3143 } else if (rmode == RelocInfo::CODE_AGE_SEQUENCE) {
3144     // Don't record pseudo relocation info for code age sequence mode.
3145 return;
3146 }
3147 RelocInfo rinfo(pc_, rmode, data, NULL);
3148 reloc_info_writer.Write(&rinfo);
3149 }
3150
3151
3152 void Assembler::RecordJSReturn() {
3153 positions_recorder()->WriteRecordedPositions();
3154 EnsureSpace ensure_space(this);
3155 RecordRelocInfo(RelocInfo::JS_RETURN);
3156 }
3157
3158
3159 void Assembler::RecordDebugBreakSlot() {
3160 positions_recorder()->WriteRecordedPositions();
3161 EnsureSpace ensure_space(this);
3162 RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT);
3163 }
3164
3165
3166 void Assembler::RecordComment(const char* msg, bool force) {
3167 if (FLAG_code_comments || force) {
3168 EnsureSpace ensure_space(this);
3169 RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
3170 }
3171 }
3172
3173
3174 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
3175 1 << RelocInfo::RUNTIME_ENTRY |
3176 1 << RelocInfo::INTERNAL_REFERENCE |
3177 1 << RelocInfo::CODE_AGE_SEQUENCE;
3178
3179
3180 bool RelocInfo::IsCodedSpecially() {
3181 // The deserializer needs to know whether a pointer is specially coded. Being
3182 // specially coded on x64 means that it is a relative 32 bit address, as used
3183 // by branch instructions.
3184 return (1 << rmode_) & kApplyMask;
3185 }
3186
3187 } } // namespace v8::internal
3188
3189 #endif // V8_TARGET_ARCH_X64
3190