1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_CODEGEN_X64_ASSEMBLER_X64_INL_H_
6 #define V8_CODEGEN_X64_ASSEMBLER_X64_INL_H_
7
8 #include "src/codegen/x64/assembler-x64.h"
9
10 #include "src/base/cpu.h"
11 #include "src/base/memory.h"
12 #include "src/debug/debug.h"
13 #include "src/objects/objects-inl.h"
14
15 namespace v8 {
16 namespace internal {
17
// The optimizing compiler is unconditionally available on x64.
bool CpuFeatures::SupportsOptimizer() { return true; }
19
// Wasm SIMD128 support on x64 is gated on SSE4.1 being available.
bool CpuFeatures::SupportsWasmSimd128() { return IsSupported(SSE4_1); }
21
22 // -----------------------------------------------------------------------------
23 // Implementation of Assembler
24
// Appends a 32-bit value to the instruction stream and advances pc_.
// The store is unaligned because instruction operands carry no alignment
// guarantees.
void Assembler::emitl(uint32_t x) {
  WriteUnalignedValue(reinterpret_cast<Address>(pc_), x);
  pc_ += sizeof(uint32_t);
}
29
// Appends a 64-bit value to the instruction stream and advances pc_.
void Assembler::emitq(uint64_t x) {
  WriteUnalignedValue(reinterpret_cast<Address>(pc_), x);
  pc_ += sizeof(uint64_t);
}
34
// Appends a 16-bit value to the instruction stream and advances pc_.
void Assembler::emitw(uint16_t x) {
  WriteUnalignedValue(reinterpret_cast<Address>(pc_), x);
  pc_ += sizeof(uint16_t);
}
39
// Emits a runtime entry as a 32-bit offset from the start of the code range,
// recording relocation info first so the entry can be found and relocated
// later (see runtime_entry_at for the inverse).
void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
  DCHECK(RelocInfo::IsRuntimeEntry(rmode));
  RecordRelocInfo(rmode);
  emitl(static_cast<uint32_t>(entry - options().code_range_start));
}
45
emit(Immediate x)46 void Assembler::emit(Immediate x) {
47 if (!RelocInfo::IsNone(x.rmode_)) {
48 RecordRelocInfo(x.rmode_);
49 }
50 emitl(x.value_);
51 }
52
emit(Immediate64 x)53 void Assembler::emit(Immediate64 x) {
54 if (!RelocInfo::IsNone(x.rmode_)) {
55 RecordRelocInfo(x.rmode_);
56 }
57 emitq(static_cast<uint64_t>(x.value_));
58 }
59
// Emits a REX prefix with REX.W set (0x48). REX.R (bit 2) comes from |reg|
// and REX.B (bit 0) from |rm_reg|, extending register codes >= 8.
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}
63
// REX.W prefix for an XMM reg / GP rm pair: bit 3 of each register code is
// shifted into the REX.R and REX.B positions respectively.
void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}
67
// REX.W prefix for a GP reg / XMM rm pair (same bit layout as above).
void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}
71
// REX.W prefix for an XMM reg / XMM rm pair (same bit layout as above).
void Assembler::emit_rex_64(XMMRegister reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}
75
// REX.W prefix for a register/memory pair; the operand supplies its
// precomputed REX.X/REX.B bits.
void Assembler::emit_rex_64(Register reg, Operand op) {
  emit(0x48 | reg.high_bit() << 2 | op.data().rex);
}
79
// REX.W prefix for an XMM register/memory pair; the operand supplies its
// precomputed REX.X/REX.B bits.
void Assembler::emit_rex_64(XMMRegister reg, Operand op) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.data().rex);
}
83
// REX.W prefix with only an rm register operand (REX.B only).
void Assembler::emit_rex_64(Register rm_reg) {
  // Register codes must fit in 4 bits (rax..r15).
  DCHECK_EQ(rm_reg.code() & 0xf, rm_reg.code());
  emit(0x48 | rm_reg.high_bit());
}
88
// REX.W prefix for a memory operand only.
void Assembler::emit_rex_64(Operand op) { emit(0x48 | op.data().rex); }
90
// Emits a REX prefix without REX.W (0x40 base): REX.R from |reg|, REX.B from
// |rm_reg|. Unconditional — emitted even when both extension bits are zero.
void Assembler::emit_rex_32(Register reg, Register rm_reg) {
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
}
94
// Unconditional REX prefix (no REX.W) for a register/memory pair.
void Assembler::emit_rex_32(Register reg, Operand op) {
  emit(0x40 | reg.high_bit() << 2 | op.data().rex);
}
98
// Unconditional REX prefix (no REX.W) with only an rm register operand.
void Assembler::emit_rex_32(Register rm_reg) { emit(0x40 | rm_reg.high_bit()); }
100
// Unconditional REX prefix (no REX.W) for a memory operand only.
void Assembler::emit_rex_32(Operand op) { emit(0x40 | op.data().rex); }
102
// Emits a REX prefix only when at least one of the registers needs an
// extension bit; otherwise no prefix byte is emitted at all.
void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
  if (rex_bits != 0) emit(0x40 | rex_bits);
}
107
// Optional REX prefix for a register/memory pair; emitted only when the
// register or the operand actually needs extension bits.
void Assembler::emit_optional_rex_32(Register reg, Operand op) {
  byte rex_bits = reg.high_bit() << 2 | op.data().rex;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}
112
// Optional REX prefix for an XMM register/memory pair; bit 3 of the XMM code
// becomes REX.R.
void Assembler::emit_optional_rex_32(XMMRegister reg, Operand op) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | op.data().rex;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}
117
// Optional REX prefix for an XMM/XMM pair: bit 3 of |reg| becomes REX.R and
// bit 3 of |base| becomes REX.B.
void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}
122
// Optional REX prefix for an XMM/GP pair (same bit layout as the XMM/XMM
// overload).
void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}
127
// Optional REX prefix for a GP/XMM pair (same bit layout as the XMM/XMM
// overload).
void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}
132
emit_optional_rex_32(Register rm_reg)133 void Assembler::emit_optional_rex_32(Register rm_reg) {
134 if (rm_reg.high_bit()) emit(0x41);
135 }
136
emit_optional_rex_32(XMMRegister rm_reg)137 void Assembler::emit_optional_rex_32(XMMRegister rm_reg) {
138 if (rm_reg.high_bit()) emit(0x41);
139 }
140
// Optional REX prefix for a memory operand; emitted only when the operand's
// precomputed REX bits are non-zero.
void Assembler::emit_optional_rex_32(Operand op) {
  if (op.data().rex != 0) emit(0x40 | op.data().rex);
}
144
emit_optional_rex_8(Register reg)145 void Assembler::emit_optional_rex_8(Register reg) {
146 if (!reg.is_byte_register()) {
147 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
148 emit_rex_32(reg);
149 }
150 }
151
emit_optional_rex_8(Register reg,Operand op)152 void Assembler::emit_optional_rex_8(Register reg, Operand op) {
153 if (!reg.is_byte_register()) {
154 // Register is not one of al, bl, cl, dl. Its encoding needs REX.
155 emit_rex_32(reg, op);
156 } else {
157 emit_optional_rex_32(reg, op);
158 }
159 }
160
161 // byte 1 of 3-byte VEX
emit_vex3_byte1(XMMRegister reg,XMMRegister rm,LeadingOpcode m)162 void Assembler::emit_vex3_byte1(XMMRegister reg, XMMRegister rm,
163 LeadingOpcode m) {
164 byte rxb = static_cast<byte>(~((reg.high_bit() << 2) | rm.high_bit())) << 5;
165 emit(rxb | m);
166 }
167
168 // byte 1 of 3-byte VEX
emit_vex3_byte1(XMMRegister reg,Operand rm,LeadingOpcode m)169 void Assembler::emit_vex3_byte1(XMMRegister reg, Operand rm, LeadingOpcode m) {
170 byte rxb = static_cast<byte>(~((reg.high_bit() << 2) | rm.data().rex)) << 5;
171 emit(rxb | m);
172 }
173
174 // byte 1 of 2-byte VEX
emit_vex2_byte1(XMMRegister reg,XMMRegister v,VectorLength l,SIMDPrefix pp)175 void Assembler::emit_vex2_byte1(XMMRegister reg, XMMRegister v, VectorLength l,
176 SIMDPrefix pp) {
177 byte rv = static_cast<byte>(~((reg.high_bit() << 4) | v.code())) << 3;
178 emit(rv | l | pp);
179 }
180
181 // byte 2 of 3-byte VEX
emit_vex3_byte2(VexW w,XMMRegister v,VectorLength l,SIMDPrefix pp)182 void Assembler::emit_vex3_byte2(VexW w, XMMRegister v, VectorLength l,
183 SIMDPrefix pp) {
184 emit(w | ((~v.code() & 0xf) << 3) | l | pp);
185 }
186
// Emits a VEX prefix for a three-XMM-operand instruction. The compact 2-byte
// form can only encode rm registers with codes < 8, the 0F leading-opcode
// map, and W=0; anything else requires the 3-byte form.
void Assembler::emit_vex_prefix(XMMRegister reg, XMMRegister vreg,
                                XMMRegister rm, VectorLength l, SIMDPrefix pp,
                                LeadingOpcode mm, VexW w) {
  if (rm.high_bit() || mm != k0F || w != kW0) {
    emit_vex3_byte0();
    emit_vex3_byte1(reg, rm, mm);
    emit_vex3_byte2(w, vreg, l, pp);
  } else {
    emit_vex2_byte0();
    emit_vex2_byte1(reg, vreg, l, pp);
  }
}
199
emit_vex_prefix(Register reg,Register vreg,Register rm,VectorLength l,SIMDPrefix pp,LeadingOpcode mm,VexW w)200 void Assembler::emit_vex_prefix(Register reg, Register vreg, Register rm,
201 VectorLength l, SIMDPrefix pp, LeadingOpcode mm,
202 VexW w) {
203 XMMRegister ireg = XMMRegister::from_code(reg.code());
204 XMMRegister ivreg = XMMRegister::from_code(vreg.code());
205 XMMRegister irm = XMMRegister::from_code(rm.code());
206 emit_vex_prefix(ireg, ivreg, irm, l, pp, mm, w);
207 }
208
// Emits a VEX prefix for an instruction with a memory operand. The 2-byte
// form cannot encode X/B extension bits, so any operand REX bits force the
// 3-byte form, as do a non-0F opcode map or W=1.
void Assembler::emit_vex_prefix(XMMRegister reg, XMMRegister vreg, Operand rm,
                                VectorLength l, SIMDPrefix pp, LeadingOpcode mm,
                                VexW w) {
  if (rm.data().rex || mm != k0F || w != kW0) {
    emit_vex3_byte0();
    emit_vex3_byte1(reg, rm, mm);
    emit_vex3_byte2(w, vreg, l, pp);
  } else {
    emit_vex2_byte0();
    emit_vex2_byte1(reg, vreg, l, pp);
  }
}
221
emit_vex_prefix(Register reg,Register vreg,Operand rm,VectorLength l,SIMDPrefix pp,LeadingOpcode mm,VexW w)222 void Assembler::emit_vex_prefix(Register reg, Register vreg, Operand rm,
223 VectorLength l, SIMDPrefix pp, LeadingOpcode mm,
224 VexW w) {
225 XMMRegister ireg = XMMRegister::from_code(reg.code());
226 XMMRegister ivreg = XMMRegister::from_code(vreg.code());
227 emit_vex_prefix(ireg, ivreg, rm, l, pp, mm, w);
228 }
229
// Call/jump targets are stored as a signed 32-bit displacement relative to
// the end of the 4-byte operand, hence the "+ pc + 4". The constant pool is
// unused on x64.
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  return ReadUnalignedValue<int32_t>(pc) + pc + 4;
}
233
// Stores |target| as a rel32 displacement at |pc| (the inverse of
// target_address_at) and flushes the instruction cache unless the caller
// explicitly skips it. The displacement must fit in 32 bits.
void Assembler::set_target_address_at(Address pc, Address constant_pool,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  DCHECK(is_int32(target - pc - 4));
  WriteUnalignedValue(pc, static_cast<int32_t>(target - pc - 4));
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
    FlushInstructionCache(pc, sizeof(int32_t));
  }
}
243
// Internal references are stored as absolute pointers, so deserialization
// just writes the target in place; no pc-relative adjustment is needed.
void Assembler::deserialization_set_target_internal_reference_at(
    Address pc, Address target, RelocInfo::Mode mode) {
  WriteUnalignedValue(pc, target);
}
248
deserialization_set_special_target_at(Address instruction_payload,Code code,Address target)249 void Assembler::deserialization_set_special_target_at(
250 Address instruction_payload, Code code, Address target) {
251 set_target_address_at(instruction_payload,
252 !code.is_null() ? code.constant_pool() : kNullAddress,
253 target);
254 }
255
// Specially-coded targets always occupy kSpecialTargetSize bytes on x64,
// independent of the payload address.
int Assembler::deserialization_special_target_size(
    Address instruction_payload) {
  return kSpecialTargetSize;
}
260
// Code targets are stored as 32-bit values that index into the assembler's
// code-target list (resolved via GetCodeTarget).
Handle<Code> Assembler::code_target_object_handle_at(Address pc) {
  return GetCodeTarget(ReadUnalignedValue<int32_t>(pc));
}
264
// Compressed embedded objects are stored as 32-bit values resolved through
// the assembler's embedded-object table (GetEmbeddedObject).
Handle<HeapObject> Assembler::compressed_embedded_object_handle_at(Address pc) {
  return GetEmbeddedObject(ReadUnalignedValue<uint32_t>(pc));
}
268
// Runtime entries are encoded as 32-bit offsets from the start of the code
// range; this is the inverse of emit_runtime_entry.
Address Assembler::runtime_entry_at(Address pc) {
  return ReadUnalignedValue<int32_t>(pc) + options().code_range_start;
}
272
273 // -----------------------------------------------------------------------------
274 // Implementation of RelocInfo
275
// The modes possibly affected by apply must be in kApplyMask.
void RelocInfo::apply(intptr_t delta) {
  if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    // rel32 displacements are pc-relative, so when the code moves by |delta|
    // the stored displacement must shrink by the same amount.
    WriteUnalignedValue(
        pc_, ReadUnalignedValue<int32_t>(pc_) - static_cast<int32_t>(delta));
  } else if (IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    WriteUnalignedValue(pc_, ReadUnalignedValue<Address>(pc_) + delta);
  }
}
286
// Returns the absolute target of a pc-relative call/jump relocation.
Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}
291
// On x64 the target is stored in-line in the instruction stream, so the
// address of the target slot is pc_ itself.
Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_) ||
         IsWasmStubCall(rmode_) || IsFullEmbeddedObject(rmode_) ||
         IsCompressedEmbeddedObject(rmode_) || IsExternalReference(rmode_) ||
         IsOffHeapTarget(rmode_));
  return pc_;
}
299
// x64 does not use a constant pool, so this must never be called.
Address RelocInfo::constant_pool_entry_address() { UNREACHABLE(); }
301
target_address_size()302 int RelocInfo::target_address_size() {
303 if (IsCodedSpecially()) {
304 return Assembler::kSpecialTargetSize;
305 } else {
306 return IsCompressedEmbeddedObject(rmode_) ? kTaggedSize
307 : kSystemPointerSize;
308 }
309 }
310
// Reads the HeapObject this relocation points at, decompressing the stored
// tagged value when pointer compression is in use.
HeapObject RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_));
  if (IsCompressedEmbeddedObject(rmode_)) {
    // Decompression needs a base address, which is derived from the host
    // object's pointer — hence the host must be known here.
    CHECK(!host_.is_null());
    Object o = static_cast<Object>(DecompressTaggedPointer(
        host_.ptr(), ReadUnalignedValue<Tagged_t>(pc_)));
    return HeapObject::cast(o);
  }
  return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_)));
}
321
// Like target_object(), but derives the decompression base from the isolate
// instead of the host object, for callers that have no host.
HeapObject RelocInfo::target_object_no_host(Isolate* isolate) {
  DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_));
  if (IsCompressedEmbeddedObject(rmode_)) {
    Tagged_t compressed = ReadUnalignedValue<Tagged_t>(pc_);
    // A compressed embedded object must be a heap pointer, never a Smi.
    DCHECK(!HAS_SMI_TAG(compressed));
    Object obj(DecompressTaggedPointer(isolate, compressed));
    return HeapObject::cast(obj);
  }
  return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_)));
}
332
target_object_handle(Assembler * origin)333 Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
334 DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_));
335 if (IsCodeTarget(rmode_)) {
336 return origin->code_target_object_handle_at(pc_);
337 } else {
338 if (IsCompressedEmbeddedObject(rmode_)) {
339 return origin->compressed_embedded_object_handle_at(pc_);
340 }
341 return Handle<HeapObject>::cast(ReadUnalignedValue<Handle<Object>>(pc_));
342 }
343 }
344
// External references are stored as absolute addresses in-line at pc_.
Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return ReadUnalignedValue<Address>(pc_);
}
349
// Overwrites the absolute external-reference address at pc_ and flushes the
// instruction cache unless explicitly skipped.
void RelocInfo::set_target_external_reference(
    Address target, ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  WriteUnalignedValue(pc_, target);
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
    FlushInstructionCache(pc_, sizeof(Address));
  }
}
358
// Internal references are stored as absolute addresses in-line at pc_.
Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return ReadUnalignedValue<Address>(pc_);
}
363
// The internal reference is stored in-line, so its slot address is pc_.
Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return pc_;
}
368
// Overwrites the target object of this relocation, compressing the pointer
// when the slot is a compressed one, then flushes the instruction cache and
// notifies the write barrier as requested.
void RelocInfo::set_target_object(Heap* heap, HeapObject target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_));
  if (IsCompressedEmbeddedObject(rmode_)) {
    // Compressed slots only exist in pointer-compression builds.
    DCHECK(COMPRESS_POINTERS_BOOL);
    Tagged_t tagged = CompressTagged(target.ptr());
    WriteUnalignedValue(pc_, tagged);
  } else {
    WriteUnalignedValue(pc_, target.ptr());
  }
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
    FlushInstructionCache(pc_, sizeof(Address));
  }
  // The barrier is skipped when there is no host object or when write
  // barriers are globally disabled.
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null() &&
      !FLAG_disable_write_barriers) {
    WriteBarrierForCode(host(), this, target);
  }
}
388
// Resolves the runtime-entry target through the originating assembler, which
// knows the code range the stored offset is relative to.
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return origin->runtime_entry_at(pc_);
}
393
// Updates the runtime-entry target, skipping the write (and the associated
// cache flush) when the stored target is already correct.
void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target) {
    set_target_address(target, write_barrier_mode, icache_flush_mode);
  }
}
402
// Off-heap targets are stored as absolute addresses in-line at pc_.
Address RelocInfo::target_off_heap_target() {
  DCHECK(IsOffHeapTarget(rmode_));
  return ReadUnalignedValue<Address>(pc_);
}
407
// Resets the relocation target to a null-like value appropriate for the
// mode, so a wiped slot cannot be mistaken for a live target.
void RelocInfo::WipeOut() {
  if (IsFullEmbeddedObject(rmode_) || IsExternalReference(rmode_) ||
      IsInternalReference(rmode_) || IsOffHeapTarget(rmode_)) {
    WriteUnalignedValue(pc_, kNullAddress);
  } else if (IsCompressedEmbeddedObject(rmode_)) {
    // A compressed slot cannot hold a full kNullAddress; store the
    // compressed Smi zero instead.
    Address smi_address = Smi::FromInt(0).ptr();
    WriteUnalignedValue(pc_, CompressTagged(smi_address));
  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    // Effectively write zero into the relocation: a rel32 displacement of
    // zero points just past its own 4-byte operand.
    Assembler::set_target_address_at(pc_, constant_pool_,
                                     pc_ + sizeof(int32_t));
  } else {
    UNREACHABLE();
  }
}
423
424 } // namespace internal
425 } // namespace v8
426
427 #endif // V8_CODEGEN_X64_ASSEMBLER_X64_INL_H_
428