// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CODEGEN_ARM64_ASSEMBLER_ARM64_INL_H_
#define V8_CODEGEN_ARM64_ASSEMBLER_ARM64_INL_H_

#include <type_traits>

#include "src/base/memory.h"
#include "src/codegen/arm64/assembler-arm64.h"
#include "src/codegen/assembler.h"
#include "src/debug/debug.h"
#include "src/objects/objects-inl.h"
#include "src/objects/smi.h"

namespace v8 {
namespace internal {

bool CpuFeatures::SupportsOptimizer() { return true; }

bool CpuFeatures::SupportsWasmSimd128() { return true; }

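// Note on the arithmetic in RelocInfo::apply() below: an absolute internal
// reference must move with the code object, hence the += delta. A PC-relative
// immediate branch, by contrast, is re-targeted by -delta so that it keeps
// addressing the same absolute target from the instruction's new location.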
void RelocInfo::apply(intptr_t delta) {
  // On arm64 only internal references and immediate branches need extra work.
  if (RelocInfo::IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    intptr_t internal_ref = ReadUnalignedValue<intptr_t>(pc_);
    internal_ref += delta;  // Relocate entry.
    WriteUnalignedValue<intptr_t>(pc_, internal_ref);
  } else {
    Instruction* instr = reinterpret_cast<Instruction*>(pc_);
    if (instr->IsBranchAndLink() || instr->IsUnconditionalBranch()) {
      Address old_target =
          reinterpret_cast<Address>(instr->ImmPCOffsetTarget());
      Address new_target = old_target - delta;
      instr->SetBranchImmTarget(reinterpret_cast<Instruction*>(new_target));
    }
  }
}

inline bool CPURegister::IsSameSizeAndType(const CPURegister& other) const {
  return (reg_size_ == other.reg_size_) && (reg_type_ == other.reg_type_);
}

inline bool CPURegister::IsZero() const {
  DCHECK(is_valid());
  return IsRegister() && (code() == kZeroRegCode);
}

inline bool CPURegister::IsSP() const {
  DCHECK(is_valid());
  return IsRegister() && (code() == kSPRegInternalCode);
}

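// A CPURegList is a 64-bit mask over register codes: bit i is set when the
// register with code i (of the list's type and size) is in the list, which is
// why the Combine and Remove operations below are plain bitwise arithmetic.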
inline void CPURegList::Combine(const CPURegList& other) {
  DCHECK(other.type() == type_);
  DCHECK(other.RegisterSizeInBits() == size_);
  list_ |= other.list();
}

inline void CPURegList::Remove(const CPURegList& other) {
  if (other.type() == type_) {
    list_ &= ~other.list();
  }
}

inline void CPURegList::Combine(const CPURegister& other) {
  DCHECK(other.type() == type_);
  DCHECK(other.SizeInBits() == size_);
  Combine(other.code());
}

inline void CPURegList::Remove(const CPURegister& other1,
                               const CPURegister& other2,
                               const CPURegister& other3,
                               const CPURegister& other4) {
  if (!other1.IsNone() && (other1.type() == type_)) Remove(other1.code());
  if (!other2.IsNone() && (other2.type() == type_)) Remove(other2.code());
  if (!other3.IsNone() && (other3.type() == type_)) Remove(other3.code());
  if (!other4.IsNone() && (other4.type() == type_)) Remove(other4.code());
}

inline void CPURegList::Combine(int code) {
  DCHECK(CPURegister::Create(code, size_, type_).is_valid());
  list_ |= (1ULL << code);
  DCHECK(is_valid());
}

inline void CPURegList::Remove(int code) {
  DCHECK(CPURegister::Create(code, size_, type_).is_valid());
  list_ &= ~(1ULL << code);
}

inline Register Register::XRegFromCode(unsigned code) {
  if (code == kSPRegInternalCode) {
    return sp;
  } else {
    DCHECK_LT(code, static_cast<unsigned>(kNumberOfRegisters));
    return Register::Create(code, kXRegSizeInBits);
  }
}

inline Register Register::WRegFromCode(unsigned code) {
  if (code == kSPRegInternalCode) {
    return wsp;
  } else {
    DCHECK_LT(code, static_cast<unsigned>(kNumberOfRegisters));
    return Register::Create(code, kWRegSizeInBits);
  }
}

inline VRegister VRegister::BRegFromCode(unsigned code) {
  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
  return VRegister::Create(code, kBRegSizeInBits);
}

inline VRegister VRegister::HRegFromCode(unsigned code) {
  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
  return VRegister::Create(code, kHRegSizeInBits);
}

inline VRegister VRegister::SRegFromCode(unsigned code) {
  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
  return VRegister::Create(code, kSRegSizeInBits);
}

inline VRegister VRegister::DRegFromCode(unsigned code) {
  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
  return VRegister::Create(code, kDRegSizeInBits);
}

inline VRegister VRegister::QRegFromCode(unsigned code) {
  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
  return VRegister::Create(code, kQRegSizeInBits);
}

inline VRegister VRegister::VRegFromCode(unsigned code) {
  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
  return VRegister::Create(code, kVRegSizeInBits);
}

inline Register CPURegister::W() const {
  DCHECK(IsRegister());
  return Register::WRegFromCode(code());
}

inline Register CPURegister::Reg() const {
  DCHECK(IsRegister());
  return Register::Create(code(), reg_size_);
}

inline VRegister CPURegister::VReg() const {
  DCHECK(IsVRegister());
  return VRegister::Create(code(), reg_size_);
}

inline Register CPURegister::X() const {
  DCHECK(IsRegister());
  return Register::XRegFromCode(code());
}

inline VRegister CPURegister::V() const {
  DCHECK(IsVRegister());
  return VRegister::VRegFromCode(code());
}

inline VRegister CPURegister::B() const {
  DCHECK(IsVRegister());
  return VRegister::BRegFromCode(code());
}

inline VRegister CPURegister::H() const {
  DCHECK(IsVRegister());
  return VRegister::HRegFromCode(code());
}

inline VRegister CPURegister::S() const {
  DCHECK(IsVRegister());
  return VRegister::SRegFromCode(code());
}

inline VRegister CPURegister::D() const {
  DCHECK(IsVRegister());
  return VRegister::DRegFromCode(code());
}

inline VRegister CPURegister::Q() const {
  DCHECK(IsVRegister());
  return VRegister::QRegFromCode(code());
}

// Immediate.
// The default initializer handles integral and enum types.
template <typename T>
struct ImmediateInitializer {
  static inline RelocInfo::Mode rmode_for(T) { return RelocInfo::NONE; }
  static inline int64_t immediate_for(T t) {
    STATIC_ASSERT(sizeof(T) <= 8);
    STATIC_ASSERT(std::is_integral<T>::value || std::is_enum<T>::value);
    return t;
  }
};

template <>
struct ImmediateInitializer<Smi> {
  static inline RelocInfo::Mode rmode_for(Smi t) { return RelocInfo::NONE; }
  static inline int64_t immediate_for(Smi t) {
    return static_cast<int64_t>(t.ptr());
  }
};

template <>
struct ImmediateInitializer<ExternalReference> {
  static inline RelocInfo::Mode rmode_for(ExternalReference t) {
    return RelocInfo::EXTERNAL_REFERENCE;
  }
  static inline int64_t immediate_for(ExternalReference t) {
    return static_cast<int64_t>(t.address());
  }
};
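
// Taken together, the traits above choose both the encoded value and the
// relocation mode for each immediate type; illustrative examples:
//   Immediate(1234)            // value 1234, RelocInfo::NONE
//   Immediate(Smi::FromInt(7)) // the tagged Smi bits, RelocInfo::NONE
//   Immediate(ext_ref)         // its address, RelocInfo::EXTERNAL_REFERENCE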

template <typename T>
Immediate::Immediate(Handle<T> handle, RelocInfo::Mode mode)
    : value_(static_cast<intptr_t>(handle.address())), rmode_(mode) {
  DCHECK(RelocInfo::IsEmbeddedObjectMode(mode));
}

template <typename T>
Immediate::Immediate(T t)
    : value_(ImmediateInitializer<T>::immediate_for(t)),
      rmode_(ImmediateInitializer<T>::rmode_for(t)) {}

template <typename T>
Immediate::Immediate(T t, RelocInfo::Mode rmode)
    : value_(ImmediateInitializer<T>::immediate_for(t)), rmode_(rmode) {
  STATIC_ASSERT(std::is_integral<T>::value);
}

template <typename T>
Operand::Operand(T t) : immediate_(t), reg_(NoReg) {}

template <typename T>
Operand::Operand(T t, RelocInfo::Mode rmode)
    : immediate_(t, rmode), reg_(NoReg) {}

Operand::Operand(Register reg, Shift shift, unsigned shift_amount)
    : immediate_(0),
      reg_(reg),
      shift_(shift),
      extend_(NO_EXTEND),
      shift_amount_(shift_amount) {
  DCHECK(reg.Is64Bits() || (shift_amount < kWRegSizeInBits));
  DCHECK(reg.Is32Bits() || (shift_amount < kXRegSizeInBits));
  DCHECK_IMPLIES(reg.IsSP(), shift_amount == 0);
}

Operand::Operand(Register reg, Extend extend, unsigned shift_amount)
    : immediate_(0),
      reg_(reg),
      shift_(NO_SHIFT),
      extend_(extend),
      shift_amount_(shift_amount) {
  DCHECK(reg.is_valid());
  DCHECK_LE(shift_amount, 4);
  DCHECK(!reg.IsSP());

  // Extend modes SXTX and UXTX require a 64-bit register.
  DCHECK(reg.Is64Bits() || ((extend != SXTX) && (extend != UXTX)));
}

bool Operand::IsHeapObjectRequest() const {
  DCHECK_IMPLIES(heap_object_request_.has_value(), reg_ == NoReg);
  DCHECK_IMPLIES(heap_object_request_.has_value(),
                 immediate_.rmode() == RelocInfo::FULL_EMBEDDED_OBJECT ||
                     immediate_.rmode() == RelocInfo::CODE_TARGET);
  return heap_object_request_.has_value();
}

HeapObjectRequest Operand::heap_object_request() const {
  DCHECK(IsHeapObjectRequest());
  return *heap_object_request_;
}

bool Operand::IsImmediate() const {
  return reg_ == NoReg && !IsHeapObjectRequest();
}

bool Operand::IsShiftedRegister() const {
  return reg_.is_valid() && (shift_ != NO_SHIFT);
}

bool Operand::IsExtendedRegister() const {
  return reg_.is_valid() && (extend_ != NO_EXTEND);
}

bool Operand::IsZero() const {
  if (IsImmediate()) {
    return ImmediateValue() == 0;
  } else {
    return reg().IsZero();
  }
}

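// An operand shifted with LSL by at most four can be represented in the
// equivalent extended-register form: UXTX for a 64-bit register (where the
// extend is a no-op) and UXTW for a 32-bit one, which is the conversion
// ToExtendedRegister() performs.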
Operand Operand::ToExtendedRegister() const {
  DCHECK(IsShiftedRegister());
  DCHECK((shift_ == LSL) && (shift_amount_ <= 4));
  return Operand(reg_, reg_.Is64Bits() ? UXTX : UXTW, shift_amount_);
}

Operand Operand::ToW() const {
  if (IsShiftedRegister()) {
    DCHECK(reg_.Is64Bits());
    return Operand(reg_.W(), shift(), shift_amount());
  } else if (IsExtendedRegister()) {
    DCHECK(reg_.Is64Bits());
    return Operand(reg_.W(), extend(), shift_amount());
  }
  DCHECK(IsImmediate());
  return *this;
}

Immediate Operand::immediate_for_heap_object_request() const {
  DCHECK((heap_object_request().kind() == HeapObjectRequest::kHeapNumber &&
          immediate_.rmode() == RelocInfo::FULL_EMBEDDED_OBJECT) ||
         (heap_object_request().kind() == HeapObjectRequest::kStringConstant &&
          immediate_.rmode() == RelocInfo::FULL_EMBEDDED_OBJECT));
  return immediate_;
}

Immediate Operand::immediate() const {
  DCHECK(IsImmediate());
  return immediate_;
}

int64_t Operand::ImmediateValue() const {
  DCHECK(IsImmediate());
  return immediate_.value();
}

RelocInfo::Mode Operand::ImmediateRMode() const {
  DCHECK(IsImmediate() || IsHeapObjectRequest());
  return immediate_.rmode();
}

Register Operand::reg() const {
  DCHECK(IsShiftedRegister() || IsExtendedRegister());
  return reg_;
}

Shift Operand::shift() const {
  DCHECK(IsShiftedRegister());
  return shift_;
}

Extend Operand::extend() const {
  DCHECK(IsExtendedRegister());
  return extend_;
}

unsigned Operand::shift_amount() const {
  DCHECK(IsShiftedRegister() || IsExtendedRegister());
  return shift_amount_;
}

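// Illustrative addressing forms accepted by the constructors below (a sketch
// only; default arguments are as declared in assembler-arm64.h):
//   MemOperand(x0, 8)            // [x0, #8]
//   MemOperand(x0, 8, PreIndex)  // [x0, #8]!
//   MemOperand(x0, x1, LSL, 3)   // [x0, x1, lsl #3]
//   MemOperand(x0, w1, SXTW)     // [x0, w1, sxtw]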
MemOperand::MemOperand()
    : base_(NoReg),
      regoffset_(NoReg),
      offset_(0),
      addrmode_(Offset),
      shift_(NO_SHIFT),
      extend_(NO_EXTEND),
      shift_amount_(0) {}

MemOperand::MemOperand(Register base, int64_t offset, AddrMode addrmode)
    : base_(base),
      regoffset_(NoReg),
      offset_(offset),
      addrmode_(addrmode),
      shift_(NO_SHIFT),
      extend_(NO_EXTEND),
      shift_amount_(0) {
  DCHECK(base.Is64Bits() && !base.IsZero());
}

MemOperand::MemOperand(Register base, Register regoffset, Extend extend,
                       unsigned shift_amount)
    : base_(base),
      regoffset_(regoffset),
      offset_(0),
      addrmode_(Offset),
      shift_(NO_SHIFT),
      extend_(extend),
      shift_amount_(shift_amount) {
  DCHECK(base.Is64Bits() && !base.IsZero());
  DCHECK(!regoffset.IsSP());
  DCHECK((extend == UXTW) || (extend == SXTW) || (extend == SXTX));

  // SXTX extend mode requires a 64-bit offset register.
  DCHECK(regoffset.Is64Bits() || (extend != SXTX));
}

MemOperand::MemOperand(Register base, Register regoffset, Shift shift,
                       unsigned shift_amount)
    : base_(base),
      regoffset_(regoffset),
      offset_(0),
      addrmode_(Offset),
      shift_(shift),
      extend_(NO_EXTEND),
      shift_amount_(shift_amount) {
  DCHECK(base.Is64Bits() && !base.IsZero());
  DCHECK(regoffset.Is64Bits() && !regoffset.IsSP());
  DCHECK(shift == LSL);
}

MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode)
    : base_(base), regoffset_(NoReg), addrmode_(addrmode) {
  DCHECK(base.Is64Bits() && !base.IsZero());

  if (offset.IsImmediate()) {
    offset_ = offset.ImmediateValue();
  } else if (offset.IsShiftedRegister()) {
    DCHECK((addrmode == Offset) || (addrmode == PostIndex));

    regoffset_ = offset.reg();
    shift_ = offset.shift();
    shift_amount_ = offset.shift_amount();

    extend_ = NO_EXTEND;
    offset_ = 0;

    // These assertions match those in the shifted-register constructor.
    DCHECK(regoffset_.Is64Bits() && !regoffset_.IsSP());
    DCHECK(shift_ == LSL);
  } else {
    DCHECK(offset.IsExtendedRegister());
    DCHECK(addrmode == Offset);

    regoffset_ = offset.reg();
    extend_ = offset.extend();
    shift_amount_ = offset.shift_amount();

    shift_ = NO_SHIFT;
    offset_ = 0;

    // These assertions match those in the extended-register constructor.
    DCHECK(!regoffset_.IsSP());
    DCHECK((extend_ == UXTW) || (extend_ == SXTW) || (extend_ == SXTX));
    DCHECK((regoffset_.Is64Bits() || (extend_ != SXTX)));
  }
}

bool MemOperand::IsImmediateOffset() const {
  return (addrmode_ == Offset) && regoffset_ == NoReg;
}

bool MemOperand::IsRegisterOffset() const {
  return (addrmode_ == Offset) && regoffset_ != NoReg;
}

bool MemOperand::IsPreIndex() const { return addrmode_ == PreIndex; }

bool MemOperand::IsPostIndex() const { return addrmode_ == PostIndex; }

void Assembler::Unreachable() { debug("UNREACHABLE", __LINE__, BREAK); }

Address Assembler::target_pointer_address_at(Address pc) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  DCHECK(instr->IsLdrLiteralX() || instr->IsLdrLiteralW());
  return reinterpret_cast<Address>(instr->ImmPCOffsetTarget());
}

// Read/Modify the code target address in the branch/call instruction at pc.
// The target either lives in a constant-pool slot (referenced by an LDR
// literal instruction) or is encoded directly in the PC-relative offset of an
// immediate branch (B/BL).
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  if (instr->IsLdrLiteralX()) {
    return Memory<Address>(target_pointer_address_at(pc));
  } else {
    DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
    return reinterpret_cast<Address>(instr->ImmPCOffsetTarget());
  }
}

Tagged_t Assembler::target_compressed_address_at(Address pc,
                                                 Address constant_pool) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  CHECK(instr->IsLdrLiteralW());
  return Memory<Tagged_t>(target_pointer_address_at(pc));
}

Handle<Code> Assembler::code_target_object_handle_at(Address pc) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  if (instr->IsLdrLiteralX()) {
    return Handle<Code>(reinterpret_cast<Address*>(
        Assembler::target_address_at(pc, 0 /* unused */)));
  } else {
    DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
    DCHECK_EQ(instr->ImmPCOffset() % kInstrSize, 0);
    return Handle<Code>::cast(
        GetEmbeddedObject(instr->ImmPCOffset() >> kInstrSizeLog2));
  }
}

AssemblerBase::EmbeddedObjectIndex
Assembler::embedded_object_index_referenced_from(Address pc) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  if (instr->IsLdrLiteralX()) {
    STATIC_ASSERT(sizeof(EmbeddedObjectIndex) == sizeof(intptr_t));
    return Memory<EmbeddedObjectIndex>(target_pointer_address_at(pc));
  } else {
    DCHECK(instr->IsLdrLiteralW());
    return Memory<uint32_t>(target_pointer_address_at(pc));
  }
}

void Assembler::set_embedded_object_index_referenced_from(
    Address pc, EmbeddedObjectIndex data) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  if (instr->IsLdrLiteralX()) {
    Memory<EmbeddedObjectIndex>(target_pointer_address_at(pc)) = data;
  } else {
    DCHECK(instr->IsLdrLiteralW());
    DCHECK(is_uint32(data));
    WriteUnalignedValue<uint32_t>(target_pointer_address_at(pc),
                                  static_cast<uint32_t>(data));
  }
}

Handle<HeapObject> Assembler::target_object_handle_at(Address pc) {
  return GetEmbeddedObject(
      Assembler::embedded_object_index_referenced_from(pc));
}

Address Assembler::runtime_entry_at(Address pc) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  if (instr->IsLdrLiteralX()) {
    return Assembler::target_address_at(pc, 0 /* unused */);
  } else {
    DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
    return instr->ImmPCOffset() + options().code_range_start;
  }
}

int Assembler::deserialization_special_target_size(Address location) {
  Instruction* instr = reinterpret_cast<Instruction*>(location);
  if (instr->IsBranchAndLink() || instr->IsUnconditionalBranch()) {
    return kSpecialTargetSize;
  } else {
    DCHECK_EQ(instr->InstructionBits(), 0);
    return kSystemPointerSize;
  }
}

void Assembler::deserialization_set_special_target_at(Address location,
                                                      Code code,
                                                      Address target) {
  Instruction* instr = reinterpret_cast<Instruction*>(location);
  if (instr->IsBranchAndLink() || instr->IsUnconditionalBranch()) {
    if (target == 0) {
      // We are simply wiping the target out for serialization. Set the offset
      // to zero instead.
      target = location;
    }
    instr->SetBranchImmTarget(reinterpret_cast<Instruction*>(target));
    FlushInstructionCache(location, kInstrSize);
  } else {
    DCHECK_EQ(instr->InstructionBits(), 0);
    Memory<Address>(location) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code. However,
    // in this case, only the constant pool contents change. The instruction
    // accessing the constant pool remains unchanged, so a flush is not
    // required.
  }
}

void Assembler::deserialization_set_target_internal_reference_at(
    Address pc, Address target, RelocInfo::Mode mode) {
  WriteUnalignedValue<Address>(pc, target);
}

void Assembler::set_target_address_at(Address pc, Address constant_pool,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  if (instr->IsLdrLiteralX()) {
    Memory<Address>(target_pointer_address_at(pc)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code. However,
    // in this case, only the constant pool contents change. The instruction
    // accessing the constant pool remains unchanged, so a flush is not
    // required.
  } else {
    DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
    if (target == 0) {
      // We are simply wiping the target out for serialization. Set the offset
      // to zero instead.
      target = pc;
    }
    instr->SetBranchImmTarget(reinterpret_cast<Instruction*>(target));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      FlushInstructionCache(pc, kInstrSize);
    }
  }
}

void Assembler::set_target_compressed_address_at(
    Address pc, Address constant_pool, Tagged_t target,
    ICacheFlushMode icache_flush_mode) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  CHECK(instr->IsLdrLiteralW());
  Memory<Tagged_t>(target_pointer_address_at(pc)) = target;
}

int RelocInfo::target_address_size() {
  if (IsCodedSpecially()) {
    return Assembler::kSpecialTargetSize;
  } else {
    Instruction* instr = reinterpret_cast<Instruction*>(pc_);
    DCHECK(instr->IsLdrLiteralX() || instr->IsLdrLiteralW());
    return instr->IsLdrLiteralW() ? kTaggedSize : kSystemPointerSize;
  }
}

Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

Address RelocInfo::target_address_address() {
  DCHECK(HasTargetAddressAddress());
  Instruction* instr = reinterpret_cast<Instruction*>(pc_);
  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like B/BL, where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written.
  // For LDR literal instructions, we can skip up to the constant pool entry
  // address. We make sure that RelocInfo is ordered by the
  // target_address_address so that we do not skip over any relocatable
  // instruction sequences.
  if (instr->IsLdrLiteralX()) {
    return constant_pool_entry_address();
  } else {
    DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
    return pc_;
  }
}

Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::target_pointer_address_at(pc_);
}

HeapObject RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_));
  if (IsCompressedEmbeddedObject(rmode_)) {
    CHECK(!host_.is_null());
    return HeapObject::cast(Object(DecompressTaggedAny(
        host_.address(),
        Assembler::target_compressed_address_at(pc_, constant_pool_))));
  } else {
    return HeapObject::cast(
        Object(Assembler::target_address_at(pc_, constant_pool_)));
  }
}

HeapObject RelocInfo::target_object_no_host(Isolate* isolate) {
  if (IsCompressedEmbeddedObject(rmode_)) {
    return HeapObject::cast(Object(DecompressTaggedAny(
        isolate,
        Assembler::target_compressed_address_at(pc_, constant_pool_))));
  } else {
    return target_object();
  }
}

Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
  if (IsEmbeddedObjectMode(rmode_)) {
    return origin->target_object_handle_at(pc_);
  } else {
    DCHECK(IsCodeTarget(rmode_));
    return origin->code_target_object_handle_at(pc_);
  }
}

void RelocInfo::set_target_object(Heap* heap, HeapObject target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_));
  if (IsCompressedEmbeddedObject(rmode_)) {
    Assembler::set_target_compressed_address_at(
        pc_, constant_pool_, CompressTagged(target.ptr()), icache_flush_mode);
  } else {
    DCHECK(IsFullEmbeddedObject(rmode_));
    Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(),
                                     icache_flush_mode);
  }
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null() &&
      !FLAG_disable_write_barriers) {
    WriteBarrierForCode(host(), this, target);
  }
}

Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, constant_pool_);
}

void RelocInfo::set_target_external_reference(
    Address target, ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  Assembler::set_target_address_at(pc_, constant_pool_, target,
                                   icache_flush_mode);
}

Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return ReadUnalignedValue<Address>(pc_);
}

Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return pc_;
}

Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return origin->runtime_entry_at(pc_);
}

void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target) {
    set_target_address(target, write_barrier_mode, icache_flush_mode);
  }
}

Address RelocInfo::target_off_heap_target() {
  DCHECK(IsOffHeapTarget(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObjectMode(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsOffHeapTarget(rmode_));
  if (IsInternalReference(rmode_)) {
    WriteUnalignedValue<Address>(pc_, kNullAddress);
  } else if (IsCompressedEmbeddedObject(rmode_)) {
    Assembler::set_target_compressed_address_at(pc_, constant_pool_,
                                                kNullAddress);
  } else {
    Assembler::set_target_address_at(pc_, constant_pool_, kNullAddress);
  }
}

LoadStoreOp Assembler::LoadOpFor(const CPURegister& rt) {
  DCHECK(rt.is_valid());
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? LDR_x : LDR_w;
  } else {
    DCHECK(rt.IsVRegister());
    switch (rt.SizeInBits()) {
      case kBRegSizeInBits:
        return LDR_b;
      case kHRegSizeInBits:
        return LDR_h;
      case kSRegSizeInBits:
        return LDR_s;
      case kDRegSizeInBits:
        return LDR_d;
      default:
        DCHECK(rt.IsQ());
        return LDR_q;
    }
  }
}

LoadStoreOp Assembler::StoreOpFor(const CPURegister& rt) {
  DCHECK(rt.is_valid());
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? STR_x : STR_w;
  } else {
    DCHECK(rt.IsVRegister());
    switch (rt.SizeInBits()) {
      case kBRegSizeInBits:
        return STR_b;
      case kHRegSizeInBits:
        return STR_h;
      case kSRegSizeInBits:
        return STR_s;
      case kDRegSizeInBits:
        return STR_d;
      default:
        DCHECK(rt.IsQ());
        return STR_q;
    }
  }
}

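// A load-pair opcode is its store-pair counterpart with the load bit set;
// the DCHECK below spells this out for the STP_w/LDP_w encodings.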
LoadStorePairOp Assembler::LoadPairOpFor(const CPURegister& rt,
                                         const CPURegister& rt2) {
  DCHECK_EQ(STP_w | LoadStorePairLBit, LDP_w);
  return static_cast<LoadStorePairOp>(StorePairOpFor(rt, rt2) |
                                      LoadStorePairLBit);
}

LoadStorePairOp Assembler::StorePairOpFor(const CPURegister& rt,
                                          const CPURegister& rt2) {
  DCHECK(AreSameSizeAndType(rt, rt2));
  USE(rt2);
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? STP_x : STP_w;
  } else {
    DCHECK(rt.IsVRegister());
    switch (rt.SizeInBits()) {
      case kSRegSizeInBits:
        return STP_s;
      case kDRegSizeInBits:
        return STP_d;
      default:
        DCHECK(rt.IsQ());
        return STP_q;
    }
  }
}

LoadLiteralOp Assembler::LoadLiteralOpFor(const CPURegister& rt) {
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? LDR_x_lit : LDR_w_lit;
  } else {
    DCHECK(rt.IsVRegister());
    return rt.Is64Bits() ? LDR_d_lit : LDR_s_lit;
  }
}

int Assembler::LinkAndGetInstructionOffsetTo(Label* label) {
  DCHECK_EQ(kStartOfLabelLinkChain, 0);
  int offset = LinkAndGetByteOffsetTo(label);
  DCHECK(IsAligned(offset, kInstrSize));
  return offset >> kInstrSizeLog2;
}

Instr Assembler::Flags(FlagsUpdate S) {
  if (S == SetFlags) {
    return 1 << FlagsUpdate_offset;
  } else if (S == LeaveFlags) {
    return 0 << FlagsUpdate_offset;
  }
  UNREACHABLE();
}

Instr Assembler::Cond(Condition cond) { return cond << Condition_offset; }

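// ADR-style PC-relative immediates are 21 bits split across two fields: the
// low ImmPCRelLo_width bits go into the "immlo" field and the remaining high
// bits into "immhi", which is the split ImmPCRelAddress() performs.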
Instr Assembler::ImmPCRelAddress(int imm21) {
  CHECK(is_int21(imm21));
  Instr imm = static_cast<Instr>(truncate_to_int21(imm21));
  Instr immhi = (imm >> ImmPCRelLo_width) << ImmPCRelHi_offset;
  Instr immlo = imm << ImmPCRelLo_offset;
  return (immhi & ImmPCRelHi_mask) | (immlo & ImmPCRelLo_mask);
}

Instr Assembler::ImmUncondBranch(int imm26) {
  CHECK(is_int26(imm26));
  return truncate_to_int26(imm26) << ImmUncondBranch_offset;
}

Instr Assembler::ImmCondBranch(int imm19) {
  CHECK(is_int19(imm19));
  return truncate_to_int19(imm19) << ImmCondBranch_offset;
}

Instr Assembler::ImmCmpBranch(int imm19) {
  CHECK(is_int19(imm19));
  return truncate_to_int19(imm19) << ImmCmpBranch_offset;
}

Instr Assembler::ImmTestBranch(int imm14) {
  CHECK(is_int14(imm14));
  return truncate_to_int14(imm14) << ImmTestBranch_offset;
}

Instr Assembler::ImmTestBranchBit(unsigned bit_pos) {
  DCHECK(is_uint6(bit_pos));
  // The b5 field holds bit 5 of bit_pos, so shift by five less than the field
  // offset to line that bit up; b40 holds the low five bits.
  unsigned b5 = bit_pos << (ImmTestBranchBit5_offset - 5);
  unsigned b40 = bit_pos << ImmTestBranchBit40_offset;
  b5 &= ImmTestBranchBit5_mask;
  b40 &= ImmTestBranchBit40_mask;
  return b5 | b40;
}

Instr Assembler::SF(Register rd) {
  return rd.Is64Bits() ? SixtyFourBits : ThirtyTwoBits;
}

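// Add/sub immediates are 12 bits wide with an optional left shift by 12;
// IsImmAddSub() is expected to accept exactly the values representable in one
// of those two forms.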
Instr Assembler::ImmAddSub(int imm) {
  DCHECK(IsImmAddSub(imm));
  if (is_uint12(imm)) {  // No shift required.
    imm <<= ImmAddSub_offset;
  } else {
    imm = ((imm >> 12) << ImmAddSub_offset) | (1 << ShiftAddSub_offset);
  }
  return imm;
}

Instr Assembler::ImmS(unsigned imms, unsigned reg_size) {
  DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(imms)) ||
         ((reg_size == kWRegSizeInBits) && is_uint5(imms)));
  USE(reg_size);
  return imms << ImmS_offset;
}

Instr Assembler::ImmR(unsigned immr, unsigned reg_size) {
  DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(immr)) ||
         ((reg_size == kWRegSizeInBits) && is_uint5(immr)));
  USE(reg_size);
  DCHECK(is_uint6(immr));
  return immr << ImmR_offset;
}

Instr Assembler::ImmSetBits(unsigned imms, unsigned reg_size) {
  DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
  DCHECK(is_uint6(imms));
  DCHECK((reg_size == kXRegSizeInBits) || is_uint6(imms + 3));
  USE(reg_size);
  return imms << ImmSetBits_offset;
}

Instr Assembler::ImmRotate(unsigned immr, unsigned reg_size) {
  DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
  DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(immr)) ||
         ((reg_size == kWRegSizeInBits) && is_uint5(immr)));
  USE(reg_size);
  return immr << ImmRotate_offset;
}

Instr Assembler::ImmLLiteral(int imm19) {
  CHECK(is_int19(imm19));
  return truncate_to_int19(imm19) << ImmLLiteral_offset;
}

Instr Assembler::BitN(unsigned bitn, unsigned reg_size) {
  DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
  DCHECK((reg_size == kXRegSizeInBits) || (bitn == 0));
  USE(reg_size);
  return bitn << BitN_offset;
}

Instr Assembler::ShiftDP(Shift shift) {
  DCHECK(shift == LSL || shift == LSR || shift == ASR || shift == ROR);
  return shift << ShiftDP_offset;
}

Instr Assembler::ImmDPShift(unsigned amount) {
  DCHECK(is_uint6(amount));
  return amount << ImmDPShift_offset;
}

Instr Assembler::ExtendMode(Extend extend) {
  return extend << ExtendMode_offset;
}

Instr Assembler::ImmExtendShift(unsigned left_shift) {
  DCHECK_LE(left_shift, 4);
  return left_shift << ImmExtendShift_offset;
}

Instr Assembler::ImmCondCmp(unsigned imm) {
  DCHECK(is_uint5(imm));
  return imm << ImmCondCmp_offset;
}

Instr Assembler::Nzcv(StatusFlags nzcv) {
  return ((nzcv >> Flags_offset) & 0xf) << Nzcv_offset;
}

Instr Assembler::ImmLSUnsigned(int imm12) {
  DCHECK(is_uint12(imm12));
  return imm12 << ImmLSUnsigned_offset;
}

Instr Assembler::ImmLS(int imm9) {
  DCHECK(is_int9(imm9));
  return truncate_to_int9(imm9) << ImmLS_offset;
}

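// Pair load/store offsets are encoded as a signed 7-bit immediate scaled by
// the access size; e.g. an STP of two X registers (size log2 = 3) encodes a
// byte offset of 16 as scaled_imm7 = 2.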
Instr Assembler::ImmLSPair(int imm7, unsigned size) {
  DCHECK_EQ(imm7,
            static_cast<int>(static_cast<uint32_t>(imm7 >> size) << size));
  int scaled_imm7 = imm7 >> size;
  DCHECK(is_int7(scaled_imm7));
  return truncate_to_int7(scaled_imm7) << ImmLSPair_offset;
}

Instr Assembler::ImmShiftLS(unsigned shift_amount) {
  DCHECK(is_uint1(shift_amount));
  return shift_amount << ImmShiftLS_offset;
}

Instr Assembler::ImmException(int imm16) {
  DCHECK(is_uint16(imm16));
  return imm16 << ImmException_offset;
}

Instr Assembler::ImmSystemRegister(int imm15) {
  DCHECK(is_uint15(imm15));
  return imm15 << ImmSystemRegister_offset;
}

Instr Assembler::ImmHint(int imm7) {
  DCHECK(is_uint7(imm7));
  return imm7 << ImmHint_offset;
}

Instr Assembler::ImmBarrierDomain(int imm2) {
  DCHECK(is_uint2(imm2));
  return imm2 << ImmBarrierDomain_offset;
}

Instr Assembler::ImmBarrierType(int imm2) {
  DCHECK(is_uint2(imm2));
  return imm2 << ImmBarrierType_offset;
}

unsigned Assembler::CalcLSDataSize(LoadStoreOp op) {
  DCHECK((LSSize_offset + LSSize_width) == (kInstrSize * 8));
  unsigned size = static_cast<Instr>(op >> LSSize_offset);
  if ((op & LSVector_mask) != 0) {
    // Vector register memory operations encode the access size in the "size"
    // and "opc" fields.
    if ((size == 0) && ((op & LSOpc_mask) >> LSOpc_offset) >= 2) {
      size = kQRegSizeLog2;
    }
  }
  return size;
}

Instr Assembler::ImmMoveWide(int imm) {
  DCHECK(is_uint16(imm));
  return imm << ImmMoveWide_offset;
}

Instr Assembler::ShiftMoveWide(int shift) {
  DCHECK(is_uint2(shift));
  return shift << ShiftMoveWide_offset;
}

Instr Assembler::FPType(VRegister fd) { return fd.Is64Bits() ? FP64 : FP32; }

Instr Assembler::FPScale(unsigned scale) {
  DCHECK(is_uint6(scale));
  return scale << FPScale_offset;
}

const Register& Assembler::AppropriateZeroRegFor(const CPURegister& reg) const {
  return reg.Is64Bits() ? xzr : wzr;
}

inline void Assembler::CheckBufferSpace() {
  DCHECK_LT(pc_, buffer_start_ + buffer_->size());
  if (buffer_space() < kGap) {
    GrowBuffer();
  }
}

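// CheckBuffer() is presumably invoked at each emission point: besides growing
// the buffer when space runs low, it gives the veneer pool and the constant
// pool a chance to be emitted before their deadlines pass.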
inline void Assembler::CheckBuffer() {
  CheckBufferSpace();
  if (pc_offset() >= next_veneer_pool_check_) {
    CheckVeneerPool(false, true);
  }
  constpool_.MaybeCheck();
}

}  // namespace internal
}  // namespace v8

#endif  // V8_CODEGEN_ARM64_ASSEMBLER_ARM64_INL_H_