// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CODEGEN_TURBO_ASSEMBLER_H_
#define V8_CODEGEN_TURBO_ASSEMBLER_H_

#include <memory>

#include "src/base/template-utils.h"
#include "src/builtins/builtins.h"
#include "src/codegen/assembler-arch.h"
#include "src/roots/roots.h"

namespace v8 {
namespace internal {

// Common base class for platform-specific TurboAssemblers containing
// platform-independent bits.
class V8_EXPORT_PRIVATE TurboAssemblerBase : public Assembler {
 public:
  // Constructors are declared public to inherit them in derived classes
  // with `using` directive.
  TurboAssemblerBase(Isolate* isolate, CodeObjectRequired create_code_object,
                     std::unique_ptr<AssemblerBuffer> buffer = {})
      : TurboAssemblerBase(isolate, AssemblerOptions::Default(isolate),
                           create_code_object, std::move(buffer)) {}

  TurboAssemblerBase(Isolate* isolate, const AssemblerOptions& options,
                     CodeObjectRequired create_code_object,
                     std::unique_ptr<AssemblerBuffer> buffer = {});
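
  // Illustrative sketch (not part of this header): platform-specific
  // assemblers are expected to pull these constructors in with a `using`
  // declaration rather than redeclaring them, roughly
  //
  //   class TurboAssembler : public TurboAssemblerBase {
  //    public:
  //     using TurboAssemblerBase::TurboAssemblerBase;
  //     // ... platform-specific emitters ...
  //   };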

  Isolate* isolate() const {
    return isolate_;
  }

  Handle<HeapObject> CodeObject() const {
    DCHECK(!code_object_.is_null());
    return code_object_;
  }

  bool root_array_available() const { return root_array_available_; }
  void set_root_array_available(bool v) { root_array_available_ = v; }

  bool trap_on_abort() const { return trap_on_abort_; }

  bool should_abort_hard() const { return hard_abort_; }
  void set_abort_hard(bool v) { hard_abort_ = v; }

  void set_builtin_index(int i) { maybe_builtin_index_ = i; }

  void set_has_frame(bool v) { has_frame_ = v; }
  bool has_frame() const { return has_frame_; }

  virtual void Jump(const ExternalReference& reference) = 0;

  // Calls the builtin given by the Smi in |builtin_index|. If builtins are
  // embedded, the trampoline Code object on the heap is not used.
  virtual void CallBuiltinByIndex(Register builtin_index) = 0;

  // Calls/jumps to the given Code object. If builtins are embedded, the
  // trampoline Code object on the heap is not used.
  virtual void CallCodeObject(Register code_object) = 0;
  virtual void JumpCodeObject(Register code_object) = 0;

  // Loads the given Code object's entry point into the destination register.
  virtual void LoadCodeObjectEntry(Register destination,
                                   Register code_object) = 0;

  // Loads the given constant or external reference without embedding its
  // direct pointer. The produced code is isolate-independent.
  void IndirectLoadConstant(Register destination, Handle<HeapObject> object);
  void IndirectLoadExternalReference(Register destination,
                                     ExternalReference reference);

  virtual void LoadFromConstantsTable(Register destination,
                                      int constant_index) = 0;

  // Corresponds to: destination = kRootRegister + offset.
  virtual void LoadRootRegisterOffset(Register destination,
                                      intptr_t offset) = 0;

  // Corresponds to: destination = [kRootRegister + offset].
  virtual void LoadRootRelative(Register destination, int32_t offset) = 0;

  virtual void LoadRoot(Register destination, RootIndex index) = 0;

  virtual void Trap() = 0;
  virtual void DebugBreak() = 0;

  static int32_t RootRegisterOffsetForRootIndex(RootIndex root_index);
  static int32_t RootRegisterOffsetForBuiltinIndex(int builtin_index);

  // Returns the root-relative offset to reference.address().
  static intptr_t RootRegisterOffsetForExternalReference(
      Isolate* isolate, const ExternalReference& reference);

  // Returns the root-relative offset to the external reference table entry,
  // which itself contains reference.address().
  static int32_t RootRegisterOffsetForExternalReferenceTableEntry(
      Isolate* isolate, const ExternalReference& reference);

  // An address is addressable through kRootRegister if it is located within
  // isolate->root_register_addressable_region().
  static bool IsAddressableThroughRootRegister(
      Isolate* isolate, const ExternalReference& reference);
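
  // Illustrative sketch only (the emitters below live in the platform-specific
  // subclasses, and Move() is a hypothetical stand-in for whatever sequence
  // materializes an embedded pointer): a typical use of the helpers above is
  // to load an external reference root-relative instead of embedding its
  // address:
  //
  //   if (root_array_available() &&
  //       IsAddressableThroughRootRegister(isolate(), ref)) {
  //     intptr_t offset =
  //         RootRegisterOffsetForExternalReference(isolate(), ref);
  //     LoadRootRegisterOffset(dst, offset);  // dst = kRootRegister + offset
  //   } else {
  //     Move(dst, ref);  // fall back to embedding the raw address
  //   }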

#ifdef V8_TARGET_OS_WIN
  // Minimum page size. We must touch memory once per page when expanding the
  // stack, to avoid access violations.
  static constexpr int kStackPageSize = 4 * KB;
#endif

 protected:
  void RecordCommentForOffHeapTrampoline(int builtin_index);

  Isolate* const isolate_ = nullptr;

  // This handle will be patched with the code object on installation.
  Handle<HeapObject> code_object_;

  // Whether kRootRegister has been initialized.
  bool root_array_available_ = true;

  // Immediately trap instead of calling {Abort} when debug code fails.
  bool trap_on_abort_ = FLAG_trap_on_abort;

  // Emit a C call to abort instead of a runtime call.
  bool hard_abort_ = false;

  // May be set while generating builtins.
  int maybe_builtin_index_ = Builtins::kNoBuiltinId;

  bool has_frame_ = false;

  DISALLOW_IMPLICIT_CONSTRUCTORS(TurboAssemblerBase);
};

// Avoids emitting calls to the {Builtins::kAbort} builtin when emitting debug
// code during the lifetime of this scope object. For disabling debug code
// entirely, use the {DontEmitDebugCodeScope} instead.
class HardAbortScope {
 public:
  explicit HardAbortScope(TurboAssemblerBase* assembler)
      : assembler_(assembler), old_value_(assembler->should_abort_hard()) {
    assembler_->set_abort_hard(true);
  }
  ~HardAbortScope() { assembler_->set_abort_hard(old_value_); }

 private:
  TurboAssemblerBase* assembler_;
  bool old_value_;
};
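
// Illustrative usage sketch (EmitGuardedStore is a hypothetical helper, and
// the debug checks it emits depend on the platform-specific subclass): while a
// HardAbortScope is alive, failing debug checks abort via a plain C call
// instead of the {Builtins::kAbort} builtin, e.g. when no valid isolate or
// frame is available:
//
//   void EmitGuardedStore(TurboAssemblerBase* tasm) {
//     HardAbortScope hard_abort(tasm);  // should_abort_hard() is now true
//     // ... emit debug checks; failures now abort hard ...
//   }                                   // previous value restored here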

#ifdef DEBUG
struct CountIfValidRegisterFunctor {
  template <typename RegType>
  constexpr int operator()(int count, RegType reg) const {
    return count + (reg.is_valid() ? 1 : 0);
  }
};

template <typename RegType, typename... RegTypes,
          // All arguments must be either Register or DoubleRegister.
          typename = typename std::enable_if<
              base::is_same<Register, RegType, RegTypes...>::value ||
              base::is_same<DoubleRegister, RegType, RegTypes...>::value>::type>
inline bool AreAliased(RegType first_reg, RegTypes... regs) {
  int num_different_regs = NumRegs(RegType::ListOf(first_reg, regs...));
  int num_given_regs =
      base::fold(CountIfValidRegisterFunctor{}, 0, first_reg, regs...);
  return num_different_regs < num_given_regs;
}
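
// Illustrative sketch (register names below are placeholders): AreAliased
// returns true if any two of the given valid registers are the same, and is
// typically used to check that inputs and scratch registers are distinct:
//
//   DCHECK(!AreAliased(dst, src, scratch));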
#endif  // DEBUG

}  // namespace internal
}  // namespace v8

#endif  // V8_CODEGEN_TURBO_ASSEMBLER_H_