// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been
// modified significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.

#ifndef V8_CODEGEN_MIPS64_ASSEMBLER_MIPS64_INL_H_
#define V8_CODEGEN_MIPS64_ASSEMBLER_MIPS64_INL_H_

#include "src/codegen/mips64/assembler-mips64.h"

#include "src/codegen/assembler.h"
#include "src/debug/debug.h"
#include "src/objects/objects-inl.h"

namespace v8 {
namespace internal {

bool CpuFeatures::SupportsOptimizer() { return IsSupported(FPU); }

bool CpuFeatures::SupportsWasmSimd128() { return IsSupported(MIPS_SIMD); }

// -----------------------------------------------------------------------------
// Operand and MemOperand.

bool Operand::is_reg() const { return rm_.is_valid(); }

int64_t Operand::immediate() const {
  DCHECK(!is_reg());
  DCHECK(!IsHeapObjectRequest());
  return value_.immediate;
}

// -----------------------------------------------------------------------------
// RelocInfo.

void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    Assembler::RelocateInternalReference(rmode_, pc_, delta);
  }
}

Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

Address RelocInfo::target_address_address() {
  DCHECK(HasTargetAddressAddress());
  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LUI/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written. In this case the target_address_address function should
  // return the end of the instructions to be patched, allowing the
  // deserializer to deserialize the instructions as raw bytes and put them in
  // place, ready to be patched with the target. After jump optimization,
  // that is the address of the instruction that follows the J/JAL/JR/JALR
  // instruction.
  return pc_ + Assembler::kInstructionsFor64BitConstant * kInstrSize;
}

Address RelocInfo::constant_pool_entry_address() { UNREACHABLE(); }

int RelocInfo::target_address_size() { return Assembler::kSpecialTargetSize; }

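// Used by the deserializer to patch the target encoded in the special
// (call/jump) sequence at |instruction_payload|. The Code object's constant
// pool address is forwarded to set_target_address_at, or kNullAddress when
// |code| is null.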
void Assembler::deserialization_set_special_target_at(
    Address instruction_payload, Code code, Address target) {
  set_target_address_at(instruction_payload,
                        !code.is_null() ? code.constant_pool() : kNullAddress,
                        target);
}

int Assembler::deserialization_special_target_size(
    Address instruction_payload) {
  return kSpecialTargetSize;
}

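// Rewrite the 26-bit immediate of the j/jal instruction at |pc| so that it
// targets |target|. Only the low 28 bits of the target are encoded (they are
// word aligned, hence shifted right by 2 into the 26-bit field); the upper
// bits are implied by the 256 MB segment the jump itself lives in (see
// RelocInfo::target_internal_reference below).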
void Assembler::set_target_internal_reference_encoded_at(Address pc,
                                                         Address target) {
  // Encoded internal references are j/jal instructions.
  Instr instr = Assembler::instr_at(pc + 0 * kInstrSize);

  uint64_t imm28 = target & static_cast<uint64_t>(kImm28Mask);

  instr &= ~kImm26Mask;
  uint64_t imm26 = imm28 >> 2;
  DCHECK(is_uint26(imm26));

  instr_at_put(pc, instr | (imm26 & kImm26Mask));
  // Currently used only by the deserializer, and all code will be flushed
  // after complete deserialization, so there is no need to flush on each
  // reference.
}

void Assembler::deserialization_set_target_internal_reference_at(
    Address pc, Address target, RelocInfo::Mode mode) {
  if (mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    DCHECK(IsJ(instr_at(pc)));
    set_target_internal_reference_encoded_at(pc, target);
  } else {
    DCHECK(mode == RelocInfo::INTERNAL_REFERENCE);
    Memory<Address>(pc) = target;
  }
}

HeapObject RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_));
  return HeapObject::cast(
      Object(Assembler::target_address_at(pc_, constant_pool_)));
}

HeapObject RelocInfo::target_object_no_host(Isolate* isolate) {
  return target_object();
}

Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_));
  return Handle<HeapObject>(reinterpret_cast<Address*>(
      Assembler::target_address_at(pc_, constant_pool_)));
}

void RelocInfo::set_target_object(Heap* heap, HeapObject target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_));
  Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null() &&
      !FLAG_disable_write_barriers) {
    WriteBarrierForCode(host(), this, target);
  }
}

Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, constant_pool_);
}

void RelocInfo::set_target_external_reference(
    Address target, ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  Assembler::set_target_address_at(pc_, constant_pool_, target,
                                   icache_flush_mode);
}

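// Read back an internal reference. For INTERNAL_REFERENCE the absolute
// address is stored directly at pc_; for INTERNAL_REFERENCE_ENCODED it is
// reconstructed from the j/jal 26-bit immediate combined with the 256 MB
// segment that contains pc_.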
Address RelocInfo::target_internal_reference() {
  if (rmode_ == INTERNAL_REFERENCE) {
    return Memory<Address>(pc_);
  } else {
    // Encoded internal references are j/jal instructions.
    DCHECK(rmode_ == INTERNAL_REFERENCE_ENCODED);
    Instr instr = Assembler::instr_at(pc_ + 0 * kInstrSize);
    instr &= kImm26Mask;
    uint64_t imm28 = instr << 2;
    uint64_t segment = pc_ & ~static_cast<uint64_t>(kImm28Mask);
    return static_cast<Address>(segment | imm28);
  }
}

Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE || rmode_ == INTERNAL_REFERENCE_ENCODED);
  return pc_;
}

Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}

void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}

Address RelocInfo::target_off_heap_target() {
  DCHECK(IsOffHeapTarget(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

void RelocInfo::WipeOut() {
  DCHECK(IsFullEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_) ||
         IsOffHeapTarget(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory<Address>(pc_) = kNullAddress;
  } else if (IsInternalReferenceEncoded(rmode_)) {
    Assembler::set_target_internal_reference_encoded_at(pc_, kNullAddress);
  } else {
    Assembler::set_target_address_at(pc_, constant_pool_, kNullAddress);
  }
}

// -----------------------------------------------------------------------------
// Assembler.

void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
}

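// A compact branch has a forbidden slot that must not contain a control
// transfer instruction. Before emitting raw data, make sure there is buffer
// space and, if the previously emitted instruction was a compact branch, pad
// with a nop so the next emission does not land in that slot.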
void Assembler::CheckForEmitInForbiddenSlot() {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  if (IsPrevInstrCompactBranch()) {
    // Nop instruction to precede a CTI in forbidden slot:
    Instr nop = SPECIAL | SLL;
    *reinterpret_cast<Instr*>(pc_) = nop;
    pc_ += kInstrSize;

    ClearCompactBranchState();
  }
}

void Assembler::EmitHelper(Instr x, CompactBranchType is_compact_branch) {
  if (IsPrevInstrCompactBranch()) {
    if (Instruction::IsForbiddenAfterBranchInstr(x)) {
      // Nop instruction to precede a CTI in forbidden slot:
      Instr nop = SPECIAL | SLL;
      *reinterpret_cast<Instr*>(pc_) = nop;
      pc_ += kInstrSize;
    }
    ClearCompactBranchState();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  if (is_compact_branch == CompactBranchType::COMPACT_BRANCH) {
    EmittedCompactBranchInstruction();
  }
  CheckTrampolinePoolQuick();
}

template <>
inline void Assembler::EmitHelper(uint8_t x);

template <typename T>
void Assembler::EmitHelper(T x) {
  *reinterpret_cast<T*>(pc_) = x;
  pc_ += sizeof(x);
  CheckTrampolinePoolQuick();
}

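// Byte-sized emission can leave pc_ unaligned with respect to kInstrSize, so
// only check the trampoline pool once pc_ is back on an instruction boundary.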
template <>
void Assembler::EmitHelper(uint8_t x) {
  *reinterpret_cast<uint8_t*>(pc_) = x;
  pc_ += sizeof(x);
  if (reinterpret_cast<intptr_t>(pc_) % kInstrSize == 0) {
    CheckTrampolinePoolQuick();
  }
}

void Assembler::emit(Instr x, CompactBranchType is_compact_branch) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  EmitHelper(x, is_compact_branch);
}

void Assembler::emit(uint64_t data) {
  CheckForEmitInForbiddenSlot();
  EmitHelper(data);
}

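// Constructing an EnsureSpace calls CheckBuffer, which grows the buffer
// whenever less than kGap bytes remain, so the emission that follows has
// guaranteed headroom.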
EnsureSpace::EnsureSpace(Assembler* assembler) { assembler->CheckBuffer(); }

}  // namespace internal
}  // namespace v8

#endif  // V8_CODEGEN_MIPS64_ASSEMBLER_MIPS64_INL_H_