1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are
6 // met:
7 //
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
10 //
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the distribution.
14 //
15 // - Neither the name of Sun Microsystems or the names of contributors may
16 // be used to endorse or promote products derived from this software without
17 // specific prior written permission.
18 //
19 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
20 // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
21 // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
23 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
24 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
25 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
26 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
27 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
28 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
29 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
31 // The original source code covered by the above license above has been
32 // modified significantly by Google Inc.
33 // Copyright 2012 the V8 project authors. All rights reserved.
34
35
36 #ifndef V8_MIPS_ASSEMBLER_MIPS_INL_H_
37 #define V8_MIPS_ASSEMBLER_MIPS_INL_H_
38
39 #include "mips/assembler-mips.h"
40
41 #include "cpu.h"
42 #include "debug.h"
43
44
45 namespace v8 {
46 namespace internal {
47
48 // -----------------------------------------------------------------------------
49 // Operand and MemOperand.
50
Operand(int32_t immediate,RelocInfo::Mode rmode)51 Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
52 rm_ = no_reg;
53 imm32_ = immediate;
54 rmode_ = rmode;
55 }
56
57
Operand(const ExternalReference & f)58 Operand::Operand(const ExternalReference& f) {
59 rm_ = no_reg;
60 imm32_ = reinterpret_cast<int32_t>(f.address());
61 rmode_ = RelocInfo::EXTERNAL_REFERENCE;
62 }
63
64
Operand(Smi * value)65 Operand::Operand(Smi* value) {
66 rm_ = no_reg;
67 imm32_ = reinterpret_cast<intptr_t>(value);
68 rmode_ = RelocInfo::NONE;
69 }
70
71
// Builds a register operand. imm32_ and rmode_ are left unset; callers are
// expected to check is_reg() before reading the immediate fields.
Operand::Operand(Register rm) {
  rm_ = rm;
}
75
76
// Returns true if this operand wraps a register rather than an immediate.
bool Operand::is_reg() const {
  return rm_.is_valid();
}
80
81
// Maps an allocatable double-precision FPU register to its dense allocation
// index. Doubles occupy even/odd single-register pairs, so the index is
// simply code() / 2.
int FPURegister::ToAllocationIndex(FPURegister reg) {
  ASSERT(reg.code() % 2 == 0);  // Doubles live in even-numbered registers.
  ASSERT(reg.code() / 2 < kNumAllocatableRegisters);
  ASSERT(reg.is_valid());
  // Reserved registers must never be handed to the register allocator.
  ASSERT(!reg.is(kDoubleRegZero));
  ASSERT(!reg.is(kLithiumScratchDouble));
  return (reg.code() / 2);
}
90
91
92 // -----------------------------------------------------------------------------
93 // RelocInfo.
94
// Adjusts this relocation entry after the enclosing code object moved by
// |delta| bytes.
void RelocInfo::apply(intptr_t delta) {
  if (IsCodeTarget(rmode_)) {
    // Compare the 256MB regions (bits above kImm28Mask) of the target and of
    // the patched site. MIPS J/JAL instructions can only reach targets inside
    // the same region, so a cross-region move must fall back to a
    // jump-through-register sequence.
    uint32_t scope1 = (uint32_t) target_address() & ~kImm28Mask;
    uint32_t scope2 = reinterpret_cast<uint32_t>(pc_) & ~kImm28Mask;

    if (scope1 != scope2) {
      Assembler::JumpLabelToJumpRegister(pc_);
    }
  }
  if (IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    byte* p = reinterpret_cast<byte*>(pc_);
    int count = Assembler::RelocateInternalReference(p, delta);
    // Flush the patched instructions from the instruction cache.
    CPU::FlushICache(p, count * sizeof(uint32_t));
  }
}
111
112
// Returns the call/jump target address encoded at pc_.
Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  return Assembler::target_address_at(pc_);
}
117
118
// Returns the address just past the instructions that encode the target,
// for use by the serializer (see the detailed explanation below).
Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) ||
         rmode_ == RUNTIME_ENTRY ||
         rmode_ == EMBEDDED_OBJECT ||
         rmode_ == EXTERNAL_REFERENCE);
  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LUI/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written. In this case the target_address_address function should
  // return the end of the instructions to be patched, allowing the
  // deserializer to deserialize the instructions as raw bytes and put them in
  // place, ready to be patched with the target. After jump optimization,
  // that is the address of the instruction that follows J/JAL/JR/JALR
  // instruction.
  return reinterpret_cast<Address>(
    pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
}
141
142
// Number of raw bytes the serializer advances past an encoded target.
int RelocInfo::target_address_size() {
  return Assembler::kSpecialTargetSize;
}
146
147
// Patches the target address at pc_ and, when requested, records the write
// with the incremental marker so the referenced code object stays live.
void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  Assembler::set_target_address_at(pc_, target);
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
157
158
// Returns the heap object embedded at pc_ (encoded as a target address).
Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_));
}
163
164
// Returns the embedded object as a handle. The encoded address is treated
// as a handle location (Object**); |origin| is unused on MIPS.
Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_)));
}
170
171
// Returns a pointer to the embedded object, suitable for dereferencing
// during heap iteration. Since the object pointer is encoded inside the
// instruction stream rather than stored as a plain word, it is first
// reconstructed into the reconstructed_obj_ptr_ member and a pointer to
// that member is returned.
Object** RelocInfo::target_object_address() {
  // Provide a "natural pointer" to the embedded object,
  // which can be de-referenced during heap iteration.
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  reconstructed_obj_ptr_ =
      reinterpret_cast<Object*>(Assembler::target_address_at(pc_));
  return &reconstructed_obj_ptr_;
}
180
181
// Patches the embedded object at pc_ and, when requested, records the write
// with the incremental marker for heap-object targets.
void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(pc_, reinterpret_cast<Address>(target));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}
192
193
// Returns a pointer to the external reference target. As with
// target_object_address(), the address is encoded in the instruction stream,
// so it is reconstructed into a member field whose address is returned.
Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == EXTERNAL_REFERENCE);
  reconstructed_adr_ptr_ = Assembler::target_address_at(pc_);
  return &reconstructed_adr_ptr_;
}
199
200
// Returns the global property cell referenced at pc_ as a handle. The word
// at pc_ holds a handle location for the cell.
Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<JSGlobalPropertyCell>(
      reinterpret_cast<JSGlobalPropertyCell**>(address));
}
207
208
// Returns the global property cell referenced at pc_. The stored address
// points at the cell's value slot, so the cell itself is recovered by
// subtracting kValueOffset.
JSGlobalPropertyCell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = Memory::Address_at(pc_);
  Object* object = HeapObject::FromAddress(
      address - JSGlobalPropertyCell::kValueOffset);
  return reinterpret_cast<JSGlobalPropertyCell*>(object);
}
216
217
// Patches pc_ to reference |cell|, storing the address of the cell's value
// slot, and notifies the incremental marker when requested.
void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
                                WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = cell->address() + JSGlobalPropertyCell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because cell can never be on
    // evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}
230
231
// Returns the call target of a patched return/debug-break-slot sequence.
Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  // The pc_ offset of 0 assumes mips patched return sequence per
  // debug-mips.cc BreakLocationIterator::SetDebugBreakAtReturn(), or
  // debug break slot per BreakLocationIterator::SetDebugBreakAtSlot().
  return Assembler::target_address_at(pc_);
}
240
241
// Patches the call target of a patched return/debug-break-slot sequence and
// records the write with the incremental marker.
void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  // The pc_ offset of 0 assumes mips patched return sequence per
  // debug-mips.cc BreakLocationIterator::SetDebugBreakAtReturn(), or
  // debug break slot per BreakLocationIterator::SetDebugBreakAtSlot().
  Assembler::set_target_address_at(pc_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
255
256
// Reads the object stored alongside a patched call sequence.
Object* RelocInfo::call_object() {
  return *call_object_address();
}
260
261
// Returns the slot holding the object associated with a patched call
// sequence; the slot lives two instructions past pc_.
Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}
267
268
// Stores |target| in the object slot of a patched call sequence.
void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}
272
273
// Returns true if pc_ points at a patched return sequence: LUI/ORI loading
// the target address, followed by either JAL or the SPECIAL-encoded JALR.
bool RelocInfo::IsPatchedReturnSequence() {
  Instr instr0 = Assembler::instr_at(pc_);
  Instr instr1 = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize);
  Instr instr2 = Assembler::instr_at(pc_ + 2 * Assembler::kInstrSize);
  bool patched_return = ((instr0 & kOpcodeMask) == LUI &&
                         (instr1 & kOpcodeMask) == ORI &&
                         ((instr2 & kOpcodeMask) == JAL ||
                          ((instr2 & kOpcodeMask) == SPECIAL &&
                           (instr2 & kFunctionFieldMask) == JALR)));
  return patched_return;
}
285
286
// A debug break slot starts life as a DEBUG_BREAK_NOP; once patched, the
// first instruction is no longer that nop.
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}
291
292
// Dispatches this relocation entry to the matching ObjectVisitor callback
// based on its mode.
void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    visitor->VisitGlobalPropertyCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // TODO(isolates): Get a cached isolate below.
  // Debug targets are only visited while the debugger has break points set
  // and the site has actually been patched.
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             Isolate::Current()->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    visitor->VisitRuntimeEntry(this);
  }
}
316
317
// Static-visitor variant of Visit() used by the GC; dispatches based on the
// relocation mode. Unlike the ObjectVisitor variant, the isolate is taken
// from |heap| rather than Isolate::Current().
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    StaticVisitor::VisitGlobalPropertyCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Debug targets are only visited while break points are set and the site
  // has actually been patched.
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
341
342
343 // -----------------------------------------------------------------------------
344 // Assembler.
345
346
CheckBuffer()347 void Assembler::CheckBuffer() {
348 if (buffer_space() <= kGap) {
349 GrowBuffer();
350 }
351 }
352
353
CheckTrampolinePoolQuick()354 void Assembler::CheckTrampolinePoolQuick() {
355 if (pc_offset() >= next_buffer_check_) {
356 CheckTrampolinePool();
357 }
358 }
359
360
// Writes one instruction word at pc_, growing the buffer first unless
// growth is currently blocked, then runs the quick trampoline pool check.
void Assembler::emit(Instr x) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  CheckTrampolinePoolQuick();
}
369
370
371 } } // namespace v8::internal
372
373 #endif // V8_MIPS_ASSEMBLER_MIPS_INL_H_
374