// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been
// modified significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.

// A light-weight IA32 Assembler.

#ifndef V8_IA32_ASSEMBLER_IA32_INL_H_
#define V8_IA32_ASSEMBLER_IA32_INL_H_

#include "ia32/assembler-ia32.h"

#include "cpu.h"
#include "debug.h"

namespace v8 {
namespace internal {


// The modes possibly affected by apply must be in kApplyMask.
void RelocInfo::apply(intptr_t delta) {
  if (rmode_ == RUNTIME_ENTRY || IsCodeTarget(rmode_)) {
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p -= delta;  // Relocate entry.
    CPU::FlushICache(p, sizeof(uint32_t));
  } else if (rmode_ == JS_RETURN && IsPatchedReturnSequence()) {
    // Special handling of js_return when a break point is set (call
    // instruction has been inserted).
    int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
    *p -= delta;  // Relocate entry.
    CPU::FlushICache(p, sizeof(uint32_t));
  } else if (rmode_ == DEBUG_BREAK_SLOT && IsPatchedDebugBreakSlotSequence()) {
    // Special handling of a debug break slot when a break point is set (call
    // instruction has been inserted).
    int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
    *p -= delta;  // Relocate entry.
    CPU::FlushICache(p, sizeof(uint32_t));
  } else if (IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // Relocate entry.
    CPU::FlushICache(p, sizeof(uint32_t));
  }
}


Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  return Assembler::target_address_at(pc_);
}


Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


int RelocInfo::target_address_size() {
  return Assembler::kSpecialTargetSize;
}


void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  Assembler::set_target_address_at(pc_, target);
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_Handle_at(pc_);
}


Object** RelocInfo::target_object_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return &Memory::Object_at(pc_);
}


void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Memory::Object_at(pc_) = target;
  CPU::FlushICache(pc_, sizeof(Address));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return reinterpret_cast<Address*>(pc_);
}


Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<JSGlobalPropertyCell>(
      reinterpret_cast<JSGlobalPropertyCell**>(address));
}


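// The reloc slot holds the address of the cell's value field; stepping back
// by JSGlobalPropertyCell::kValueOffset recovers the cell object itself.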
JSGlobalPropertyCell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = Memory::Address_at(pc_);
  Object* object = HeapObject::FromAddress(
      address - JSGlobalPropertyCell::kValueOffset);
  return reinterpret_cast<JSGlobalPropertyCell*>(object);
}


void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
                                WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = cell->address() + JSGlobalPropertyCell::kValueOffset;
  Memory::Address_at(pc_) = address;
  CPU::FlushICache(pc_, sizeof(Address));
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because the cell can never be
    // on an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}


Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Assembler::target_address_at(pc_ + 1);
}


void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Assembler::set_target_address_at(pc_ + 1, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 1);
}


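// 0xE8 is the IA-32 opcode for a call with a 32-bit relative displacement,
// so a leading 0xE8 byte means the return sequence has been patched with a
// call instruction.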
bool RelocInfo::IsPatchedReturnSequence() {
  return *pc_ == 0xE8;
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  return !Assembler::IsNop(pc());
}


void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    visitor->VisitGlobalPropertyCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
#ifdef ENABLE_DEBUGGER_SUPPORT
  // TODO(isolates): Get a cached isolate below.
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             Isolate::Current()->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    StaticVisitor::VisitGlobalPropertyCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}



Immediate::Immediate(int x) {
  x_ = x;
  rmode_ = RelocInfo::NONE;
}


Immediate::Immediate(const ExternalReference& ext) {
  x_ = reinterpret_cast<int32_t>(ext.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Immediate::Immediate(Label* internal_offset) {
  x_ = reinterpret_cast<int32_t>(internal_offset);
  rmode_ = RelocInfo::INTERNAL_REFERENCE;
}


Immediate::Immediate(Handle<Object> handle) {
  // Verify all Objects referred by code are NOT in new space.
  Object* obj = *handle;
  ASSERT(!HEAP->InNewSpace(obj));
  if (obj->IsHeapObject()) {
    x_ = reinterpret_cast<intptr_t>(handle.location());
    rmode_ = RelocInfo::EMBEDDED_OBJECT;
  } else {
    // no relocation needed
    x_ = reinterpret_cast<intptr_t>(obj);
    rmode_ = RelocInfo::NONE;
  }
}


Immediate::Immediate(Smi* value) {
  x_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE;
}


Immediate::Immediate(Address addr) {
  x_ = reinterpret_cast<int32_t>(addr);
  rmode_ = RelocInfo::NONE;
}


void Assembler::emit(uint32_t x) {
  *reinterpret_cast<uint32_t*>(pc_) = x;
  pc_ += sizeof(uint32_t);
}


void Assembler::emit(Handle<Object> handle) {
  // Verify all Objects referred by code are NOT in new space.
  Object* obj = *handle;
  ASSERT(!isolate()->heap()->InNewSpace(obj));
  if (obj->IsHeapObject()) {
    emit(reinterpret_cast<intptr_t>(handle.location()),
         RelocInfo::EMBEDDED_OBJECT);
  } else {
    // no relocation needed
    emit(reinterpret_cast<intptr_t>(obj));
  }
}


void Assembler::emit(uint32_t x, RelocInfo::Mode rmode, unsigned id) {
  if (rmode == RelocInfo::CODE_TARGET && id != kNoASTId) {
    RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, static_cast<intptr_t>(id));
  } else if (rmode != RelocInfo::NONE) {
    RecordRelocInfo(rmode);
  }
  emit(x);
}


void Assembler::emit(const Immediate& x) {
  if (x.rmode_ == RelocInfo::INTERNAL_REFERENCE) {
    Label* label = reinterpret_cast<Label*>(x.x_);
    emit_code_relative_offset(label);
    return;
  }
  if (x.rmode_ != RelocInfo::NONE) RecordRelocInfo(x.rmode_);
  emit(x.x_);
}


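// Bound labels are emitted as an offset from the start of the code object:
// the position in the instruction stream plus the Code header size, adjusted
// for the heap-object tag. Unbound labels record a CODE_RELATIVE displacement
// that is resolved once the label is bound.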
void Assembler::emit_code_relative_offset(Label* label) {
  if (label->is_bound()) {
    int32_t pos;
    pos = label->pos() + Code::kHeaderSize - kHeapObjectTag;
    emit(pos);
  } else {
    emit_disp(label, Displacement::CODE_RELATIVE);
  }
}


void Assembler::emit_w(const Immediate& x) {
  ASSERT(x.rmode_ == RelocInfo::NONE);
  uint16_t value = static_cast<uint16_t>(x.x_);
  reinterpret_cast<uint16_t*>(pc_)[0] = value;
  pc_ += sizeof(uint16_t);
}


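// Call and jump targets are stored as a 32-bit displacement relative to the
// end of the displacement field itself (pc + sizeof(int32_t)), matching the
// IA-32 relative call/jmp encoding.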
Address Assembler::target_address_at(Address pc) {
  return pc + sizeof(int32_t) + *reinterpret_cast<int32_t*>(pc);
}


void Assembler::set_target_address_at(Address pc, Address target) {
  int32_t* p = reinterpret_cast<int32_t*>(pc);
  *p = target - (pc + sizeof(int32_t));
  CPU::FlushICache(p, sizeof(int32_t));
}


Displacement Assembler::disp_at(Label* L) {
  return Displacement(long_at(L->pos()));
}


void Assembler::disp_at_put(Label* L, Displacement disp) {
  long_at_put(L->pos(), disp.data());
}


void Assembler::emit_disp(Label* L, Displacement::Type type) {
  Displacement disp(L, type);
  L->link_to(pc_offset());
  emit(static_cast<int>(disp.data()));
}


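// Near (short) branches use a single signed byte displacement. While a near
// label is unbound, forward references to it are chained through these
// displacement bytes; an unlinked label emits 0x00.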
void Assembler::emit_near_disp(Label* L) {
  byte disp = 0x00;
  if (L->is_near_linked()) {
    int offset = L->near_link_pos() - pc_offset();
    ASSERT(is_int8(offset));
    disp = static_cast<byte>(offset & 0xFF);
  }
  L->link_to(pc_offset(), Label::kNear);
  *pc_++ = disp;
}


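// ModR/M byte layout: mod in bits 7..6, reg/opcode in bits 5..3, r/m in
// bits 2..0. Only mod and r/m are set here; the reg field is presumably
// filled in when the instruction emits the operand.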
void Operand::set_modrm(int mod, Register rm) {
  ASSERT((mod & -4) == 0);
  buf_[0] = mod << 6 | rm.code();
  len_ = 1;
}


void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  ASSERT(len_ == 1);
  ASSERT((scale & -4) == 0);
  // Use SIB with no index register only for base esp.
  ASSERT(!index.is(esp) || base.is(esp));
  buf_[1] = scale << 6 | index.code() << 3 | base.code();
  len_ = 2;
}


void Operand::set_disp8(int8_t disp) {
  ASSERT(len_ == 1 || len_ == 2);
  *reinterpret_cast<int8_t*>(&buf_[len_++]) = disp;
}


void Operand::set_dispr(int32_t disp, RelocInfo::Mode rmode) {
  ASSERT(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
  rmode_ = rmode;
}

Operand::Operand(Register reg) {
  // reg
  set_modrm(3, reg);
}


Operand::Operand(XMMRegister xmm_reg) {
  Register reg = { xmm_reg.code() };
  set_modrm(3, reg);
}


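// With mod == 0, r/m == ebp selects no base register and a plain 32-bit
// displacement, i.e. an absolute [disp32] memory operand on IA-32.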
Operand::Operand(int32_t disp, RelocInfo::Mode rmode) {
  // [disp/r]
  set_modrm(0, ebp);
  set_dispr(disp, rmode);
}

} }  // namespace v8::internal

#endif  // V8_IA32_ASSEMBLER_IA32_INL_H_