• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #ifndef V8_X64_ASSEMBLER_X64_INL_H_
29 #define V8_X64_ASSEMBLER_X64_INL_H_
30 
31 #include "cpu.h"
32 #include "debug.h"
33 #include "v8memory.h"
34 
35 namespace v8 {
36 namespace internal {
37 
38 
39 // -----------------------------------------------------------------------------
40 // Implementation of Assembler
41 
42 
// Writes a 32-bit little-endian value at the current emission position
// and advances pc_ past it.
void Assembler::emitl(uint32_t x) {
  Memory::uint32_at(pc_) = x;
  pc_ += sizeof(uint32_t);
}
47 
48 
// Writes a 64-bit value at the current emission position, recording
// relocation info for it unless the mode is NONE.  RecordRelocInfo is
// called before pc_ is advanced, so the reloc entry refers to the start
// of the quadword just written.
void Assembler::emitq(uint64_t x, RelocInfo::Mode rmode) {
  Memory::uint64_at(pc_) = x;
  if (rmode != RelocInfo::NONE) {
    RecordRelocInfo(rmode, x);
  }
  pc_ += sizeof(uint64_t);
}
56 
57 
// Writes a 16-bit little-endian value at the current emission position
// and advances pc_ past it.
void Assembler::emitw(uint16_t x) {
  Memory::uint16_at(pc_) = x;
  pc_ += sizeof(uint16_t);
}
62 
63 
// Records code-target relocation info at the current position and emits a
// 32-bit index into code_targets_ identifying |target|.  Consecutive emits
// of the same target reuse the last table entry instead of growing the
// table (see code_target_object_handle_at for the reverse lookup).
void Assembler::emit_code_target(Handle<Code> target, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  RecordRelocInfo(rmode);
  int current = code_targets_.length();
  if (current > 0 && code_targets_.last().is_identical_to(target)) {
    // Optimization if we keep jumping to the same code target.
    emitl(current - 1);
  } else {
    code_targets_.Add(target);
    emitl(current);
  }
}
76 
77 
// Emits a REX prefix with REX.W set (64-bit operand size), REX.R from
// |reg| and REX.B from |rm_reg|.
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}
81 
82 
// REX.W prefix with REX.R taken from bit 3 of the XMM register code and
// REX.B from bit 3 of the general-purpose rm register code.
void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}
86 
87 
// REX.W prefix with REX.R from bit 3 of the general-purpose register code
// and REX.B from bit 3 of the XMM rm register code.
void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}
91 
92 
// REX.W prefix with REX.R from |reg|; REX.X and REX.B come pre-computed
// in the operand's rex_ field.
void Assembler::emit_rex_64(Register reg, const Operand& op) {
  emit(0x48 | reg.high_bit() << 2 | op.rex_);
}
96 
97 
// REX.W prefix with REX.R from bit 3 of the XMM register code; REX.X and
// REX.B come pre-computed in the operand's rex_ field.
void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
}
101 
102 
// REX.W prefix for a single rm operand: only REX.B may be set.
void Assembler::emit_rex_64(Register rm_reg) {
  ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code());
  emit(0x48 | rm_reg.high_bit());
}
107 
108 
// REX.W prefix whose REX.X/REX.B bits come from the operand's rex_ field.
void Assembler::emit_rex_64(const Operand& op) {
  emit(0x48 | op.rex_);
}
112 
113 
// Emits a REX prefix without REX.W (32-bit operand size), REX.R from
// |reg| and REX.B from |rm_reg|.  Always emitted, even if all bits are 0.
void Assembler::emit_rex_32(Register reg, Register rm_reg) {
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
}
117 
118 
// REX prefix without REX.W; REX.R from |reg|, REX.X/REX.B from the operand.
void Assembler::emit_rex_32(Register reg, const Operand& op) {
  emit(0x40 | reg.high_bit() << 2  | op.rex_);
}
122 
123 
// REX prefix without REX.W for a single rm operand: only REX.B may be set.
void Assembler::emit_rex_32(Register rm_reg) {
  emit(0x40 | rm_reg.high_bit());
}
127 
128 
// REX prefix without REX.W; REX.X/REX.B come from the operand's rex_ field.
void Assembler::emit_rex_32(const Operand& op) {
  emit(0x40 | op.rex_);
}
132 
133 
emit_optional_rex_32(Register reg,Register rm_reg)134 void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
135   byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
136   if (rex_bits != 0) emit(0x40 | rex_bits);
137 }
138 
139 
emit_optional_rex_32(Register reg,const Operand & op)140 void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
141   byte rex_bits =  reg.high_bit() << 2 | op.rex_;
142   if (rex_bits != 0) emit(0x40 | rex_bits);
143 }
144 
145 
emit_optional_rex_32(XMMRegister reg,const Operand & op)146 void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
147   byte rex_bits =  (reg.code() & 0x8) >> 1 | op.rex_;
148   if (rex_bits != 0) emit(0x40 | rex_bits);
149 }
150 
151 
emit_optional_rex_32(XMMRegister reg,XMMRegister base)152 void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
153   byte rex_bits =  (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
154   if (rex_bits != 0) emit(0x40 | rex_bits);
155 }
156 
157 
emit_optional_rex_32(XMMRegister reg,Register base)158 void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
159   byte rex_bits =  (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
160   if (rex_bits != 0) emit(0x40 | rex_bits);
161 }
162 
163 
emit_optional_rex_32(Register reg,XMMRegister base)164 void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
165   byte rex_bits =  (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
166   if (rex_bits != 0) emit(0x40 | rex_bits);
167 }
168 
169 
// Emits REX.B-only prefix (0x41) if the rm register is r8-r15; nothing
// otherwise.
void Assembler::emit_optional_rex_32(Register rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}
173 
174 
// Emits a REX prefix only if the operand carries REX.X/REX.B bits.
void Assembler::emit_optional_rex_32(const Operand& op) {
  if (op.rex_ != 0) emit(0x40 | op.rex_);
}
178 
179 
target_address_at(Address pc)180 Address Assembler::target_address_at(Address pc) {
181   return Memory::int32_at(pc) + pc + 4;
182 }
183 
184 
// Patches the 32-bit pc-relative displacement at |pc| so it resolves to
// |target| (displacement is relative to the end of the 4-byte field), then
// flushes the instruction cache for the modified bytes.
void Assembler::set_target_address_at(Address pc, Address target) {
  Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
  CPU::FlushICache(pc, sizeof(int32_t));
}
189 
// Looks up the code-target handle for the table index that
// emit_code_target wrote at |pc|.
Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
  return code_targets_[Memory::int32_at(pc)];
}
193 
194 // -----------------------------------------------------------------------------
195 // Implementation of RelocInfo
196 
// The modes possibly affected by apply must be in kApplyMask.
// Adjusts this reloc entry after the containing code object moved by
// |delta| bytes.
void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    Memory::Address_at(pc_) += static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (IsCodeTarget(rmode_)) {
    // A pc-relative target stays fixed in absolute terms, so the stored
    // displacement must shrink by the amount the code moved forward.
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(int32_t));
  }
}
208 
209 
target_address()210 Address RelocInfo::target_address() {
211   ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
212   if (IsCodeTarget(rmode_)) {
213     return Assembler::target_address_at(pc_);
214   } else {
215     return Memory::Address_at(pc_);
216   }
217 }
218 
219 
// Returns the address of the in-code slot holding the target, i.e. pc_
// itself (the target encoding starts right at the reloc position).
Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  return reinterpret_cast<Address>(pc_);
}
224 
225 
target_address_size()226 int RelocInfo::target_address_size() {
227   if (IsCodedSpecially()) {
228     return Assembler::kCallTargetSize;
229   } else {
230     return Assembler::kExternalTargetSize;
231   }
232 }
233 
234 
// Redirects this entry to |target|.  Code targets are re-encoded as a
// pc-relative displacement; runtime entries are stored absolutely, with an
// explicit icache flush (the code-target path flushes inside the setter).
void RelocInfo::set_target_address(Address target) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  if (IsCodeTarget(rmode_)) {
    Assembler::set_target_address_at(pc_, target);
  } else {
    Memory::Address_at(pc_) = target;
    CPU::FlushICache(pc_, sizeof(Address));
  }
}
244 
245 
// Reads the object pointer embedded at pc_.
Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}
250 
251 
target_object_handle(Assembler * origin)252 Handle<Object> RelocInfo::target_object_handle(Assembler *origin) {
253   ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
254   if (rmode_ == EMBEDDED_OBJECT) {
255     return Memory::Object_Handle_at(pc_);
256   } else {
257     return origin->code_target_object_handle_at(pc_);
258   }
259 }
260 
261 
// Returns the address of the object slot in the instruction stream, for
// visitors that update the pointer in place.
Object** RelocInfo::target_object_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object**>(pc_);
}
266 
267 
// Returns the address of the external-reference slot in the instruction
// stream.
Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return reinterpret_cast<Address*>(pc_);
}
272 
273 
// Overwrites the embedded object pointer at pc_ and flushes the icache
// for the modified word.
void RelocInfo::set_target_object(Object* target) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  *reinterpret_cast<Object**>(pc_) = target;
  CPU::FlushICache(pc_, sizeof(Address));
}
279 
280 
// Returns a handle to the referenced global property cell.  The word at
// pc_ holds the address of the cell's value slot, which doubles as the
// handle location here.
Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<JSGlobalPropertyCell>(
      reinterpret_cast<JSGlobalPropertyCell**>(address));
}
287 
288 
// Reconstructs the JSGlobalPropertyCell from the stored address: pc_ holds
// the address of the cell's value field, so backing up by kValueOffset
// yields the cell object's own address.
JSGlobalPropertyCell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = Memory::Address_at(pc_);
  Object* object = HeapObject::FromAddress(
      address - JSGlobalPropertyCell::kValueOffset);
  return reinterpret_cast<JSGlobalPropertyCell*>(object);
}
296 
297 
// Points this entry at |cell| by storing the address of the cell's value
// slot (mirroring target_cell's decoding), then flushes the icache.
void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell) {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = cell->address() + JSGlobalPropertyCell::kValueOffset;
  Memory::Address_at(pc_) = address;
  CPU::FlushICache(pc_, sizeof(Address));
}
304 
305 
// Returns true if the code at pc_ has been patched from a return sequence
// into a debugger call sequence.
bool RelocInfo::IsPatchedReturnSequence() {
  // The recognized call sequence is:
  //  movq(kScratchRegister, immediate64); call(kScratchRegister);
  // It only needs to be distinguished from a return sequence
  //  movq(rsp, rbp); pop(rbp); ret(n); int3 *6
  // The 11th byte is int3 (0xCC) in the return sequence and
  // REX.WB (0x48+register bit) for the call sequence.
#ifdef ENABLE_DEBUGGER_SUPPORT
  return pc_[10] != 0xCC;
#else
  // Without debugger support no patching ever happens.
  return false;
#endif
}
319 
320 
// A debug-break slot is emitted as nops; once patched it no longer starts
// with a nop.
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  return !Assembler::IsNop(pc());
}
324 
325 
// Reads the call target of a patched return/debug-break-slot sequence.
// The target is the immediate64 of the movq in the patched sequence, at a
// fixed offset from pc_.
Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(
      pc_ + Assembler::kRealPatchReturnSequenceAddressOffset);
}
332 
333 
// Rewrites the call target (the movq immediate) of a patched
// return/debug-break-slot sequence and flushes the icache for the
// modified word.
void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) =
      target;
  CPU::FlushICache(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset,
                   sizeof(Address));
}
342 
343 
// Reads the call target as an Object* (see call_object_address).
Object* RelocInfo::call_object() {
  return *call_object_address();
}
347 
348 
// Writes the call target as an Object* (see call_object_address).
// NOTE(review): no icache flush here, unlike set_call_address — presumably
// the caller flushes; confirm before relying on it.
void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}
352 
353 
// Returns the address of the call-target slot within a patched
// return/debug-break-slot sequence.
Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(
      pc_ + Assembler::kPatchReturnSequenceAddressOffset);
}
360 
361 
// Dispatches this reloc entry to the matching visitor callback.  Modes
// whose slot the visitor may rewrite in place (embedded objects, external
// references) are followed by an icache flush.
void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitPointer(target_object_address());
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    visitor->VisitGlobalPropertyCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(target_reference_address());
    CPU::FlushICache(pc_, sizeof(Address));
#ifdef ENABLE_DEBUGGER_SUPPORT
  // TODO(isolates): Get a cached isolate below.
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             Isolate::Current()->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    visitor->VisitRuntimeEntry(this);
  }
}
387 
388 
// Static-visitor counterpart of Visit(ObjectVisitor*): identical dispatch,
// but the callbacks are resolved at compile time through StaticVisitor.
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitPointer(heap, target_object_address());
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    StaticVisitor::VisitGlobalPropertyCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(target_reference_address());
    CPU::FlushICache(pc_, sizeof(Address));
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
414 
415 
416 // -----------------------------------------------------------------------------
417 // Implementation of Operand
418 
// Writes the ModRM byte (mod and rm fields; reg field is filled in later
// by the instruction emitter) into the first operand buffer byte.
void Operand::set_modrm(int mod, Register rm_reg) {
  ASSERT(is_uint2(mod));
  buf_[0] = mod << 6 | rm_reg.low_bits();
  // Set REX.B to the high bit of rm.code().
  rex_ |= rm_reg.high_bit();
}
425 
426 
// Appends a SIB byte (scale/index/base) after the ModRM byte and records
// the index/base extension bits as REX.X/REX.B.
void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  ASSERT(len_ == 1);
  ASSERT(is_uint2(scale));
  // Use SIB with no index register only for base rsp or r12. Otherwise we
  // would skip the SIB byte entirely.
  ASSERT(!index.is(rsp) || base.is(rsp) || base.is(r12));
  buf_[1] = (scale << 6) | (index.low_bits() << 3) | base.low_bits();
  rex_ |= index.high_bit() << 1 | base.high_bit();
  len_ = 2;
}
437 
// Appends an 8-bit displacement after the ModRM (and optional SIB) byte.
void Operand::set_disp8(int disp) {
  ASSERT(is_int8(disp));
  ASSERT(len_ == 1 || len_ == 2);
  int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int8_t);
}
445 
// Appends a 32-bit displacement after the ModRM (and optional SIB) byte.
void Operand::set_disp32(int disp) {
  ASSERT(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
}
452 
453 
454 } }  // namespace v8::internal
455 
456 #endif  // V8_X64_ASSEMBLER_X64_INL_H_
457