• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if V8_TARGET_ARCH_IA32
31 
32 #include "bootstrapper.h"
33 #include "codegen.h"
34 #include "cpu-profiler.h"
35 #include "debug.h"
36 #include "isolate-inl.h"
37 #include "runtime.h"
38 #include "serialize.h"
39 
40 namespace v8 {
41 namespace internal {
42 
43 // -------------------------------------------------------------------------
44 // MacroAssembler implementation.
45 
// Constructs a MacroAssembler over the given code buffer.  |arg_isolate| may
// be NULL (e.g. when assembling without an isolate); in that case no
// code_object_ handle is created.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (isolate() != NULL) {
    // TODO(titzer): should we just use a null handle here instead?
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}
56 
57 
// Loads |src| into |dst| using the move instruction appropriate for the
// width and signedness of representation |r|.  Doubles are not handled here.
void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
  ASSERT(!r.IsDouble());
  if (r.IsInteger8()) {
    movsx_b(dst, src);  // Sign-extending 8-bit load.
  } else if (r.IsUInteger8()) {
    movzx_b(dst, src);  // Zero-extending 8-bit load.
  } else if (r.IsInteger16()) {
    movsx_w(dst, src);  // Sign-extending 16-bit load.
  } else if (r.IsUInteger16()) {
    movzx_w(dst, src);  // Zero-extending 16-bit load.
  } else {
    mov(dst, src);      // Plain 32-bit load.
  }
}
72 
73 
// Stores |src| to |dst| using the store width implied by representation |r|
// (8-bit, 16-bit, or full word).  Doubles are not handled here.
void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
  ASSERT(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    mov_b(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    mov_w(dst, src);
  } else {
    mov(dst, src);
  }
}
84 
85 
// Loads the root-list value at |index| into |destination|.  Roots that can
// be treated as constants are embedded directly; otherwise the value is
// read from the roots array, reusing |destination| as the index register.
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
    mov(destination, value);
    return;
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(destination, Immediate(index));
  mov(destination, Operand::StaticArray(destination,
                                        times_pointer_size,
                                        roots_array_start));
}
99 
100 
// Stores |source| into the roots array at |index|.  Only roots that may be
// written after initialization are legal targets; |scratch| is clobbered.
void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  ASSERT(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}
111 
112 
// Compares |with| against the root at |index|, loading the root through the
// roots array (works for any root).  |scratch| is clobbered.
void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                times_pointer_size,
                                roots_array_start));
}
123 
124 
// Compares |with| against the root at |index|, which must be a root that
// can be embedded as a constant (no scratch register needed).
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}
130 
131 
// Memory-operand variant: compares |with| against the constant root at
// |index|.
void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}
138 
139 
// Jumps to |condition_met| depending on whether |object| lies in new space.
// Masks the object address down to its page header and tests the page's
// from-space/to-space flags.  |scratch| is clobbered; it may alias |object|.
void MacroAssembler::InNewSpace(
    Register object,
    Register scratch,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == equal || cc == not_equal);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check that we can use a test_b.
  ASSERT(MemoryChunk::IN_FROM_SPACE < 8);
  ASSERT(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  // If non-zero, the page belongs to new-space.
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}
163 
164 
// Records |addr| in the store buffer and, if the buffer overflowed, calls
// the StoreBufferOverflowStub.  |and_then| selects whether the generated
// code returns at the end (kReturnAtEnd) or falls through
// (kFallThroughAtEnd).  |scratch| is clobbered.
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr,
    Register scratch,
    SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    // Debug builds verify that |object| really is in new space.
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  // Check for end of buffer (the overflow bit becomes set in the new top).
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
210 
211 
// Clamps the double in |input_reg| to the uint8 range [0, 255] and leaves
// the result in |result_reg|.  |scratch_reg| is zeroed and used for the
// NaN/negative comparison on the failure path.
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  pxor(scratch_reg, scratch_reg);  // scratch_reg := +0.0 for later compare.
  cvtsd2si(result_reg, input_reg);
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);  // Already in [0, 255]: done.
  // 0x80000000 is cvtsd2si's "invalid" result (overflow/NaN).
  cmp(result_reg, Immediate(0x80000000));
  j(equal, &conv_failure, Label::kNear);
  // Out of range but convertible: flags still hold result vs 0x80000000.
  // above (unsigned >) means the integer was negative -> clamp to 0;
  // otherwise it was > 255 -> clamp to 255 (0 - 1 = -1, masked to 255).
  mov(result_reg, Immediate(0));
  setcc(above, result_reg);
  sub(result_reg, Immediate(1));
  and_(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  // Overflow or NaN: below +0.0 (or NaN, which never compares below)
  // yields 0, anything else yields 255.
  Set(result_reg, Immediate(0));
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);
  Set(result_reg, Immediate(255));
  bind(&done);
}
235 
236 
// Clamps the signed 32-bit integer in |reg| to the uint8 range [0, 255]
// in place.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);  // Already in [0, 255].
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
245 
246 
// Slow path for double-to-int32 truncation: calls the DoubleToIStub on the
// double stored at [input_reg + offset], placing the result in |result_reg|.
void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(input_reg, result_reg, offset, true);
  call(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
}
253 
254 
// Truncates the double in |input_reg| to an int32 in |result_reg|.
// cvttsd2si returns 0x80000000 on overflow/NaN; in that case the double is
// spilled to the stack and the stub-based slow path is taken.
void MacroAssembler::TruncateDoubleToI(Register result_reg,
                                       XMMRegister input_reg) {
  Label done;
  cvttsd2si(result_reg, Operand(input_reg));
  cmp(result_reg, 0x80000000u);
  j(not_equal, &done, Label::kNear);

  sub(esp, Immediate(kDoubleSize));
  movsd(MemOperand(esp, 0), input_reg);
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
  bind(&done);
}
268 
269 
// Truncates the value at the top of the x87 stack to an int32 in
// |result_reg| by spilling it to memory and invoking the slow-path stub.
// The x87 top-of-stack value itself is not popped (fst_d, not fstp_d).
void MacroAssembler::TruncateX87TOSToI(Register result_reg) {
  sub(esp, Immediate(kDoubleSize));
  fst_d(MemOperand(esp, 0));
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
}
276 
277 
// Converts the x87 top-of-stack value to an int32 in |result_reg|, jumping
// to |conversion_failed| if the value is not exactly representable (lost
// precision or NaN) or, with FAIL_ON_MINUS_ZERO, if the input is -0.
void MacroAssembler::X87TOSToI(Register result_reg,
                               MinusZeroMode minus_zero_mode,
                               Label* conversion_failed,
                               Label::Distance dst) {
  Label done;
  sub(esp, Immediate(kPointerSize));
  fld(0);
  fist_s(MemOperand(esp, 0));   // Round TOS to int32 in memory.
  fild_s(MemOperand(esp, 0));   // Reload it as a double for comparison.
  pop(result_reg);
  FCmp();                        // Compare rounded value with the original.
  j(not_equal, conversion_failed, dst);
  j(parity_even, conversion_failed, dst);  // NaN.
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    // To check for minus zero, we load the value again as float, and check
    // if that is still 0.
    sub(esp, Immediate(kPointerSize));
    fst_s(MemOperand(esp, 0));
    pop(result_reg);
    test(result_reg, Operand(result_reg));  // -0.0f has the sign bit set.
    j(not_zero, conversion_failed, dst);
  }
  bind(&done);
}
304 
305 
// Converts the double in |input_reg| to an int32 in |result_reg|, jumping
// to |conversion_failed| on lost precision, NaN, or (with
// FAIL_ON_MINUS_ZERO) a -0 input.  |scratch| is clobbered.
void MacroAssembler::DoubleToI(Register result_reg,
                               XMMRegister input_reg,
                               XMMRegister scratch,
                               MinusZeroMode minus_zero_mode,
                               Label* conversion_failed,
                               Label::Distance dst) {
  ASSERT(!input_reg.is(scratch));
  cvttsd2si(result_reg, Operand(input_reg));
  Cvtsi2sd(scratch, Operand(result_reg));  // Round-trip back to double.
  ucomisd(scratch, input_reg);
  j(not_equal, conversion_failed, dst);
  j(parity_even, conversion_failed, dst);  // NaN.
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    Label done;
    // The integer converted back is equal to the original. We
    // only have to test if we got -0 as an input.
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    movmskpd(result_reg, input_reg);
    // Bit 0 contains the sign of the double in input_reg.
    // If input was positive, we are ok and return 0, otherwise
    // jump to conversion_failed.
    and_(result_reg, 1);
    j(not_zero, conversion_failed, dst);
    bind(&done);
  }
}
333 
334 
// Truncates the HeapNumber pointed to by |input_reg| to an int32 in
// |result_reg|.  Picks the best available strategy: SSE3 fisttp, SSE2
// cvttsd2si, or the stub-based slow path; each fast path falls back to the
// stub when the value is out of int32 range.
void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done, slow_case;

  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(this, SSE3);
    Label convert;
    // Use more powerful conversion when sse3 is available.
    // Load x87 register with heap number.
    fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
    // Get exponent alone and check for too-big exponent.
    mov(result_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
    and_(result_reg, HeapNumber::kExponentMask);
    const uint32_t kTooBigExponent =
        (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
    cmp(Operand(result_reg), Immediate(kTooBigExponent));
    j(greater_equal, &slow_case, Label::kNear);

    // Reserve space for 64 bit answer.
    sub(Operand(esp), Immediate(kDoubleSize));
    // Do conversion, which cannot fail because we checked the exponent.
    fisttp_d(Operand(esp, 0));
    mov(result_reg, Operand(esp, 0));  // Low word of answer is the result.
    add(Operand(esp), Immediate(kDoubleSize));
    jmp(&done, Label::kNear);

    // Slow case.
    bind(&slow_case);
    if (input_reg.is(result_reg)) {
      // Input is clobbered. Restore number from fpu stack
      sub(Operand(esp), Immediate(kDoubleSize));
      fstp_d(Operand(esp, 0));
      SlowTruncateToI(result_reg, esp, 0);
      add(esp, Immediate(kDoubleSize));
    } else {
      fstp(0);  // Pop the x87 stack; stub reads from the heap number.
      SlowTruncateToI(result_reg, input_reg);
    }
  } else if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatureScope scope(this, SSE2);
    movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    cvttsd2si(result_reg, Operand(xmm0));
    cmp(result_reg, 0x80000000u);  // cvttsd2si's overflow/NaN sentinel.
    j(not_equal, &done, Label::kNear);
    // Check if the input was 0x8000000 (kMinInt).
    // If no, then we got an overflow and we deoptimize.
    ExternalReference min_int = ExternalReference::address_of_min_int();
    ucomisd(xmm0, Operand::StaticVariable(min_int));
    j(not_equal, &slow_case, Label::kNear);
    j(parity_even, &slow_case, Label::kNear);  // NaN.
    jmp(&done, Label::kNear);

    // Slow case.
    bind(&slow_case);
    if (input_reg.is(result_reg)) {
      // Input is clobbered. Restore number from double scratch.
      sub(esp, Immediate(kDoubleSize));
      movsd(MemOperand(esp, 0), xmm0);
      SlowTruncateToI(result_reg, esp, 0);
      add(esp, Immediate(kDoubleSize));
    } else {
      SlowTruncateToI(result_reg, input_reg);
    }
  } else {
    // No SSE support at all: always use the stub.
    SlowTruncateToI(result_reg, input_reg);
  }
  bind(&done);
}
403 
404 
// Converts the tagged (non-smi) value in |input_reg| to an int32 in
// |result_reg|.  Jumps to |lost_precision| if the value is not a heap
// number, is not exactly representable as int32, is NaN, or (with
// FAIL_ON_MINUS_ZERO) is -0.  Uses SSE2 when snapshot-safe, else x87.
void MacroAssembler::TaggedToI(Register result_reg,
                               Register input_reg,
                               XMMRegister temp,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision) {
  Label done;
  ASSERT(!temp.is(xmm0));

  cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  j(not_equal, lost_precision, Label::kNear);

  if (CpuFeatures::IsSafeForSnapshot(SSE2)) {
    ASSERT(!temp.is(no_xmm_reg));
    CpuFeatureScope scope(this, SSE2);

    movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    cvttsd2si(result_reg, Operand(xmm0));
    Cvtsi2sd(temp, Operand(result_reg));  // Round-trip back to double.
    ucomisd(xmm0, temp);
    RecordComment("Deferred TaggedToI: lost precision");
    j(not_equal, lost_precision, Label::kNear);
    RecordComment("Deferred TaggedToI: NaN");
    j(parity_even, lost_precision, Label::kNear);
    if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
      test(result_reg, Operand(result_reg));
      j(not_zero, &done, Label::kNear);
      movmskpd(result_reg, xmm0);  // Bit 0 = sign of the double.
      and_(result_reg, 1);
      RecordComment("Deferred TaggedToI: minus zero");
      j(not_zero, lost_precision, Label::kNear);
    }
  } else {
    // TODO(olivf) Converting a number on the fpu is actually quite slow. We
    // should first try a fast conversion and then bailout to this slow case.
    // The _pop variant of the failure label is used where the x87 stack
    // still holds the extra copy pushed by fld(0) and must be popped first.
    Label lost_precision_pop, zero_check;
    Label* lost_precision_int = (minus_zero_mode == FAIL_ON_MINUS_ZERO)
        ? &lost_precision_pop : lost_precision;
    sub(esp, Immediate(kPointerSize));
    fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
    if (minus_zero_mode == FAIL_ON_MINUS_ZERO) fld(0);
    fist_s(MemOperand(esp, 0));   // Round to int32 in memory.
    fild_s(MemOperand(esp, 0));   // Reload for comparison with original.
    FCmp();
    pop(result_reg);
    j(not_equal, lost_precision_int, Label::kNear);
    j(parity_even, lost_precision_int, Label::kNear);  // NaN.
    if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
      test(result_reg, Operand(result_reg));
      j(zero, &zero_check, Label::kNear);
      fstp(0);
      jmp(&done, Label::kNear);
      bind(&zero_check);
      // To check for minus zero, we load the value again as float, and check
      // if that is still 0.
      sub(esp, Immediate(kPointerSize));
      fstp_s(Operand(esp, 0));
      pop(result_reg);
      test(result_reg, Operand(result_reg));  // -0.0f has the sign bit set.
      j(zero, &done, Label::kNear);
      jmp(lost_precision, Label::kNear);

      bind(&lost_precision_pop);
      fstp(0);  // Discard the extra x87 copy before failing.
      jmp(lost_precision, Label::kNear);
    }
  }
  bind(&done);
}
474 
475 
// Converts |src|, interpreted as an unsigned 32-bit integer, into the
// double |dst|.  Cvtsi2sd is a signed conversion, so when the sign bit of
// |src| is set we add 2^32 (the uint32 bias) to correct the result.  The
// cmp's flags survive the intervening SSE moves, which do not touch EFLAGS.
void MacroAssembler::LoadUint32(XMMRegister dst,
                                Register src,
                                XMMRegister scratch) {
  Label done;
  cmp(src, Immediate(0));
  ExternalReference uint32_bias =
        ExternalReference::address_of_uint32_bias();
  movsd(scratch, Operand::StaticVariable(uint32_bias));
  Cvtsi2sd(dst, src);
  j(not_sign, &done, Label::kNear);
  addsd(dst, scratch);
  bind(&done);
}
489 
490 
// x87 variant of LoadUint32: converts |src| as an unsigned 32-bit integer
// and leaves the result on top of the x87 stack.  fild_s is a signed load,
// so 2^32 is added when |src| has the sign bit set.
void MacroAssembler::LoadUint32NoSSE2(Register src) {
  Label done;
  push(src);
  fild_s(Operand(esp, 0));
  cmp(src, Immediate(0));
  j(not_sign, &done, Label::kNear);
  ExternalReference uint32_bias =
        ExternalReference::address_of_uint32_bias();
  fld_d(Operand::StaticVariable(uint32_bias));
  faddp(1);
  bind(&done);
  add(esp, Immediate(kPointerSize));  // Pop the spill slot.
}
504 
505 
// Write barrier for a store of |value| into the FixedArray |object| at
// smi index |index|.  Computes the slot address into |index| (clobbering
// it) and delegates to RecordWrite.  |value| and |index| are clobbered;
// in debug builds they are zapped to provoke errors on misuse.
void MacroAssembler::RecordWriteArray(Register object,
                                      Register value,
                                      Register index,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    ASSERT_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(index, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
542 
543 
// Write barrier for a store of |value| into the field at |offset| within
// |object|.  Computes the field address into |dst| (clobbering it) and
// delegates to RecordWrite.  |value| and |dst| are clobbered; in debug
// builds they are zapped to provoke errors on misuse.
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the start
  // of the object, so the offset must be a multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // Verify the computed slot address is pointer-aligned.
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
586 
587 
// Write barrier for storing |map| into |object|'s map slot.  No barrier is
// emitted unless incremental marking is enabled; maps never live in new
// space, so only the map page's "pointers to here are interesting" flag is
// checked.  |scratch1| and |scratch2| are clobbered (zapped in debug code).
void MacroAssembler::RecordWriteForMap(
    Register object,
    Handle<Map> map,
    Register scratch1,
    Register scratch2,
    SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    // Verify the map slot address is pointer-aligned.
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set.  This optimization
  // relies on the fact that maps can never be in new space.
  ASSERT(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  // Delay the initialization of |address| and |value| for the stub until it's
  // known that they will be needed. Up until this point their values are not
  // needed since they are embedded in the operands of instructions that need
  // them.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));
  mov(value, Immediate(map));
  RecordWriteStub stub(object, value, address, OMIT_REMEMBERED_SET, save_fp);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
650 
651 
// General write barrier for a store of |value| into the slot at |address|
// inside |object|.  Skips the barrier for smis (optional inline check) and
// for stores that no collector is interested in (page-flag checks), then
// calls the RecordWriteStub.  |address| and |value| are clobbered; in debug
// builds they are zapped to provoke errors on misuse.
void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value,
                                 SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // Verify that |address| really points at a slot containing |value|.
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
714 
715 
716 #ifdef ENABLE_DEBUGGER_SUPPORT
// Emits a call into the runtime's kDebugBreak entry (zero arguments in eax,
// runtime function in ebx) via the CEntryStub, marked as a debug break site.
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(1);
  call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
}
723 #endif
724 
725 
// int32 -> double conversion.  xorps first breaks cvtsi2sd's dependency on
// the previous contents of |dst| (cvtsi2sd only writes the low lane).
void MacroAssembler::Cvtsi2sd(XMMRegister dst, const Operand& src) {
  xorps(dst, dst);
  cvtsi2sd(dst, src);
}
730 
731 
Set(Register dst,const Immediate & x)732 void MacroAssembler::Set(Register dst, const Immediate& x) {
733   if (x.is_zero()) {
734     xor_(dst, dst);  // Shorter than mov.
735   } else {
736     mov(dst, x);
737   }
738 }
739 
740 
// Stores the immediate |x| into the memory operand |dst|.
void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
744 
745 
IsUnsafeImmediate(const Immediate & x)746 bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
747   static const int kMaxImmediateBits = 17;
748   if (!RelocInfo::IsNone(x.rmode_)) return false;
749   return !is_intn(x.x_, kMaxImmediateBits);
750 }
751 
752 
// Like Set, but obscures wide immediates by XOR-ing them with the JIT
// cookie (mitigates JIT spraying); the emitted xor_ restores the value.
void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Set(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Set(dst, x);
  }
}
761 
762 
// Like push(Immediate), but obscures wide immediates with the JIT cookie
// and un-XORs the pushed stack slot in place.
void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
771 
772 
// Loads |heap_object|'s map into |map| and compares its instance type
// against |type| (flags set for a subsequent conditional jump).
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
779 
780 
// Compares the instance type stored in |map| against |type| with a byte
// compare.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
785 
786 
// Jumps to |fail| unless |map|'s elements kind is one of the fast kinds
// (smi/object, holey or packed).  Relies on the fast kinds occupying the
// lowest elements-kind values, asserted below.
void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}
798 
799 
// Jumps to |fail| unless |map|'s elements kind is a fast *object* kind:
// kinds at or below the fast-holey-smi bound (smi kinds) fail, as do kinds
// above the fast-holey-element bound (non-fast kinds).
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}
814 
815 
// Jumps to |fail| unless |map| has fast *smi* elements (FAST_SMI_ELEMENTS
// or FAST_HOLEY_SMI_ELEMENTS), i.e. bit field 2 is at or below the
// holey-smi maximum.
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}
825 
826 
// Stores |maybe_number| as a double into the FixedDoubleArray |elements| at
// smi index |key|, jumping to |fail| if it is neither a smi nor a heap
// number.  NaNs are canonicalized before storing so the hole NaN pattern is
// never written by accident.  |specialize_for_processor| selects the SSE2
// path over x87 when SSE2 is available.  |elements_offset| is subtracted
// from the array header size when addressing the destination slot.
// Clobbers scratch1 and (on the SSE2 path) scratch2.
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch1,
    XMMRegister scratch2,
    Label* fail,
    bool specialize_for_processor,
    int elements_offset) {
  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  // Compare the upper 32 bits of the value against the NaN/Infinity bound.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmp(FieldOperand(maybe_number, offset),
      Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  ExternalReference canonical_nan_reference =
      ExternalReference::address_of_canonical_non_hole_nan();
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatureScope use_sse2(this, SSE2);
    movsd(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    // |key| is a smi, so times_4 on the tagged value scales by 8 bytes
    // (one double) per untagged index.
    movsd(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset),
           scratch2);
  } else {
    // x87 fallback: value travels through the FPU stack instead of XMM.
    fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    fstp_d(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset));
  }
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  // Replace the NaN with the canonical (non-hole) NaN before storing.
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatureScope use_sse2(this, SSE2);
    movsd(scratch2, Operand::StaticVariable(canonical_nan_reference));
  } else {
    fld_d(Operand::StaticVariable(canonical_nan_reference));
  }
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch1, maybe_number);
  SmiUntag(scratch1);
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatureScope fscope(this, SSE2);
    Cvtsi2sd(scratch2, scratch1);
    movsd(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset),
           scratch2);
  } else {
    // x87 has no reg-to-FPU move: bounce the integer through the stack.
    push(scratch1);
    fild_s(Operand(esp, 0));
    pop(scratch1);
    fstp_d(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset));
  }
  bind(&done);
}
903 
904 
// Compares the map of |obj| against the handle |map|.  Sets flags only.
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}
908 
909 
// Jumps to |fail| if the map of |obj| is not |map|.  With DO_SMI_CHECK,
// smis are also routed to |fail| first (a smi has no map to read).
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}
921 
922 
// Jumps to the |success| code object if the map of |obj| equals |map|;
// otherwise falls through (after the local |fail| label).  |unused| is not
// used on ia32 — presumably kept for cross-platform signature parity
// (TODO: confirm against the other ports).
void MacroAssembler::DispatchMap(Register obj,
                                 Register unused,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(equal, success);

  bind(&fail);
}
937 
938 
// Loads the map and instance type of |heap_object| and returns the
// condition (zero) that holds when the object is a string; callers branch
// on the returned condition.  |map| and |instance_type| are clobbered.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  // Strings have a zero is-not-string bit, so ZF set <=> string.
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
948 
949 
// Loads the map and instance type of |heap_object| and returns the
// condition (below_equal) that holds when the object is a name, i.e. its
// instance type is at or below LAST_NAME_TYPE.  Clobbers both scratch regs.
Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
  return below_equal;
}
958 
959 
// Loads the map of |heap_object| into |map| and jumps to |fail| unless its
// instance type is in the non-callable spec-object range (see
// IsInstanceJSObjectType).  |scratch| is clobbered.
void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}
967 
968 
// Jumps to |fail| unless the instance type stored in |map| falls in
// [FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, LAST_NONCALLABLE_SPEC_OBJECT_TYPE].
// Uses the classic subtract-then-unsigned-compare trick to do the range
// check with a single branch.  |scratch| is clobbered.
void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  cmp(scratch,
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  j(above, fail);
}
978 
979 
// Compares ST(0) with ST(1), pops both off the FPU stack, and leaves the
// result in EFLAGS.  On CPUs with CMOV (P6+), fucomip compares straight
// into EFLAGS and pops once; fstp(0) discards the remaining value.  On
// older CPUs, fucompp pops both and the FPU status word is transferred to
// EFLAGS via fnstsw/sahf, preserving eax around the transfer.
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    fstp(0);
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}
992 
993 
// Debug-mode check: aborts with kOperandNotANumber unless |object| is a smi
// or a heap number.  Emits nothing in release builds.
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}
1004 
1005 
// Debug-mode check: aborts unless |object| is a smi (tag bits are zero,
// hence the 'equal' / zero condition).  Emits nothing in release builds.
void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}
1012 
1013 
// Debug-mode check: aborts unless |object| is a string (not a smi and its
// instance type is below FIRST_NONSTRING_TYPE).  |object| is preserved by
// pushing it while its map is temporarily loaded into the same register.
void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}
1025 
1026 
// Debug-mode check: aborts unless |object| is a name (not a smi and its
// instance type is at or below LAST_NAME_TYPE).  |object| is preserved via
// push/pop around the map load.
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}
1038 
1039 
// Debug-mode check: aborts if |object| is a smi.  Emits nothing in release
// builds.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}
1046 
1047 
// Emits the function prologue.  Stub frames push a STUB marker instead of
// the JS function.  For regular frames, when code pre-aging is active a
// call to the MarkCodeAsExecutedOnce builtin is emitted and nop-padded to
// exactly kNoCodeAgeSequenceLength bytes (enforced by the
// PredictableCodeSizeScope) so the sequence can later be patched in place;
// otherwise the standard ebp/esi/edi frame setup is emitted.
void MacroAssembler::Prologue(PrologueFrameMode frame_mode) {
  if (frame_mode == BUILD_STUB_FRAME) {
    push(ebp);  // Caller's frame pointer.
    mov(ebp, esp);
    push(esi);  // Callee's context.
    push(Immediate(Smi::FromInt(StackFrame::STUB)));
  } else {
    PredictableCodeSizeScope predictible_code_size_scope(this,
        kNoCodeAgeSequenceLength);
    if (isolate()->IsCodePreAgingActive()) {
        // Pre-age the code.
      call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
          RelocInfo::CODE_AGE_SEQUENCE);
      // Pad to the fixed sequence length so patching stays in bounds.
      Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
    } else {
      push(ebp);  // Caller's frame pointer.
      mov(ebp, esp);
      push(esi);  // Callee's context.
      push(edi);  // Callee's JS function.
    }
  }
}
1070 
1071 
// Builds a typed internal frame: saved ebp, context (esi), a smi frame-type
// marker, and the code object.  In debug mode, verifies that the pushed
// code object slot is not the undefined value, i.e. it was properly
// patched.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
1083 
1084 
// Tears down a frame built by EnterFrame.  In debug mode, first verifies
// that the frame's marker slot matches |type|.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  leave();
}
1093 
1094 
// Builds the fixed part of an exit frame (for calls out to C/C++): saves
// ebp, reserves the entry-sp slot (patched later by the epilogue), pushes
// the code object, and records ebp and esi in the isolate's c_entry_fp and
// context slots.
void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset  == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
1114 
1115 
EnterExitFrameEpilogue(int argc,bool save_doubles)1116 void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
1117   // Optionally save all XMM registers.
1118   if (save_doubles) {
1119     CpuFeatureScope scope(this, SSE2);
1120     int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
1121     sub(esp, Immediate(space));
1122     const int offset = -2 * kPointerSize;
1123     for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
1124       XMMRegister reg = XMMRegister::from_code(i);
1125       movsd(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
1126     }
1127   } else {
1128     sub(esp, Immediate(argc * kPointerSize));
1129   }
1130 
1131   // Get the required frame alignment for the OS.
1132   const int kFrameAlignment = OS::ActivationFrameAlignment();
1133   if (kFrameAlignment > 0) {
1134     ASSERT(IsPowerOf2(kFrameAlignment));
1135     and_(esp, -kFrameAlignment);
1136   }
1137 
1138   // Patch the saved entry sp.
1139   mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
1140 }
1141 
1142 
// Enters an exit frame for a call to a C function.  On entry eax holds the
// argument count.  Leaves the count in edi and a pointer to the first
// argument in esi, then reserves three slots (argc, argv, isolate).
void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}
1154 
1155 
// Enters an exit frame for an API call with |argc| argument slots; never
// saves XMM registers.
void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}
1160 
1161 
// Leaves an exit frame entered with EnterExitFrame, restoring XMM registers
// if they were saved.  Uses esi (which EnterExitFrame pointed at the first
// argument) to pop the arguments and receiver, then restores the context
// in the epilogue.
void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    CpuFeatureScope scope(this, SSE2);
    // Same layout as the spill loop in EnterExitFrameEpilogue.
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue(true);
}
1185 
1186 
// Restores the context from the isolate (when |restore_context| is set) and
// clears the isolate's saved context (debug builds only) and c_entry_fp.
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
1202 
1203 
// Leaves an API exit frame: collapses the stack back to the saved ebp and
// runs the shared epilogue (context restore is optional for API calls).
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}
1210 
1211 
// Pushes a new try-handler frame (fp, context, state, code, next — built
// bottom-up; layout pinned by the static asserts) and links it in as the
// isolate's current handler.  JS_ENTRY handlers save a NULL frame pointer
// and a smi-zero context, which unwinders use to know there is no JS frame
// to restore into.
void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame so we save NULL for
    // ebp. We expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(Smi::FromInt(0)));  // No context.
  } else {
    push(ebp);
    push(esi);
  }
  // Push the state and the code object.
  // State packs the handler-table index and the handler kind.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));
  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}
1247 
1248 
// Unlinks the current try handler (popping its 'next' slot back into the
// isolate's handler address) and drops the rest of the handler frame.
void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
1255 
1256 
// Computes the handler entry address from the code object's handler table
// (indexed by the handler state) and jumps to it.
void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it.  The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // eax = exception, edi = code object, edx = state.
  mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
  // Shift out the kind bits to leave the handler-table index.
  shr(edx, StackHandler::kKindWidth);
  mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
  SmiUntag(edx);
  lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  jmp(edi);
}
1268 
1269 
// Throws |value| (moved into eax) by unwinding to the topmost try handler:
// drops the stack to the handler, unlinks it, restores its context and
// frame pointer, and jumps to the handler entry via JumpToHandlerEntry.
// For JS entry handlers, ebp/esi were saved as zero, so the context
// write-back into the frame is skipped.
void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));
  // Restore the next handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Restore the context and frame pointer.
  pop(esi);  // Context.
  pop(ebp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == ENTRY) == (ebp == 0) == (esi == 0), so we could test either
  // ebp or esi.
  Label skip;
  test(esi, esi);
  j(zero, &skip, Label::kNear);
  mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  bind(&skip);

  JumpToHandlerEntry();
}
1308 
1309 
// Throws |value| past all JS try handlers: walks the handler chain until
// the topmost JS_ENTRY handler is found, unlinks everything up to and
// including it, and jumps to its entry.  The entry handler saved zero for
// both context and frame pointer, so esi/ebp come back cleared.
void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  // Non-zero kind bits mean a non-entry handler: keep walking.
  test(Operand(esp, StackHandlerConstants::kStateOffset),
       Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Clear the context pointer and frame pointer (0 was saved in the handler).
  pop(esi);
  pop(ebp);

  JumpToHandlerEntry();
}
1352 
1353 
// Security check for accesses through a global proxy: jumps to |miss|
// unless the current lexical native context either is the same as the
// native context of |holder_reg|, or carries the same security token.
// Clobbers scratch1 and scratch2; holder_reg is preserved.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch1));
  ASSERT(!holder_reg.is(scratch2));
  ASSERT(!scratch1.is(scratch2));

  // Load current lexical context from the stack frame.
  mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  int offset =
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, offset));
  mov(scratch1, FieldOperand(scratch1, GlobalObject::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Read the first word and compare to native_context_map.
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map(),
    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Compare the security-token slots of the two native contexts.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
1418 
1419 
// Compute the hash code from the untagged key.  This must be kept in sync
// with ComputeIntegerHash in utils.h and with KeyedLoadGenericElementStub
// in code-stubs-hydrogen.cc.
//
// Note: r0 will contain the hash code on return.
// Computes the seeded integer hash of the untagged key in |r0|, leaving the
// hash in |r0|.  |scratch| is clobbered.
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (Serializer::enabled()) {
    // Load the seed from the roots array at runtime instead of baking it in
    // as an immediate — presumably to keep the snapshot heap-independent
    // (NOTE(review): confirm against the serializer docs).
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
}
1462 
1463 
1464 
// Looks up the smi |key| in the SeededNumberDictionary |elements| with an
// unrolled quadratic probe sequence; on success falls through with the
// value in |result|, otherwise jumps to |miss|.
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key      - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);  // Capacity is a power of two, so capacity - 1 is the mask.

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    ASSERT(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      // Last probe: anything but a match is a miss.
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  ASSERT_EQ(NORMAL, 0);
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
1537 
1538 
LoadAllocationTopHelper(Register result,Register scratch,AllocationFlags flags)1539 void MacroAssembler::LoadAllocationTopHelper(Register result,
1540                                              Register scratch,
1541                                              AllocationFlags flags) {
1542   ExternalReference allocation_top =
1543       AllocationUtils::GetAllocationTopReference(isolate(), flags);
1544 
1545   // Just return if allocation top is already known.
1546   if ((flags & RESULT_CONTAINS_TOP) != 0) {
1547     // No use of scratch if allocation top is provided.
1548     ASSERT(scratch.is(no_reg));
1549 #ifdef DEBUG
1550     // Assert that result actually contains top on entry.
1551     cmp(result, Operand::StaticVariable(allocation_top));
1552     Check(equal, kUnexpectedAllocationTop);
1553 #endif
1554     return;
1555   }
1556 
1557   // Move address of new object to result. Use scratch register if available.
1558   if (scratch.is(no_reg)) {
1559     mov(result, Operand::StaticVariable(allocation_top));
1560   } else {
1561     mov(scratch, Immediate(allocation_top));
1562     mov(result, Operand(scratch, 0));
1563   }
1564 }
1565 
1566 
UpdateAllocationTopHelper(Register result_end,Register scratch,AllocationFlags flags)1567 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
1568                                                Register scratch,
1569                                                AllocationFlags flags) {
1570   if (emit_debug_code()) {
1571     test(result_end, Immediate(kObjectAlignmentMask));
1572     Check(zero, kUnalignedAllocationInNewSpace);
1573   }
1574 
1575   ExternalReference allocation_top =
1576       AllocationUtils::GetAllocationTopReference(isolate(), flags);
1577 
1578   // Update new top. Use scratch if available.
1579   if (scratch.is(no_reg)) {
1580     mov(Operand::StaticVariable(allocation_top), result_end);
1581   } else {
1582     mov(Operand(scratch, 0), result_end);
1583   }
1584 }
1585 
1586 
// Allocates an object of fixed byte size |object_size| in the space selected
// by |flags|, leaving its (optionally tagged) address in |result| and, when
// |result_end| is valid, the new allocation top in |result_end|. |scratch|
// (optional) caches the allocation top address. Jumps to |gc_required| when
// inline allocation is disabled or the space is exhausted.
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    // Inline allocation disabled: always take the slow (runtime) path.
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      // Old data space is not guaranteed aligned at the limit, so check
      // before writing the filler.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    // Plug the alignment gap with a one-pointer filler object.
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  // If no result_end was supplied, compute the new top in |result| itself.
  Register top_reg = result_end.is_valid() ? result_end : result;
  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch, flags);

  // Tag result if requested.
  bool tag_result = (flags & TAG_OBJECT) != 0;
  if (top_reg.is(result)) {
    // |result| currently holds the new top; subtract back to the object
    // start, folding in the heap-object tag when requested.
    if (tag_result) {
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if (tag_result) {
    // Tagging is a simple increment because the tag is 1.
    ASSERT(kHeapObjectTag == 1);
    inc(result);
  }
}
1661 
1662 
// Allocates an object of |header_size| + |element_count| * |element_size|
// bytes in the space selected by |flags|. |element_count_type| states
// whether |element_count| holds a smi or a raw int32. Leaves the (optionally
// tagged) object address in |result| and the new allocation top in
// |result_end|; jumps to |gc_required| on failure.
void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              RegisterValueType element_count_type,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  ASSERT((flags & SIZE_IN_WORDS) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    // Inline allocation disabled: always take the slow (runtime) path.
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      // Old data space is not guaranteed aligned at the limit, so check
      // before writing the filler.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    // Plug the alignment gap with a one-pointer filler object.
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  // We assume that element_count*element_size + header_size does not
  // overflow.
  if (element_count_type == REGISTER_VALUE_IS_SMI) {
    // A smi is the value shifted left by one, so compensate by dropping one
    // power of two from the scale factor instead of untagging the register.
    STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
    ASSERT(element_size >= times_2);
    ASSERT(kSmiTagSize == 1);
    element_size = static_cast<ScaleFactor>(element_size - 1);
  } else {
    ASSERT(element_count_type == REGISTER_VALUE_IS_INT32);
  }
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result if requested; tagging is a simple increment because the tag
  // is 1.
  if ((flags & TAG_OBJECT) != 0) {
    ASSERT(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
1739 
1740 
// Allocates an object whose byte size is held in register |object_size| in
// the space selected by |flags|. Leaves the (optionally tagged) object
// address in |result| and the new allocation top in |result_end|; jumps to
// |gc_required| on failure. |object_size| itself is not modified.
void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    // Inline allocation disabled: always take the slow (runtime) path.
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      // Old data space is not guaranteed aligned at the limit, so check
      // before writing the filler.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    // Plug the alignment gap with a one-pointer filler object.
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result if requested; tagging is a simple increment because the tag
  // is 1.
  if ((flags & TAG_OBJECT) != 0) {
    ASSERT(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
1805 
1806 
// Rolls back the most recent new-space allocation by resetting the
// allocation top to |object|. |object| may be tagged; the tag bits are
// stripped first. |object| is clobbered (untagged in place).
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  // The memory being undone must lie below the current top, i.e. it must
  // actually have been allocated.
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, kUndoAllocationOfNonAllocatedMemory);
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}
1819 
1820 
// Allocates a HeapNumber object in new space, returning the tagged pointer
// in |result|. |scratch1|/|scratch2| are clobbered; jumps to |gc_required|
// if allocation fails. The value field is left uninitialized.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->heap_number_map()));
}
1833 
1834 
// Allocates a SeqTwoByteString of |length| characters in new space. The
// tagged string is returned in |result| with map, length, and hash field
// initialized (characters are not). All three scratch registers are
// clobbered; jumps to |gc_required| on failure.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  // Store the length as a smi.
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1869 
1870 
// Allocates a SeqOneByteString of |length| characters in new space. The
// tagged string is returned in |result| with map, length, and hash field
// initialized (characters are not). All three scratch registers are
// clobbered; jumps to |gc_required| on failure.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  // Round the byte count up to object alignment.
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate ASCII string in new space.
  Allocate(SeqOneByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  // Store the length as a smi.
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1905 
1906 
// Allocates a SeqOneByteString of compile-time-constant |length| (> 0) in
// new space. The tagged string is returned in |result| with map, length,
// and hash field initialized; jumps to |gc_required| on failure.
void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate ASCII string in new space.
  Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
           gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1926 
1927 
// Allocates a two-byte ConsString in new space, returning the tagged pointer
// in |result|. Scratch registers are clobbered; jumps to |gc_required| on
// failure. Only the map is initialized.
void MacroAssembler::AllocateTwoByteConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string object in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}
1940 
1941 
// Allocates an ASCII ConsString, returning the tagged pointer in |result|.
// When the new-space high-promotion mode flag is set, the string is
// pretenured into old pointer space instead of new space. Only the map is
// initialized; jumps to |gc_required| on failure.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  Label allocate_new_space, install_map;
  AllocationFlags flags = TAG_OBJECT;

  ExternalReference high_promotion_mode = ExternalReference::
      new_space_high_promotion_mode_active_address(isolate());

  // Choose the target space based on the runtime promotion-mode flag.
  test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
  j(zero, &allocate_new_space);

  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
  jmp(&install_map);

  bind(&allocate_new_space);
  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           flags);

  bind(&install_map);
  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_ascii_string_map()));
}
1976 
1977 
// Allocates a two-byte SlicedString in new space, returning the tagged
// pointer in |result|. Scratch registers are clobbered; jumps to
// |gc_required| on failure. Only the map is initialized.
void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                          Register scratch1,
                                          Register scratch2,
                                          Label* gc_required) {
  // Allocate sliced string object in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}
1990 
1991 
// Allocates an ASCII SlicedString in new space, returning the tagged pointer
// in |result|. Scratch registers are clobbered; jumps to |gc_required| on
// failure. Only the map is initialized.
void MacroAssembler::AllocateAsciiSlicedString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate sliced string object in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_ascii_string_map()));
}
2004 
2005 
2006 // Copy memory, byte-by-byte, from source to destination.  Not optimized for
2007 // long or aligned copies.  The contents of scratch and length are destroyed.
2008 // Source and destination are incremented by length.
2009 // Many variants of movsb, loop unrolling, word moves, and indexed operands
2010 // have been tried here already, and this is fastest.
2011 // A simpler loop is faster on small copies, but 30% slower on large ones.
2012 // The cld() instruction must have been emitted, to set the direction flag(),
2013 // before calling this function.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label short_loop, len4, len8, len12, done, short_string;
  // rep_movs requires the fixed esi/edi/ecx register assignment.
  ASSERT(source.is(esi));
  ASSERT(destination.is(edi));
  ASSERT(length.is(ecx));
  cmp(length, Immediate(4));
  j(below, &short_string, Label::kNear);

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);

  // Dispatch on length: 4..8, 9..12, 13..16 bytes get unrolled word copies;
  // anything longer falls through to rep_movs.
  cmp(length, Immediate(8));
  j(below_equal, &len4, Label::kNear);
  cmp(length, Immediate(12));
  j(below_equal, &len8, Label::kNear);
  cmp(length, Immediate(16));
  j(below_equal, &len12, Label::kNear);

  // Bulk copy: ecx/4 dwords via rep_movs, then advance destination past the
  // 0-3 remaining bytes (already copied by the tail store above).
  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(scratch, Immediate(0x3));
  add(destination, scratch);
  jmp(&done, Label::kNear);

  // Unrolled copies; each case falls through to the shorter ones. The last
  // (unaligned) word was already copied above.
  bind(&len12);
  mov(scratch, Operand(source, 8));
  mov(Operand(destination, 8), scratch);
  bind(&len8);
  mov(scratch, Operand(source, 4));
  mov(Operand(destination, 4), scratch);
  bind(&len4);
  mov(scratch, Operand(source, 0));
  mov(Operand(destination, 0), scratch);
  add(destination, length);
  jmp(&done, Label::kNear);

  // Fewer than 4 bytes: copy one byte at a time (zero length copies nothing).
  bind(&short_string);
  test(length, length);
  j(zero, &done, Label::kNear);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
2071 
2072 
// Stores |filler| into every pointer-sized slot in [start_offset,
// end_offset). |start_offset| is advanced to |end_offset|; |end_offset| and
// |filler| are unchanged.
void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  Label loop, entry;
  // Test-at-the-bottom loop: handles an empty range without a store.
  jmp(&entry);
  bind(&loop);
  mov(Operand(start_offset, 0), filler);
  add(start_offset, Immediate(kPointerSize));
  bind(&entry);
  cmp(start_offset, end_offset);
  j(less, &loop);
}
2085 
2086 
BooleanBitTest(Register object,int field_offset,int bit_index)2087 void MacroAssembler::BooleanBitTest(Register object,
2088                                     int field_offset,
2089                                     int bit_index) {
2090   bit_index += kSmiTagSize + kSmiShiftSize;
2091   ASSERT(IsPowerOf2(kBitsPerByte));
2092   int byte_index = bit_index / kBitsPerByte;
2093   int byte_bit_index = bit_index & (kBitsPerByte - 1);
2094   test_b(FieldOperand(object, field_offset + byte_index),
2095          static_cast<byte>(1 << byte_bit_index));
2096 }
2097 
2098 
2099 
// Jumps to |then_label| if the multiplication result in |result| is zero
// while operand |op| is negative, i.e. the result should be negative zero.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  // Non-zero result cannot be negative zero.
  test(result, result);
  j(not_zero, &ok);
  // Result is zero: it is -0 exactly when the operand was negative.
  test(op, op);
  j(sign, then_label);
  bind(&ok);
}
2110 
2111 
// Jumps to |then_label| if |result| is zero while the sum of the signs of
// |op1| and |op2| indicates a negative product, i.e. the result should be
// negative zero. |scratch| is clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  // Non-zero result cannot be negative zero.
  test(result, result);
  j(not_zero, &ok);
  // Result is zero: it is -0 when either operand was negative.
  mov(scratch, op1);
  or_(scratch, op2);
  j(sign, then_label);
  bind(&ok);
}
2125 
2126 
// Loads the prototype that instances created by |function| would get into
// |result|. Jumps to |miss| if |function| is not a JSFunction, has no
// instance prototype yet (hole), or (optionally) is a bound function.
// |scratch| is clobbered; |result| also transiently holds the map.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss,
                                             bool miss_on_bound_function) {
  // Check that the receiver isn't a smi.
  JumpIfSmi(function, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  if (miss_on_bound_function) {
    // If a bound function, go to miss label.
    mov(scratch,
        FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
    BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
                   SharedFunctionInfo::kBoundFunction);
    j(not_zero, miss);
  }

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
2181 
2182 
// Emits a call to |stub|'s code object. |ast_id| associates the call site
// with type feedback when provided.
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  ASSERT(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id);
}
2187 
2188 
// Emits a jump (tail call) to |stub|'s code object.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  jmp(stub->GetCode(isolate()), RelocInfo::CODE_TARGET);
}
2192 
2193 
// Returns from a stub, popping |argc| - 1 arguments off the caller's stack
// (the receiver itself is popped by ret's implicit return-address pop).
void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
2198 
2199 
AllowThisStubCall(CodeStub * stub)2200 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
2201   return has_frame_ || !stub->SometimesSetsUpAFrame();
2202 }
2203 
2204 
// Handles a runtime call with the wrong number of arguments: drops the
// |num_arguments| values from the stack and produces undefined in eax.
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(isolate()->factory()->undefined_value()));
}
2211 
2212 
// Extracts the cached array index from a string hash field in |hash| and
// leaves it as a smi in |index|. |hash| is clobbered; |hash| and |index|
// may be the same register.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key.  kArrayIndexValueMask has zeros in
  // the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  // Shift right so that exactly kSmiTagSize low bits remain, yielding a smi.
  if (String::kHashShift > kSmiTagSize) {
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}
2230 
2231 
// Calls runtime function |f| with |num_arguments| arguments already on the
// stack, going through the CEntry stub. FP registers are saved only when
// requested and SSE2 is available. Clobbers eax/ebx for the calling
// convention.
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  // Without SSE2 there are no XMM registers to preserve.
  CEntryStub ces(1, CpuFeatures::IsSupported(SSE2) ? save_doubles
                                                   : kDontSaveFPRegs);
  CallStub(&ces);
}
2253 
2254 
// Calls the external (runtime) function |ref| with |num_arguments| arguments
// already on the stack, via the CEntry stub. Clobbers eax/ebx.
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}
2263 
2264 
// Tail-calls the external (runtime) function |ext| with |num_arguments|
// arguments already on the stack. |result_size| is unused on ia32.
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}
2275 
2276 
// Tail-calls runtime function |fid|; convenience wrapper around
// TailCallExternalReference.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}
2284 
2285 
// Returns the stack operand for API call parameter |index| (esp-relative,
// one pointer-sized slot per parameter).
Operand ApiParameterOperand(int index) {
  return Operand(esp, index * kPointerSize);
}
2289 
2290 
// Sets up an API exit frame with room for |argc| parameters. In debug mode
// esi is zapped since the context is not valid across the API call.
void MacroAssembler::PrepareCallApiFunction(int argc) {
  EnterApiExitFrame(argc);
  if (emit_debug_code()) {
    mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
2297 
2298 
// Calls an API callback at |function_address| (optionally routed through the
// profiler |thunk_address|) and returns to the JS caller.  On the way out it:
//  - maintains the isolate's HandleScope next/limit/level bookkeeping,
//  - logs enter/leave timer events when --log-timer-events is on,
//  - promotes a scheduled exception if the callback set one,
//  - optionally restores esi from |*context_restore_operand| (may be NULL),
//  - leaves the API exit frame and pops |stack_space| pointer-sized slots.
// The callback result is loaded from |return_value_operand| into eax.
void MacroAssembler::CallApiFunctionAndReturn(
    Address function_address,
    Address thunk_address,
    Operand thunk_last_arg,
    int stack_space,
    Operand return_value_operand,
    Operand* context_restore_operand) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate());
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address(isolate());
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address(isolate());

  // Allocate HandleScope in callee-save registers.
  // ebx = saved next, edi = saved limit; both survive the callback call.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1, eax);
    mov(Operand(esp, 0),
        Immediate(ExternalReference::isolate_address(isolate())));
    CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
    PopSafepointRegisters();
  }


  // Re-read the profiler flag byte at run time: route the call through the
  // invocation thunk while the CPU profiler is active so the callback shows
  // up in profiles.
  Label profiler_disabled;
  Label end_profiler_check;
  bool* is_profiling_flag =
      isolate()->cpu_profiler()->is_profiling_address();
  STATIC_ASSERT(sizeof(*is_profiling_flag) == 1);
  mov(eax, Immediate(reinterpret_cast<Address>(is_profiling_flag)));
  cmpb(Operand(eax, 0), 0);
  j(zero, &profiler_disabled);

  // Additional parameter is the address of the actual getter function.
  mov(thunk_last_arg, Immediate(function_address));
  // Call the api function.
  call(thunk_address, RelocInfo::RUNTIME_ENTRY);
  jmp(&end_profiler_check);

  bind(&profiler_disabled);
  // Call the api function.
  call(function_address, RelocInfo::RUNTIME_ENTRY);
  bind(&end_profiler_check);

  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1, eax);
    mov(Operand(esp, 0),
        Immediate(ExternalReference::isolate_address(isolate())));
    CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
    PopSafepointRegisters();
  }

  Label prologue;
  // Load the value from ReturnValue
  mov(eax, return_value_operand);

  Label promote_scheduled_exception;
  Label exception_handled;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  // The Assert checks the flags set by the sub above: level must not
  // underflow.
  Assert(above_equal, kInvalidHandleScopeLevel);
  cmp(edi, Operand::StaticVariable(limit_address));
  // Limit moved => the callback allocated handle-scope extensions that must
  // be deleted via the runtime (see below); edi still holds the old limit.
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(isolate()->factory()->the_hole_value()));
  j(not_equal, &promote_scheduled_exception);
  bind(&exception_handled);

#if ENABLE_EXTRA_CHECKS
  // Check if the function returned a valid JavaScript value.
  // Accepted: smi, string, spec object, heap number, undefined, true,
  // false, or null.  Anything else aborts.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  JumpIfSmi(return_value, &ok, Label::kNear);
  mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  CmpInstanceType(map, FIRST_NONSTRING_TYPE);
  j(below, &ok, Label::kNear);

  CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
  j(above_equal, &ok, Label::kNear);

  cmp(map, isolate()->factory()->heap_number_map());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->undefined_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->true_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->false_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->null_value());
  j(equal, &ok, Label::kNear);

  Abort(kAPICallReturnedInvalidObject);

  bind(&ok);
#endif

  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    mov(esi, *context_restore_operand);
  }
  LeaveApiExitFrame(!restore_context);
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  {
    FrameScope frame(this, StackFrame::INTERNAL);
    CallRuntime(Runtime::kPromoteScheduledException, 0);
  }
  jmp(&exception_handled);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate());
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  // Preserve the return value (eax) in edi across the C call.
  mov(edi, eax);
  mov(Operand(esp, 0),
      Immediate(ExternalReference::isolate_address(isolate())));
  mov(eax, Immediate(delete_extensions));
  call(eax);
  mov(eax, edi);
  jmp(&leave_exit_frame);
}
2448 
2449 
JumpToExternalReference(const ExternalReference & ext)2450 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
2451   // Set the entry point and jump to the C entry runtime stub.
2452   mov(ebx, Immediate(ext));
2453   CEntryStub ces(1);
2454   jmp(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
2455 }
2456 
2457 
SetCallKind(Register dst,CallKind call_kind)2458 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
2459   // This macro takes the dst register to make the code more readable
2460   // at the call sites. However, the dst register has to be ecx to
2461   // follow the calling convention which requires the call type to be
2462   // in ecx.
2463   ASSERT(dst.is(ecx));
2464   if (call_kind == CALL_AS_FUNCTION) {
2465     // Set to some non-zero smi by updating the least significant
2466     // byte.
2467     mov_b(dst, 1 << kSmiTagSize);
2468   } else {
2469     // Set to smi zero by clearing the register.
2470     xor_(dst, dst);
2471   }
2472 }
2473 
2474 
// Emits the argument-count check that precedes every JS function invocation.
// If expected and actual counts provably match, falls through (and may set
// *definitely_mismatches = false without emitting anything).  Otherwise
// routes mismatching invocations through the ArgumentsAdaptorTrampoline.
// Register contract: actual count in eax, expected count in ebx, code to
// invoke in edx (either |code_constant| or |code_operand|), call kind in ecx.
// On a provable mismatch (*definitely_mismatches = true) control transfers
// to the adaptor and never reaches |done|.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      // The adaptor expects the code entry in edx; bias past the Code header.
      mov(edx, Immediate(code_constant));
      add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      SetCallKind(ecx, call_kind);
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      // Only a runtime-detected mismatch returns here; a definite mismatch
      // never comes back, so no jump to |done| is needed.
      if (!*definitely_mismatches) {
        jmp(done, done_near);
      }
    } else {
      SetCallKind(ecx, call_kind);
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
2550 
2551 
InvokeCode(const Operand & code,const ParameterCount & expected,const ParameterCount & actual,InvokeFlag flag,const CallWrapper & call_wrapper,CallKind call_kind)2552 void MacroAssembler::InvokeCode(const Operand& code,
2553                                 const ParameterCount& expected,
2554                                 const ParameterCount& actual,
2555                                 InvokeFlag flag,
2556                                 const CallWrapper& call_wrapper,
2557                                 CallKind call_kind) {
2558   // You can't call a function without a valid frame.
2559   ASSERT(flag == JUMP_FUNCTION || has_frame());
2560 
2561   Label done;
2562   bool definitely_mismatches = false;
2563   InvokePrologue(expected, actual, Handle<Code>::null(), code,
2564                  &done, &definitely_mismatches, flag, Label::kNear,
2565                  call_wrapper, call_kind);
2566   if (!definitely_mismatches) {
2567     if (flag == CALL_FUNCTION) {
2568       call_wrapper.BeforeCall(CallSize(code));
2569       SetCallKind(ecx, call_kind);
2570       call(code);
2571       call_wrapper.AfterCall();
2572     } else {
2573       ASSERT(flag == JUMP_FUNCTION);
2574       SetCallKind(ecx, call_kind);
2575       jmp(code);
2576     }
2577     bind(&done);
2578   }
2579 }
2580 
2581 
InvokeCode(Handle<Code> code,const ParameterCount & expected,const ParameterCount & actual,RelocInfo::Mode rmode,InvokeFlag flag,const CallWrapper & call_wrapper,CallKind call_kind)2582 void MacroAssembler::InvokeCode(Handle<Code> code,
2583                                 const ParameterCount& expected,
2584                                 const ParameterCount& actual,
2585                                 RelocInfo::Mode rmode,
2586                                 InvokeFlag flag,
2587                                 const CallWrapper& call_wrapper,
2588                                 CallKind call_kind) {
2589   // You can't call a function without a valid frame.
2590   ASSERT(flag == JUMP_FUNCTION || has_frame());
2591 
2592   Label done;
2593   Operand dummy(eax, 0);
2594   bool definitely_mismatches = false;
2595   InvokePrologue(expected, actual, code, dummy, &done, &definitely_mismatches,
2596                  flag, Label::kNear, call_wrapper, call_kind);
2597   if (!definitely_mismatches) {
2598     if (flag == CALL_FUNCTION) {
2599       call_wrapper.BeforeCall(CallSize(code, rmode));
2600       SetCallKind(ecx, call_kind);
2601       call(code, rmode);
2602       call_wrapper.AfterCall();
2603     } else {
2604       ASSERT(flag == JUMP_FUNCTION);
2605       SetCallKind(ecx, call_kind);
2606       jmp(code, rmode);
2607     }
2608     bind(&done);
2609   }
2610 }
2611 
2612 
InvokeFunction(Register fun,const ParameterCount & actual,InvokeFlag flag,const CallWrapper & call_wrapper,CallKind call_kind)2613 void MacroAssembler::InvokeFunction(Register fun,
2614                                     const ParameterCount& actual,
2615                                     InvokeFlag flag,
2616                                     const CallWrapper& call_wrapper,
2617                                     CallKind call_kind) {
2618   // You can't call a function without a valid frame.
2619   ASSERT(flag == JUMP_FUNCTION || has_frame());
2620 
2621   ASSERT(fun.is(edi));
2622   mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2623   mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2624   mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2625   SmiUntag(ebx);
2626 
2627   ParameterCount expected(ebx);
2628   InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2629              expected, actual, flag, call_wrapper, call_kind);
2630 }
2631 
2632 
InvokeFunction(Register fun,const ParameterCount & expected,const ParameterCount & actual,InvokeFlag flag,const CallWrapper & call_wrapper,CallKind call_kind)2633 void MacroAssembler::InvokeFunction(Register fun,
2634                                     const ParameterCount& expected,
2635                                     const ParameterCount& actual,
2636                                     InvokeFlag flag,
2637                                     const CallWrapper& call_wrapper,
2638                                     CallKind call_kind) {
2639   // You can't call a function without a valid frame.
2640   ASSERT(flag == JUMP_FUNCTION || has_frame());
2641 
2642   ASSERT(fun.is(edi));
2643   mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2644 
2645   InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2646              expected, actual, flag, call_wrapper, call_kind);
2647 }
2648 
2649 
InvokeFunction(Handle<JSFunction> function,const ParameterCount & expected,const ParameterCount & actual,InvokeFlag flag,const CallWrapper & call_wrapper,CallKind call_kind)2650 void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
2651                                     const ParameterCount& expected,
2652                                     const ParameterCount& actual,
2653                                     InvokeFlag flag,
2654                                     const CallWrapper& call_wrapper,
2655                                     CallKind call_kind) {
2656   LoadHeapObject(edi, function);
2657   InvokeFunction(edi, expected, actual, flag, call_wrapper, call_kind);
2658 }
2659 
2660 
InvokeBuiltin(Builtins::JavaScript id,InvokeFlag flag,const CallWrapper & call_wrapper)2661 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
2662                                    InvokeFlag flag,
2663                                    const CallWrapper& call_wrapper) {
2664   // You can't call a builtin without a valid frame.
2665   ASSERT(flag == JUMP_FUNCTION || has_frame());
2666 
2667   // Rely on the assertion to check that the number of provided
2668   // arguments match the expected number of arguments. Fake a
2669   // parameter count to avoid emitting code to do the check.
2670   ParameterCount expected(0);
2671   GetBuiltinFunction(edi, id);
2672   InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2673              expected, expected, flag, call_wrapper, CALL_AS_METHOD);
2674 }
2675 
2676 
GetBuiltinFunction(Register target,Builtins::JavaScript id)2677 void MacroAssembler::GetBuiltinFunction(Register target,
2678                                         Builtins::JavaScript id) {
2679   // Load the JavaScript builtin function from the builtins object.
2680   mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2681   mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
2682   mov(target, FieldOperand(target,
2683                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
2684 }
2685 
2686 
GetBuiltinEntry(Register target,Builtins::JavaScript id)2687 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
2688   ASSERT(!target.is(edi));
2689   // Load the JavaScript builtin function from the builtins object.
2690   GetBuiltinFunction(edi, id);
2691   // Load the code entry point from the function into the target register.
2692   mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2693 }
2694 
2695 
LoadContext(Register dst,int context_chain_length)2696 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2697   if (context_chain_length > 0) {
2698     // Move up the chain of contexts to the context containing the slot.
2699     mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2700     for (int i = 1; i < context_chain_length; i++) {
2701       mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2702     }
2703   } else {
2704     // Slot is in the current function context.  Move it into the
2705     // destination register in case we store into it (the write barrier
2706     // cannot be allowed to destroy the context in esi).
2707     mov(dst, esi);
2708   }
2709 
2710   // We should not have found a with context by walking the context chain
2711   // (i.e., the static scope chain and runtime context chain do not agree).
2712   // A variable occurring in such a scope should have slot type LOOKUP and
2713   // not CONTEXT.
2714   if (emit_debug_code()) {
2715     cmp(FieldOperand(dst, HeapObject::kMapOffset),
2716         isolate()->factory()->with_context_map());
2717     Check(not_equal, kVariableResolvedToWithContext);
2718   }
2719 }
2720 
2721 
LoadTransitionedArrayMapConditional(ElementsKind expected_kind,ElementsKind transitioned_kind,Register map_in_out,Register scratch,Label * no_map_match)2722 void MacroAssembler::LoadTransitionedArrayMapConditional(
2723     ElementsKind expected_kind,
2724     ElementsKind transitioned_kind,
2725     Register map_in_out,
2726     Register scratch,
2727     Label* no_map_match) {
2728   // Load the global or builtins object from the current context.
2729   mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2730   mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
2731 
2732   // Check that the function's map is the same as the expected cached map.
2733   mov(scratch, Operand(scratch,
2734                        Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
2735 
2736   size_t offset = expected_kind * kPointerSize +
2737       FixedArrayBase::kHeaderSize;
2738   cmp(map_in_out, FieldOperand(scratch, offset));
2739   j(not_equal, no_map_match);
2740 
2741   // Use the transitioned cached map.
2742   offset = transitioned_kind * kPointerSize +
2743       FixedArrayBase::kHeaderSize;
2744   mov(map_in_out, FieldOperand(scratch, offset));
2745 }
2746 
2747 
LoadInitialArrayMap(Register function_in,Register scratch,Register map_out,bool can_have_holes)2748 void MacroAssembler::LoadInitialArrayMap(
2749     Register function_in, Register scratch,
2750     Register map_out, bool can_have_holes) {
2751   ASSERT(!function_in.is(map_out));
2752   Label done;
2753   mov(map_out, FieldOperand(function_in,
2754                             JSFunction::kPrototypeOrInitialMapOffset));
2755   if (!FLAG_smi_only_arrays) {
2756     ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
2757     LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
2758                                         kind,
2759                                         map_out,
2760                                         scratch,
2761                                         &done);
2762   } else if (can_have_holes) {
2763     LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
2764                                         FAST_HOLEY_SMI_ELEMENTS,
2765                                         map_out,
2766                                         scratch,
2767                                         &done);
2768   }
2769   bind(&done);
2770 }
2771 
2772 
LoadGlobalContext(Register global_context)2773 void MacroAssembler::LoadGlobalContext(Register global_context) {
2774   // Load the global or builtins object from the current context.
2775   mov(global_context,
2776       Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2777   // Load the native context from the global or builtins object.
2778   mov(global_context,
2779       FieldOperand(global_context, GlobalObject::kNativeContextOffset));
2780 }
2781 
2782 
LoadGlobalFunction(int index,Register function)2783 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
2784   // Load the global or builtins object from the current context.
2785   mov(function,
2786       Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2787   // Load the native context from the global or builtins object.
2788   mov(function,
2789       FieldOperand(function, GlobalObject::kNativeContextOffset));
2790   // Load the function from the native context.
2791   mov(function, Operand(function, Context::SlotOffset(index)));
2792 }
2793 
2794 
LoadGlobalFunctionInitialMap(Register function,Register map)2795 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
2796                                                   Register map) {
2797   // Load the initial map.  The global functions all have initial maps.
2798   mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2799   if (emit_debug_code()) {
2800     Label ok, fail;
2801     CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
2802     jmp(&ok);
2803     bind(&fail);
2804     Abort(kGlobalFunctionsMustHaveInitialMap);
2805     bind(&ok);
2806   }
2807 }
2808 
2809 
2810 // Store the value in register src in the safepoint register stack
2811 // slot for register dst.
StoreToSafepointRegisterSlot(Register dst,Register src)2812 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
2813   mov(SafepointRegisterSlot(dst), src);
2814 }
2815 
2816 
StoreToSafepointRegisterSlot(Register dst,Immediate src)2817 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
2818   mov(SafepointRegisterSlot(dst), src);
2819 }
2820 
2821 
LoadFromSafepointRegisterSlot(Register dst,Register src)2822 void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
2823   mov(dst, SafepointRegisterSlot(src));
2824 }
2825 
2826 
SafepointRegisterSlot(Register reg)2827 Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
2828   return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
2829 }
2830 
2831 
SafepointRegisterStackIndex(int reg_code)2832 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
2833   // The registers are pushed starting with the lowest encoding,
2834   // which means that lowest encodings are furthest away from
2835   // the stack pointer.
2836   ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
2837   return kNumSafepointRegisters - reg_code - 1;
2838 }
2839 
2840 
LoadHeapObject(Register result,Handle<HeapObject> object)2841 void MacroAssembler::LoadHeapObject(Register result,
2842                                     Handle<HeapObject> object) {
2843   AllowDeferredHandleDereference embedding_raw_address;
2844   if (isolate()->heap()->InNewSpace(*object)) {
2845     Handle<Cell> cell = isolate()->factory()->NewCell(object);
2846     mov(result, Operand::ForCell(cell));
2847   } else {
2848     mov(result, object);
2849   }
2850 }
2851 
2852 
CmpHeapObject(Register reg,Handle<HeapObject> object)2853 void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
2854   AllowDeferredHandleDereference using_raw_address;
2855   if (isolate()->heap()->InNewSpace(*object)) {
2856     Handle<Cell> cell = isolate()->factory()->NewCell(object);
2857     cmp(reg, Operand::ForCell(cell));
2858   } else {
2859     cmp(reg, object);
2860   }
2861 }
2862 
2863 
PushHeapObject(Handle<HeapObject> object)2864 void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
2865   AllowDeferredHandleDereference using_raw_address;
2866   if (isolate()->heap()->InNewSpace(*object)) {
2867     Handle<Cell> cell = isolate()->factory()->NewCell(object);
2868     push(Operand::ForCell(cell));
2869   } else {
2870     Push(object);
2871   }
2872 }
2873 
2874 
Ret()2875 void MacroAssembler::Ret() {
2876   ret(0);
2877 }
2878 
2879 
Ret(int bytes_dropped,Register scratch)2880 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
2881   if (is_uint16(bytes_dropped)) {
2882     ret(bytes_dropped);
2883   } else {
2884     pop(scratch);
2885     add(esp, Immediate(bytes_dropped));
2886     push(scratch);
2887     ret(0);
2888   }
2889 }
2890 
2891 
VerifyX87StackDepth(uint32_t depth)2892 void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
2893   // Make sure the floating point stack is either empty or has depth items.
2894   ASSERT(depth <= 7);
2895   // This is very expensive.
2896   ASSERT(FLAG_debug_code && FLAG_enable_slow_asserts);
2897 
2898   // The top-of-stack (tos) is 7 if there is one item pushed.
2899   int tos = (8 - depth) % 8;
2900   const int kTopMask = 0x3800;
2901   push(eax);
2902   fwait();
2903   fnstsw_ax();
2904   and_(eax, kTopMask);
2905   shr(eax, 11);
2906   cmp(eax, Immediate(tos));
2907   Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
2908   fnclex();
2909   pop(eax);
2910 }
2911 
2912 
Drop(int stack_elements)2913 void MacroAssembler::Drop(int stack_elements) {
2914   if (stack_elements > 0) {
2915     add(esp, Immediate(stack_elements * kPointerSize));
2916   }
2917 }
2918 
2919 
Move(Register dst,Register src)2920 void MacroAssembler::Move(Register dst, Register src) {
2921   if (!dst.is(src)) {
2922     mov(dst, src);
2923   }
2924 }
2925 
2926 
SetCounter(StatsCounter * counter,int value)2927 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2928   if (FLAG_native_code_counters && counter->Enabled()) {
2929     mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
2930   }
2931 }
2932 
2933 
IncrementCounter(StatsCounter * counter,int value)2934 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2935   ASSERT(value > 0);
2936   if (FLAG_native_code_counters && counter->Enabled()) {
2937     Operand operand = Operand::StaticVariable(ExternalReference(counter));
2938     if (value == 1) {
2939       inc(operand);
2940     } else {
2941       add(operand, Immediate(value));
2942     }
2943   }
2944 }
2945 
2946 
DecrementCounter(StatsCounter * counter,int value)2947 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2948   ASSERT(value > 0);
2949   if (FLAG_native_code_counters && counter->Enabled()) {
2950     Operand operand = Operand::StaticVariable(ExternalReference(counter));
2951     if (value == 1) {
2952       dec(operand);
2953     } else {
2954       sub(operand, Immediate(value));
2955     }
2956   }
2957 }
2958 
2959 
IncrementCounter(Condition cc,StatsCounter * counter,int value)2960 void MacroAssembler::IncrementCounter(Condition cc,
2961                                       StatsCounter* counter,
2962                                       int value) {
2963   ASSERT(value > 0);
2964   if (FLAG_native_code_counters && counter->Enabled()) {
2965     Label skip;
2966     j(NegateCondition(cc), &skip);
2967     pushfd();
2968     IncrementCounter(counter, value);
2969     popfd();
2970     bind(&skip);
2971   }
2972 }
2973 
2974 
DecrementCounter(Condition cc,StatsCounter * counter,int value)2975 void MacroAssembler::DecrementCounter(Condition cc,
2976                                       StatsCounter* counter,
2977                                       int value) {
2978   ASSERT(value > 0);
2979   if (FLAG_native_code_counters && counter->Enabled()) {
2980     Label skip;
2981     j(NegateCondition(cc), &skip);
2982     pushfd();
2983     DecrementCounter(counter, value);
2984     popfd();
2985     bind(&skip);
2986   }
2987 }
2988 
2989 
Assert(Condition cc,BailoutReason reason)2990 void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
2991   if (emit_debug_code()) Check(cc, reason);
2992 }
2993 
2994 
AssertFastElements(Register elements)2995 void MacroAssembler::AssertFastElements(Register elements) {
2996   if (emit_debug_code()) {
2997     Factory* factory = isolate()->factory();
2998     Label ok;
2999     cmp(FieldOperand(elements, HeapObject::kMapOffset),
3000         Immediate(factory->fixed_array_map()));
3001     j(equal, &ok);
3002     cmp(FieldOperand(elements, HeapObject::kMapOffset),
3003         Immediate(factory->fixed_double_array_map()));
3004     j(equal, &ok);
3005     cmp(FieldOperand(elements, HeapObject::kMapOffset),
3006         Immediate(factory->fixed_cow_array_map()));
3007     j(equal, &ok);
3008     Abort(kJSObjectWithFastElementsMapHasSlowElements);
3009     bind(&ok);
3010   }
3011 }
3012 
3013 
Check(Condition cc,BailoutReason reason)3014 void MacroAssembler::Check(Condition cc, BailoutReason reason) {
3015   Label L;
3016   j(cc, &L);
3017   Abort(reason);
3018   // will not return here
3019   bind(&L);
3020 }
3021 
3022 
CheckStackAlignment()3023 void MacroAssembler::CheckStackAlignment() {
3024   int frame_alignment = OS::ActivationFrameAlignment();
3025   int frame_alignment_mask = frame_alignment - 1;
3026   if (frame_alignment > kPointerSize) {
3027     ASSERT(IsPowerOf2(frame_alignment));
3028     Label alignment_as_expected;
3029     test(esp, Immediate(frame_alignment_mask));
3030     j(zero, &alignment_as_expected);
3031     // Abort if stack is not aligned.
3032     int3();
3033     bind(&alignment_as_expected);
3034   }
3035 }
3036 
3037 
// Aborts execution with |reason|, passing the bailout message to
// Runtime::kAbort.  The message pointer is smuggled past the GC as two smis
// (aligned pointer + alignment delta); does not return (ends in int3).
void MacroAssembler::Abort(BailoutReason reason) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  const char* msg = GetBailoutReason(reason);
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  // p0 = p1 rounded down to a smi-tagged value; the difference is re-added
  // by the runtime to recover the real pointer.
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  // Push the two smi-encoded halves as the runtime call's arguments.
  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 2);
  } else {
    CallRuntime(Runtime::kAbort, 2);
  }
  // will not return here
  int3();
}
3075 
3076 
// Throws a JavaScript exception for |reason| via Runtime::kThrowMessage.
// Does not return (ends in int3).
void MacroAssembler::Throw(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Throw message: ");
    RecordComment(msg);
  }
#endif

  // Pass the reason as a smi argument to the runtime call.
  push(eax);
  push(Immediate(Smi::FromInt(reason)));
  // Disable stub call restrictions to always allow calls to throw.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kThrowMessage, 1);
  } else {
    CallRuntime(Runtime::kThrowMessage, 1);
  }
  // will not return here
  int3();
}
3100 
3101 
ThrowIf(Condition cc,BailoutReason reason)3102 void MacroAssembler::ThrowIf(Condition cc, BailoutReason reason) {
3103   Label L;
3104   j(NegateCondition(cc), &L);
3105   Throw(reason);
3106   // will not return here
3107   bind(&L);
3108 }
3109 
3110 
LoadInstanceDescriptors(Register map,Register descriptors)3111 void MacroAssembler::LoadInstanceDescriptors(Register map,
3112                                              Register descriptors) {
3113   mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
3114 }
3115 
3116 
NumberOfOwnDescriptors(Register dst,Register map)3117 void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
3118   mov(dst, FieldOperand(map, Map::kBitField3Offset));
3119   DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
3120 }
3121 
3122 
// Load the double value 2^power into XMM register |dst|, clobbering
// |scratch|.  Works by placing the biased exponent directly into the
// exponent bits of an IEEE-754 double (mantissa stays zero).
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  // The biased exponent must fit in the double's exponent field.
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  // Shift the exponent value into the exponent bit positions.
  psllq(dst, HeapNumber::kMantissaBits);
}
3132 
3133 
// Look up |object| (a Smi or HeapNumber) in the heap's number-string cache.
// On a hit, |result| holds the cached string and execution falls through.
// On a miss, jumps to |not_found|.  |scratch1| and |scratch2| are clobbered;
// |result| is also used as a temporary during the lookup.
void MacroAssembler::LookupNumberStringCache(Register object,
                                             Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* not_found) {
  // Use of registers. Register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  // Load the number string cache.
  LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
  shr(mask, kSmiTagSize + 1);  // Untag length and divide it by two.
  sub(mask, Immediate(1));  // Make mask.

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
  Label smi_hash_calculated;
  Label load_result_from_cache;
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  JumpIfNotSmi(object, &not_smi, Label::kNear);
  mov(scratch, object);
  SmiUntag(scratch);
  jmp(&smi_hash_calculated, Label::kNear);
  bind(&not_smi);
  // Non-smi, non-heap-number values can never be in the cache.
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  j(not_equal, not_found);
  STATIC_ASSERT(8 == kDoubleSize);
  // Hash the double by xor-ing its two 32-bit halves.
  mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
  xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
  // Object is heap number and hash is now in scratch. Calculate cache index.
  and_(scratch, mask);
  Register index = scratch;
  Register probe = mask;
  // Load the cached key (the number half of the entry) and verify it
  // matches the heap number bit-for-bit.
  mov(probe,
      FieldOperand(number_string_cache,
                   index,
                   times_twice_pointer_size,
                   FixedArray::kHeaderSize));
  JumpIfSmi(probe, not_found);
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatureScope fscope(this, SSE2);
    movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
    ucomisd(xmm0, FieldOperand(probe, HeapNumber::kValueOffset));
  } else {
    // No SSE2: compare via the x87 FPU instead.
    fld_d(FieldOperand(object, HeapNumber::kValueOffset));
    fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
    FCmp();
  }
  j(parity_even, not_found);  // Bail out if NaN is involved.
  j(not_equal, not_found);  // The cache did not contain this value.
  jmp(&load_result_from_cache, Label::kNear);

  bind(&smi_hash_calculated);
  // Object is smi and hash is now in scratch. Calculate cache index.
  and_(scratch, mask);
  // Check if the entry is the smi we are looking for.
  cmp(object,
      FieldOperand(number_string_cache,
                   index,
                   times_twice_pointer_size,
                   FixedArray::kHeaderSize));
  j(not_equal, not_found);

  // Get the result from the cache.
  bind(&load_result_from_cache);
  // The string half of the entry lives one pointer past the number half.
  mov(result,
      FieldOperand(number_string_cache,
                   index,
                   times_twice_pointer_size,
                   FixedArray::kHeaderSize + kPointerSize));
  IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
}
3214 
3215 
// Jump to |failure| unless |instance_type| describes a sequential one-byte
// (ASCII) string.  |scratch| is clobbered; it may alias |instance_type|.
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Keep only the bits that distinguish string-ness, representation
  // (seq/cons/external/sliced) and encoding (one/two byte).
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}
3228 
3229 
// Jump to |failure| unless both |object1| and |object2| are sequential
// one-byte (ASCII) strings.  Clobbers |scratch1| and |scratch2|.
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  // AND-ing the two values: the smi tag bit of the result is 0 iff at
  // least one of the operands is a smi (kSmiTag == 0).
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  // The shifted and unshifted masks must not overlap for this to be valid.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  // scratch1 = scratch1 | (scratch2 << 3), combined via lea.
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
3260 
3261 
// Jump to |not_unique_name| unless the instance type in |operand| is a
// unique name, i.e. an internalized string or a Symbol.
void MacroAssembler::JumpIfNotUniqueName(Operand operand,
                                         Label* not_unique_name,
                                         Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  // Internalized strings have both the string and internalized bits clear.
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  // Otherwise the only remaining unique-name type is SYMBOL_TYPE.
  cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}
3274 
3275 
// Emit debug checks for a sequential-string character store: |string| must
// be a non-smi with the expected representation/encoding (|encoding_mask|),
// and the untagged |index| must be within the string's bounds.  Throws on
// violation.  |value| is saved and restored around the type check; |index|
// is restored to its untagged form on exit.
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object, Label::kNear);
  Throw(kNonObject);
  bind(&is_object);

  // Borrow |value| as a scratch register for the instance-type check.
  push(value);
  mov(value, FieldOperand(string, HeapObject::kMapOffset));
  movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmp(value, Immediate(encoding_mask));
  pop(value);
  ThrowIf(not_equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  SmiTag(index);
  // Can't use overflow here directly, compiler can't seem to disambiguate.
  // SmiTag overflowing means the index doesn't fit in a smi at all.
  ThrowIf(NegateCondition(no_overflow), kIndexIsTooLarge);

  cmp(index, FieldOperand(string, String::kLengthOffset));
  ThrowIf(greater_equal, kIndexIsTooLarge);

  cmp(index, Immediate(Smi::FromInt(0)));
  ThrowIf(less, kIndexIsNegative);

  // Restore the index
  SmiUntag(index);
}
3310 
3311 
// Reserve and align stack space for a C call with |num_arguments| word-sized
// arguments.  When the OS requires frame alignment, the original esp is
// stashed in the extra reserved slot so CallCFunction can restore it.
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    // Round esp down to the alignment boundary.
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
3326 
3327 
// Call the C function referenced by |function| with |num_arguments|
// word-sized arguments already pushed via PrepareCallCFunction.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}
3334 
3335 
// Call the C function whose address is in |function|, then release the
// argument space set up by PrepareCallCFunction.  Requires a frame.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  ASSERT(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (OS::ActivationFrameAlignment() != 0) {
    // PrepareCallCFunction saved the pre-alignment esp in the slot just
    // past the arguments; restore it directly.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}
3351 
3352 
AreAliased(Register r1,Register r2,Register r3,Register r4)3353 bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
3354   if (r1.is(r2)) return true;
3355   if (r1.is(r3)) return true;
3356   if (r1.is(r4)) return true;
3357   if (r2.is(r3)) return true;
3358   if (r2.is(r4)) return true;
3359   if (r3.is(r4)) return true;
3360   return false;
3361 }
3362 
3363 
// Construct a patcher that emits |size| bytes of code directly at |address|.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
3373 
3374 
// Flush the instruction cache over the patched range and verify that exactly
// |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
3383 
3384 
// Test the memory-chunk flags of the page containing |object| against
// |mask| and jump to |condition_met| if the test satisfies |cc| (which must
// be zero or not_zero).  Clobbers |scratch|.
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  // Compute the page start by masking off the in-page offset bits.
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Use a byte test when the mask fits in one byte; it is shorter.
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
3407 
3408 
// Like CheckPageFlag, but for a statically known |map|: the page's flag word
// address is computed at compile time, so no scratch register is needed.
void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  ASSERT(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  // Use a byte test when the mask fits in one byte; it is shorter.
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
3429 
3430 
// Jump to |if_deprecated| when |map| has been deprecated at run time.  Emits
// no code at all when the map can never be deprecated.  Clobbers |scratch|.
void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
                                        Register scratch,
                                        Label* if_deprecated) {
  if (map->CanBeDeprecated()) {
    mov(scratch, map);
    // Test the Deprecated bit in the map's bit field 3 (a smi).
    mov(scratch, FieldOperand(scratch, Map::kBitField3Offset));
    and_(scratch, Immediate(Smi::FromInt(Map::Deprecated::kMask)));
    j(not_zero, if_deprecated);
  }
}
3441 
3442 
// Jump to |on_black| if |object| is marked black (bit pattern "10") in the
// incremental-marking bitmap.  Clobbers |scratch0| and |scratch1|.
void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1,
           on_black, on_black_near,
           1, 0);  // kBlackBitPattern.
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
}
3453 
3454 
// Jump to |has_color| if the two mark bits of |object| match the pattern
// (first_bit, second_bit).  Clobbers |bitmap_scratch| and |mask_scratch|;
// implicitly uses ecx via GetMarkBits, so none of the inputs may alias it.
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  // Test the first mark bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  // If the mask shifted out of the word, the second bit lives in the first
  // bit of the next bitmap cell.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  // Test bit 0 of the following bitmap cell.
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}
3481 
3482 
// Compute the marking-bitmap cell address for |addr_reg| into |bitmap_reg|
// and a single-bit mask for its first mark bit into |mask_reg|.  Clobbers
// ecx, so no input may alias it.
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  // bitmap_reg = start of the page containing addr_reg.
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  // Byte offset of the bitmap cell within the page's bitmap.
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  // Bit index of the object's mark bit within its cell.
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);  // mask_reg = 1 << (bit index in ecx).
}
3503 
3504 
// If |value| is white in the marking bitmap and is a data object (HeapNumber
// or non-indirect string), mark it black and bump the page's live-byte
// count.  If it is white but may contain pointers, jump to
// |value_is_white_and_not_data| instead.  Black/grey objects fall through
// unchanged.  Clobbers |bitmap_scratch|, |mask_scratch| and ecx.
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(not_zero, &done, Label::kNear);

  if (emit_debug_code()) {
    // Check for impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // shl.  May overflow making the check conservative.
    add(mask_scratch, mask_scratch);
    test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white.  We check whether it is data that doesn't need scanning.
  // Currently only checks for HeapNumber and non-cons strings.
  Register map = ecx;  // Holds map while checking type.
  Register length = ecx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number
  mov(map, FieldOperand(value, HeapObject::kMapOffset));
  cmp(map, isolate()->factory()->heap_number_map());
  j(not_equal, &not_heap_number, Label::kNear);
  mov(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  Register instance_type = ecx;
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
  test_b(instance_type, kExternalStringTag);
  j(zero, &not_external, Label::kNear);
  mov(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  // Map the encoding bit to a char size: ASCII (bit set) -> 4, UC16 -> 8.
  ASSERT(kOneByteStringTag == 0x04);
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  add(length, Immediate(0x04));
  // Value now either 4 (if ASCII) or 8 (if UC16), i.e., char-size shifted
  // by 2. If we multiply the string length as smi by this, it still
  // won't overflow a 32-bit value.
  ASSERT_EQ(SeqOneByteString::kMaxSize, SeqTwoByteString::kMaxSize);
  ASSERT(SeqOneByteString::kMaxSize <=
         static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, 2 + kSmiTagSize + kSmiShiftSize);
  // Round the byte length up to the object alignment.
  add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white.  Mark it black.  Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  // Account for the object's size in the page's live-bytes counter.
  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
      length);
  if (emit_debug_code()) {
    mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
    cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
    Check(less_equal, kLiveBytesCountOverflowChunkSize);
  }

  bind(&done);
}
3610 
3611 
// Extract the enum-cache length bit field of |map| into |dst| (as a smi).
void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Smi::FromInt(Map::EnumLengthBits::kMask)));
}
3617 
3618 
// Walk the prototype chain of the object in eax and jump to |call_runtime|
// unless a valid, empty-past-the-receiver enum cache exists and no object in
// the chain has elements.  Clobbers ebx, ecx and edx.
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(not_equal, call_runtime);

  // Advance to the prototype and loop until we hit null.
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}
3653 
3654 
// Check whether an AllocationMemento immediately follows the JSArray in
// |receiver_reg|.  Jumps to |no_memento_found| when the candidate address is
// outside new space; otherwise leaves the flags set by the final map
// comparison (equal => memento found) for the caller to branch on.
// Clobbers |scratch_reg|.
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Address just past where the memento would end.
  lea(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  cmp(scratch_reg, Immediate(new_space_start));
  j(less, no_memento_found);
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater, no_memento_found);
  // Compare the map of the would-be memento; caller tests the equal flag.
  cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
      Immediate(isolate()->factory()->allocation_memento_map()));
}
3673 
3674 
// Walk the prototype chain of |object| and jump to |found| if any object in
// the chain (including |object| itself) has dictionary-mode elements.
// Clobbers |scratch0| and |scratch1|, which must be distinct.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  ASSERT(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again;

  // scratch contained elements pointer.
  mov(current, object);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  // Decode the elements kind from the map's bit field 2.
  mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
  and_(scratch1, Map::kElementsKindMask);
  shr(scratch1, Map::kElementsKindShift);
  cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  // Follow the prototype link; stop at null.
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(not_equal, &loop_again);
}
3700 
3701 } }  // namespace v8::internal
3702 
3703 #endif  // V8_TARGET_ARCH_IA32
3704