• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #if V8_TARGET_ARCH_IA32
6 
7 #include "src/base/bits.h"
8 #include "src/base/division-by-constant.h"
9 #include "src/bootstrapper.h"
10 #include "src/codegen.h"
11 #include "src/debug/debug.h"
12 #include "src/ia32/frames-ia32.h"
13 #include "src/ia32/macro-assembler-ia32.h"
14 #include "src/runtime/runtime.h"
15 
16 namespace v8 {
17 namespace internal {
18 
19 // -------------------------------------------------------------------------
20 // MacroAssembler implementation.
21 
// Constructs a MacroAssembler emitting into |buffer| of |size| bytes.
// Stub-generation and frame flags start out false.  When
// |create_code_object| is kYes, |code_object_| is seeded with a handle to
// the undefined value (it is replaced later once a real code object
// exists).
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}
32 
33 
Load(Register dst,const Operand & src,Representation r)34 void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
35   DCHECK(!r.IsDouble());
36   if (r.IsInteger8()) {
37     movsx_b(dst, src);
38   } else if (r.IsUInteger8()) {
39     movzx_b(dst, src);
40   } else if (r.IsInteger16()) {
41     movsx_w(dst, src);
42   } else if (r.IsUInteger16()) {
43     movzx_w(dst, src);
44   } else {
45     mov(dst, src);
46   }
47 }
48 
49 
// Emits a store of |src| to |dst| sized according to representation |r|
// (byte, word, or full pointer width).  For full-width stores, debug-mode
// assertions check that a value claimed to be a heap object is not a smi
// and vice versa.  Double representations are not handled here.
void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    mov_b(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    mov_w(dst, src);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    mov(dst, src);
  }
}
65 
66 
// Loads the root-list value at |index| into |destination|.  Roots that are
// immovable constants are embedded directly as an immediate; otherwise the
// value is fetched indirectly from the roots array, reusing |destination|
// to hold the index first.
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    // Constant root: embed the handle directly, no memory load needed.
    mov(destination, isolate()->heap()->root_handle(index));
    return;
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(destination, Immediate(index));
  mov(destination, Operand::StaticArray(destination,
                                        times_pointer_size,
                                        roots_array_start));
}
79 
80 
// Stores |source| into the roots array slot at |index|, using |scratch| to
// hold the index.  Only roots that may legally change after heap
// initialization are allowed (checked in debug mode).
void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}
91 
92 
// Compares |with| against the root at |index| by loading the root's
// address from the roots array; |scratch| is clobbered with the index.
// Use this overload for roots that cannot be embedded as constants.
void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                times_pointer_size,
                                roots_array_start));
}
103 
104 
// Compares |with| against a constant root embedded as an immediate handle.
// Only valid for roots that can be treated as constants (debug-checked).
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}
109 
110 
// Memory-operand variant: compares the value at |with| against a constant
// root embedded as an immediate handle (constant roots only, debug-checked).
void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}
116 
117 
// Pushes a constant root onto the stack as an immediate handle.
// Only valid for roots that can be treated as constants (debug-checked).
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Push(isolate()->heap()->root_handle(index));
}
122 
// Table of the ia32 caller-saved general-purpose registers that
// Push/PopCallerSaved preserve: eax, ecx, edx.
#define REG(Name) \
  { Register::kCode_##Name }

static const Register saved_regs[] = {REG(eax), REG(ecx), REG(edx)};

#undef REG

// Number of entries in saved_regs.
static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
131 
// Pushes all caller-saved GP registers (eax, ecx, edx) except the up-to-three
// exclusions, then, if |fp_mode| == kSaveFPRegs, spills XMM1..XMMmax to a
// stack slab.  Must be paired with PopCallerSaved using the same arguments.
void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
                                     Register exclusion1, Register exclusion2,
                                     Register exclusion3) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      push(reg);
    }
  }
  if (fp_mode == kSaveFPRegs) {
    // One slab allocation for all saved XMM registers.
    sub(esp, Immediate(kDoubleSize * (XMMRegister::kMaxNumRegisters - 1)));
    // Save all XMM registers except XMM0.
    for (int i = XMMRegister::kMaxNumRegisters - 1; i > 0; i--) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(esp, (i - 1) * kDoubleSize), reg);
    }
  }
}
153 
// Inverse of PushCallerSaved: restores XMM registers (if they were saved)
// and then pops the caller-saved GP registers in reverse push order,
// honoring the same exclusions.
void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
                                    Register exclusion2, Register exclusion3) {
  if (fp_mode == kSaveFPRegs) {
    // Restore all XMM registers except XMM0.
    for (int i = XMMRegister::kMaxNumRegisters - 1; i > 0; i--) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(esp, (i - 1) * kDoubleSize));
    }
    add(esp, Immediate(kDoubleSize * (XMMRegister::kMaxNumRegisters - 1)));
  }

  // Pop in reverse order of the pushes in PushCallerSaved.
  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      pop(reg);
    }
  }
}
172 
// Jumps to |condition_met| when the page containing |object| has its
// in-new-space flag matching condition |cc|; |scratch| is clobbered.
// Thin wrapper over CheckPageFlag with kIsInNewSpaceMask.
void MacroAssembler::InNewSpace(Register object, Register scratch, Condition cc,
                                Label* condition_met,
                                Label::Distance distance) {
  CheckPageFlag(object, scratch, MemoryChunk::kIsInNewSpaceMask, cc,
                condition_met, distance);
}
179 
180 
// Appends |addr| to the store buffer and, when the buffer wraps (mask test
// yields zero), calls the StoreBufferOverflowStub.  Depending on
// |and_then|, either returns from the generated code (kReturnAtEnd) or
// falls through (kFallThroughAtEnd).  |scratch| is clobbered.
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr,
    Register scratch,
    SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    // Debug mode: |object| must be in new space, otherwise trap with int3.
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  // Check for end of buffer.
  test(scratch, Immediate(StoreBuffer::kStoreBufferMask));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(not_equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
225 
226 
// Clamps the double in |input_reg| to the range [0, 255] and leaves the
// integer result in |result_reg|.  Values already in range pass through the
// cvtsd2si fast path; out-of-range values are saturated, and conversion
// failure (cvtsd2si overflow marker) falls back to a ucomisd-based
// comparison that maps NaN/negative to 0 and large values to 255.
// |scratch_reg| is zeroed and used as the 0.0 comparand.
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  xorps(scratch_reg, scratch_reg);
  cvtsd2si(result_reg, input_reg);
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);  // Already in [0, 255].
  // cvtsd2si yields 0x80000000 on failure; cmp with 1 sets OF exactly then.
  cmp(result_reg, Immediate(0x1));
  j(overflow, &conv_failure, Label::kNear);
  mov(result_reg, Immediate(0));
  setcc(sign, result_reg);       // 1 if the converted value was negative.
  sub(result_reg, Immediate(1)); // 0 if negative, 0xFFFFFFFF if positive.
  and_(result_reg, Immediate(255));  // Saturate: 0 or 255.
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  Move(result_reg, Immediate(0));
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);  // Negative (or NaN via unordered) -> 0.
  Move(result_reg, Immediate(255));
  bind(&done);
}
250 
251 
// Clamps the signed 32-bit integer in |reg| to [0, 255] in place.
// In-range values skip the fixup entirely.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);  // Already a valid uint8.
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
260 
261 
// Calls the DoubleToIStub to truncate the double stored at
// [input_reg + offset] into |result_reg|.  Used as the slow path when the
// inline cvttsd2si conversion overflows.
void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}
268 
269 
// Truncates the double in |input_reg| to a 32-bit integer in |result_reg|.
// Fast path uses cvttsd2si; on overflow (result 0x80000000, detected via
// the cmp/no_overflow trick) the double is spilled to the stack and the
// DoubleToIStub slow path is invoked.
void MacroAssembler::TruncateDoubleToI(Register result_reg,
                                       XMMRegister input_reg) {
  Label done;
  cvttsd2si(result_reg, Operand(input_reg));
  // cvttsd2si returns 0x80000000 on failure; cmp with 1 overflows only then.
  cmp(result_reg, 0x1);
  j(no_overflow, &done, Label::kNear);

  // Slow path: spill the double and call the conversion stub.
  sub(esp, Immediate(kDoubleSize));
  movsd(MemOperand(esp, 0), input_reg);
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
  bind(&done);
}
283 
284 
// Converts the double in |input_reg| to an integer in |result_reg|,
// jumping to |lost_precision| when the conversion is inexact, to |is_nan|
// for NaN input, and (when |minus_zero_mode| is FAIL_ON_MINUS_ZERO) to
// |minus_zero| for a -0.0 input.  |scratch| holds the round-tripped value
// and must differ from |input_reg|.
void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
                               XMMRegister scratch,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  DCHECK(!input_reg.is(scratch));
  cvttsd2si(result_reg, Operand(input_reg));
  // Round-trip back to double and compare to detect precision loss / NaN.
  Cvtsi2sd(scratch, Operand(result_reg));
  ucomisd(scratch, input_reg);
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);  // ucomisd sets PF on unordered (NaN).
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    Label done;
    // The integer converted back is equal to the original. We
    // only have to test if we got -0 as an input.
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    movmskpd(result_reg, input_reg);
    // Bit 0 contains the sign of the double in input_reg.
    // If input was positive, we are ok and return 0, otherwise
    // jump to minus_zero.
    and_(result_reg, 1);
    j(not_zero, minus_zero, dst);
    bind(&done);
  }
}
311 
312 
// Truncates the HeapNumber referenced by |input_reg| to a 32-bit integer
// in |result_reg|.  With SSE3 the x87 fisttp instruction handles any
// exponent below 2^63; otherwise cvttsd2si is tried and kMinInt is
// special-cased.  Either way, oversized values fall back to the
// DoubleToIStub slow path, taking care to rematerialize the input when
// |input_reg| aliases |result_reg|.
void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done, slow_case;

  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(this, SSE3);
    Label convert;
    // Use more powerful conversion when sse3 is available.
    // Load x87 register with heap number.
    fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
    // Get exponent alone and check for too-big exponent.
    mov(result_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
    and_(result_reg, HeapNumber::kExponentMask);
    const uint32_t kTooBigExponent =
        (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
    cmp(Operand(result_reg), Immediate(kTooBigExponent));
    j(greater_equal, &slow_case, Label::kNear);

    // Reserve space for 64 bit answer.
    sub(Operand(esp), Immediate(kDoubleSize));
    // Do conversion, which cannot fail because we checked the exponent.
    fisttp_d(Operand(esp, 0));
    mov(result_reg, Operand(esp, 0));  // Low word of answer is the result.
    add(Operand(esp), Immediate(kDoubleSize));
    jmp(&done, Label::kNear);

    // Slow case.
    bind(&slow_case);
    if (input_reg.is(result_reg)) {
      // Input is clobbered. Restore number from fpu stack
      sub(Operand(esp), Immediate(kDoubleSize));
      fstp_d(Operand(esp, 0));
      SlowTruncateToI(result_reg, esp, 0);
      add(esp, Immediate(kDoubleSize));
    } else {
      fstp(0);  // Pop the x87 stack; the stub reads from the heap number.
      SlowTruncateToI(result_reg, input_reg);
    }
  } else {
    movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    cvttsd2si(result_reg, Operand(xmm0));
    // cvttsd2si returns 0x80000000 on failure; cmp with 1 overflows then.
    cmp(result_reg, 0x1);
    j(no_overflow, &done, Label::kNear);
    // Check if the input was 0x8000000 (kMinInt).
    // If no, then we got an overflow and we deoptimize.
    ExternalReference min_int = ExternalReference::address_of_min_int();
    ucomisd(xmm0, Operand::StaticVariable(min_int));
    j(not_equal, &slow_case, Label::kNear);
    j(parity_even, &slow_case, Label::kNear);  // NaN.
    jmp(&done, Label::kNear);

    // Slow case.
    bind(&slow_case);
    if (input_reg.is(result_reg)) {
      // Input is clobbered. Restore number from double scratch.
      sub(esp, Immediate(kDoubleSize));
      movsd(MemOperand(esp, 0), xmm0);
      SlowTruncateToI(result_reg, esp, 0);
      add(esp, Immediate(kDoubleSize));
    } else {
      SlowTruncateToI(result_reg, input_reg);
    }
  }
  bind(&done);
}
378 
379 
// Converts the unsigned 32-bit integer at |src| into the double |dst|.
// cvtsi2sd treats the input as signed, so when the sign bit is set (value
// >= 2^31) a bias of 2^32 is added to correct the result.
void MacroAssembler::LoadUint32(XMMRegister dst, const Operand& src) {
  Label done;
  cmp(src, Immediate(0));
  ExternalReference uint32_bias = ExternalReference::address_of_uint32_bias();
  Cvtsi2sd(dst, src);
  j(not_sign, &done, Label::kNear);
  addsd(dst, Operand::StaticVariable(uint32_bias));
  bind(&done);
}
389 
390 
// Write barrier for a store of |value| into the FixedArray |object| at smi
// index |index|.  Skips the barrier for smi values (when INLINE_SMI_CHECK),
// computes the slot address into |index| (clobbering it), and delegates to
// RecordWrite.  In debug mode the clobbered registers are zapped.
void MacroAssembler::RecordWriteArray(
    Register object,
    Register value,
    Register index,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    DCHECK_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(index, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
429 
430 
// Write barrier for a store of |value| into the field of |object| at byte
// |offset| (which must be pointer-aligned).  The slot address is computed
// into |dst| (clobbered) and the barrier is delegated to RecordWrite.
// Debug mode verifies the computed address alignment and zaps the
// clobbered registers afterwards.
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the start
  // of the object, so so offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // Trap if the computed slot address is not pointer-aligned.
    Label ok;
    test_b(dst, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
474 
475 
// Write barrier specialized for storing the map |map| into |object|'s map
// slot.  Both scratches are clobbered (|scratch1| holds the slot address,
// |scratch2| the value for the stub).  Skipped entirely when incremental
// marking is off; otherwise a single pointers-to-here page-flag check on
// the map's page suffices before calling the RecordWriteStub.
void MacroAssembler::RecordWriteForMap(
    Register object,
    Handle<Map> map,
    Register scratch1,
    Register scratch2,
    SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    // Trap if the map slot address is not pointer-aligned.
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // Compute the address.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set.  This optimization
  // relies on the fact that maps can never be in new space.
  DCHECK(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
                       save_fp);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
536 
537 
// Core write barrier: records the store of |value| at |address| within
// |object|.  Fast-outs when the barrier is statically unnecessary, skips
// smi values (INLINE_SMI_CHECK), performs page-flag checks on the value's
// and object's pages, and otherwise calls the RecordWriteStub.  |address|
// and |value| are clobbered (zapped in debug mode); all three registers
// must be distinct.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // Trap unless the slot at |address| actually contains |value|.
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
605 
// Write barrier for storing |code_entry| into |js_function|'s code-entry
// field.  Only needed for incremental marking (no remembered-set update,
// per the comment below); after page-flag checks, saves the live registers
// and calls the C function incremental_marking_record_write_code_entry.
// |scratch| is clobbered; all three registers must be distinct.
void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
                                               Register code_entry,
                                               Register scratch) {
  const int offset = JSFunction::kCodeEntryOffset;

  // Since a code entry (value) is always in old space, we don't need to update
  // remembered set. If incremental marking is off, there is nothing for us to
  // do.
  if (!FLAG_incremental_marking) return;

  DCHECK(!js_function.is(code_entry));
  DCHECK(!js_function.is(scratch));
  DCHECK(!code_entry.is(scratch));
  AssertNotSmi(js_function);

  if (emit_debug_code()) {
    // Trap unless the field already holds |code_entry|.
    Label ok;
    lea(scratch, FieldOperand(js_function, offset));
    cmp(code_entry, Operand(scratch, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  CheckPageFlag(code_entry, scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
                Label::kNear);
  CheckPageFlag(js_function, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, zero, &done,
                Label::kNear);

  // Save input registers.
  push(js_function);
  push(code_entry);

  const Register dst = scratch;
  lea(dst, FieldOperand(js_function, offset));

  // Save caller-saved registers.
  PushCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  int argument_count = 3;
  PrepareCallCFunction(argument_count, code_entry);
  mov(Operand(esp, 0 * kPointerSize), js_function);
  mov(Operand(esp, 1 * kPointerSize), dst);  // Slot.
  mov(Operand(esp, 2 * kPointerSize),
      Immediate(ExternalReference::isolate_address(isolate())));

  {
    AllowExternalCallThatCantCauseGC scope(this);
    CallCFunction(
        ExternalReference::incremental_marking_record_write_code_entry_function(
            isolate()),
        argument_count);
  }

  // Restore caller-saved registers.
  PopCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  // Restore input registers.
  pop(code_entry);
  pop(js_function);

  bind(&done);
}
675 
// Emits a call into the runtime's HandleDebuggerStatement via the CEntry
// stub: eax holds the argument count (0), ebx the runtime-function
// reference.
void MacroAssembler::DebugBreak() {
  Move(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kHandleDebuggerStatement,
                                       isolate())));
  CEntryStub ces(isolate(), 1);
  call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}
683 
// int32 -> double conversion.  The xorps breaks the false dependency on
// the destination register's previous contents (cvtsi2sd only writes the
// low lane).
void MacroAssembler::Cvtsi2sd(XMMRegister dst, const Operand& src) {
  xorps(dst, dst);
  cvtsi2sd(dst, src);
}
688 
689 
// uint32 -> float conversion.  Values below 2^31 convert directly; for
// larger values the input is halved (preserving the dropped low bit in
// |tmp| to keep rounding correct), converted, and doubled.  |src| and
// |tmp| are clobbered in the large-value path.
void MacroAssembler::Cvtui2ss(XMMRegister dst, Register src, Register tmp) {
  Label msb_set_src;
  Label jmp_return;
  test(src, src);
  j(sign, &msb_set_src, Label::kNear);
  cvtsi2ss(dst, src);
  jmp(&jmp_return, Label::kNear);
  bind(&msb_set_src);
  mov(tmp, src);
  shr(src, 1);
  // Recover the least significant bit to avoid rounding errors.
  and_(tmp, Immediate(1));
  or_(src, tmp);
  cvtsi2ss(dst, src);
  addss(dst, dst);  // Double the halved value back to the full magnitude.
  bind(&jmp_return);
}
707 
// 64-bit left shift of the register pair high:low by constant |shift|.
// Shifts of 32 or more move low into high (then shift the remainder) and
// zero low; smaller shifts use shld to carry bits from low into high.
void MacroAssembler::ShlPair(Register high, Register low, uint8_t shift) {
  if (shift >= 32) {
    mov(high, low);
    shl(high, shift - 32);
    xor_(low, low);
  } else {
    shld(high, low, shift);
    shl(low, shift);
  }
}
718 
// 64-bit left shift of high:low by the amount in cl.  shld/shl only use
// the low 5 bits of cl, so when bit 5 of ecx is set (shift >= 32) the
// result is fixed up by moving low into high and zeroing low.
void MacroAssembler::ShlPair_cl(Register high, Register low) {
  shld_cl(high, low);
  shl_cl(low);
  Label done;
  test(ecx, Immediate(0x20));
  j(equal, &done, Label::kNear);
  mov(high, low);
  xor_(low, low);
  bind(&done);
}
729 
// 64-bit logical right shift of high:low by constant |shift|.  Shifts of
// 32 or more move high into low (then shift the remainder) and zero high;
// smaller shifts use shrd to carry bits from high into low.
void MacroAssembler::ShrPair(Register high, Register low, uint8_t shift) {
  if (shift >= 32) {
    mov(low, high);
    shr(low, shift - 32);
    xor_(high, high);
  } else {
    shrd(high, low, shift);
    shr(high, shift);
  }
}
740 
// 64-bit logical right shift of high:low by the amount in cl, with the
// same >= 32 fixup as ShlPair_cl (shift counts are taken mod 32 by the
// hardware).
void MacroAssembler::ShrPair_cl(Register high, Register low) {
  shrd_cl(low, high);
  shr_cl(high);
  Label done;
  test(ecx, Immediate(0x20));
  j(equal, &done, Label::kNear);
  mov(low, high);
  xor_(high, high);
  bind(&done);
}
751 
// 64-bit arithmetic right shift of high:low by constant |shift|.  Shifts
// of 32 or more move high into low and fill high with the sign (sar by
// 31); smaller shifts use shrd plus sar on high to preserve the sign.
void MacroAssembler::SarPair(Register high, Register low, uint8_t shift) {
  if (shift >= 32) {
    mov(low, high);
    sar(low, shift - 32);
    sar(high, 31);  // Replicate the sign bit across high.
  } else {
    shrd(high, low, shift);
    sar(high, shift);
  }
}
762 
// 64-bit arithmetic right shift of high:low by the amount in cl, with the
// same >= 32 fixup pattern as ShrPair_cl but sign-filling high.
void MacroAssembler::SarPair_cl(Register high, Register low) {
  shrd_cl(low, high);
  sar_cl(high);
  Label done;
  test(ecx, Immediate(0x20));
  j(equal, &done, Label::kNear);
  mov(low, high);
  sar(high, 31);  // Replicate the sign bit across high.
  bind(&done);
}
773 
IsUnsafeImmediate(const Immediate & x)774 bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
775   static const int kMaxImmediateBits = 17;
776   if (!RelocInfo::IsNone(x.rmode_)) return false;
777   return !is_intn(x.x_, kMaxImmediateBits);
778 }
779 
780 
// Moves |x| into |dst|.  When the immediate is unsafe and a JIT cookie is
// configured, the value is emitted XOR-masked with the cookie and unmasked
// afterwards so the raw constant never appears in the instruction stream.
void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Move(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Move(dst, x);
  }
}
789 
790 
// Pushes |x| onto the stack, applying the same XOR-with-cookie masking as
// SafeMove for unsafe immediates; the pushed slot is unmasked in place.
void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
799 
800 
// Loads |heap_object|'s map into |map| and compares its instance type
// against |type| (flags set for a subsequent conditional jump).
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
807 
808 
// Compares the instance-type byte of the map in |map| against |type|.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
}
812 
// Jumps to |fail| unless |map|'s elements kind (from bit field 2) is
// FAST_ELEMENTS or FAST_HOLEY_ELEMENTS: values at or below the holey-smi
// maximum fail low, values above the holey-element maximum fail high.
// Relies on the elements-kind ordering asserted below.
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}
827 
828 
// Jumps to |fail| unless |map|'s elements kind is FAST_SMI_ELEMENTS or
// FAST_HOLEY_SMI_ELEMENTS (i.e. bit field 2 is at or below the holey-smi
// maximum).  Relies on the elements-kind ordering asserted below.
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(above, fail, distance);
}
838 
839 
// Stores |maybe_number| (a smi or a heap number) as a double into the
// FixedDoubleArray |elements| at index |key|. Jumps to |fail| if
// |maybe_number| is a non-smi that is not a heap number. Clobbers
// |scratch1| and |scratch2|. |key| is scaled by times_4, which yields a
// kDoubleSize stride only for a smi-tagged index — assumes |key| is a smi
// (TODO confirm against callers).
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch1,
    XMMRegister scratch2,
    Label* fail,
    int elements_offset) {
  Label smi_value, done;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  // Non-smi: must be a heap number, otherwise bail out.
  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, turn potential sNaN into qNaN (multiplying by 1.0
  // canonicalizes signalling NaNs).
  Move(scratch2, 1.0);
  mulsd(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  jmp(&done, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch1, maybe_number);
  SmiUntag(scratch1);
  Cvtsi2sd(scratch2, scratch1);
  bind(&done);
  movsd(FieldOperand(elements, key, times_4,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        scratch2);
}
872 
873 
// Compares |obj|'s map word against the handle |map|, setting the condition
// flags (equal iff the object has exactly that map).
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}
877 
878 
// Jumps to |fail| unless |obj| is a heap object whose map equals |map|.
// The smi check is performed only when |smi_check_type| is DO_SMI_CHECK;
// with DONT_DO_SMI_CHECK the caller guarantees |obj| is not a smi.
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}
890 
891 
// Jumps to the |success| code object if |obj|'s map matches the map held in
// the weak |cell|; otherwise falls through. Smis (when checked) and
// mismatches both fall through via the local |fail| label. Clobbers
// |scratch1| (map load) and |scratch2| (used by CmpWeakValue).
void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  mov(scratch1, FieldOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  j(equal, success);

  bind(&fail);
}
906 
907 
// Tests whether |heap_object| is a string. Loads the object's map into
// |map| and its instance type into |instance_type| (both clobbered), and
// returns the condition (zero) that holds when the is-not-string mask bit
// is clear, i.e. when the object is a string.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
917 
918 
// Tests whether |heap_object| is a name (string or symbol). Loads the map
// into |map| and the instance type into |instance_type| (both clobbered),
// and returns the condition (below_equal LAST_NAME_TYPE) that holds for
// names.
Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, Immediate(LAST_NAME_TYPE));
  return below_equal;
}
927 
928 
// Compares the two values on top of the x87 stack, setting EFLAGS
// (fucomip compares st0 with st1 and pops st0), then pops the remaining
// operand so the FPU stack is left empty.
void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}
933 
934 
// Debug-mode check that |object| is a number (smi or heap number); aborts
// with kOperandNotANumber otherwise. Emits nothing in release code.
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);  // Smis are numbers.
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}
945 
// Debug-mode check that |object| is NOT a number: aborts if it is a smi or
// if its map is the heap-number map. Emits nothing in release code.
void MacroAssembler::AssertNotNumber(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsANumber);  // Abort if smi.
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(not_equal, kOperandIsANumber);  // Abort if heap number.
  }
}
955 
// Debug-mode check that |object| is a smi; aborts otherwise. Emits nothing
// in release code.
void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}
962 
963 
// Debug-mode check that |object| is a string: aborts if it is a smi or if
// its instance type is at or beyond FIRST_NONSTRING_TYPE. |object| is
// preserved via push/pop around the map load. Emits nothing in release
// code.
void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}
975 
976 
// Debug-mode check that |object| is a name (instance type <= LAST_NAME_TYPE):
// aborts if it is a smi or not a name. |object| is preserved via push/pop
// around the map load. Emits nothing in release code.
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}
988 
989 
// Debug-mode check that |object| is a JSFunction; aborts otherwise.
// |object| is preserved via Push/Pop (CmpObjectType clobbers the map
// register, which is |object| itself here). Emits nothing in release code.
void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAFunction);
    Push(object);
    CmpObjectType(object, JS_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAFunction);
  }
}
1000 
1001 
// Debug-mode check that |object| is a JSBoundFunction; aborts otherwise.
// |object| is preserved via Push/Pop around the type check. Emits nothing
// in release code.
void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotABoundFunction);
    Push(object);
    CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotABoundFunction);
  }
}
1012 
// Debug-mode check that |object| is a JSGeneratorObject; aborts otherwise.
// |object| is preserved via Push/Pop around the type check. Emits nothing
// in release code.
void MacroAssembler::AssertGeneratorObject(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAGeneratorObject);
    Push(object);
    CmpObjectType(object, JS_GENERATOR_OBJECT_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAGeneratorObject);
  }
}
1023 
// Debug-mode check that |object| is a JS receiver (instance type >=
// FIRST_JS_RECEIVER_TYPE; the static assert guarantees receivers extend to
// the end of the type range, so no upper bound check is needed). Aborts
// otherwise. Emits nothing in release code.
void MacroAssembler::AssertReceiver(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAReceiver);
    Push(object);
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, object);
    Pop(object);
    Check(above_equal, kOperandIsNotAReceiver);
  }
}
1035 
1036 
// Debug-mode check that |object| is either the undefined value or an
// AllocationSite (identified by its map at offset 0, i.e. the map word).
// Aborts otherwise; also asserts |object| is not a smi. Emits nothing in
// release code.
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    // FieldOperand(object, 0) is the map word (HeapObject::kMapOffset == 0).
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}
1049 
1050 
// Debug-mode check that |object| is NOT a smi; aborts otherwise. Emits
// nothing in release code.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}
1057 
// Emits a stub frame prologue: saves the caller's ebp, establishes the new
// frame pointer, and pushes the frame type as a smi marker.
void MacroAssembler::StubPrologue(StackFrame::Type type) {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(Immediate(Smi::FromInt(type)));
}
1063 
// Emits the JS function prologue inside a fixed-size code-age sequence.
// If |code_pre_aging| is set, the normal prologue is replaced by a call to
// the MarkCodeAsExecutedOnce builtin padded with nops to the same length,
// so the two variants can be patched over each other.
void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictible_code_size_scope(this,
      kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
      // Pre-age the code.
    call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
        RelocInfo::CODE_AGE_SEQUENCE);
    // Pad so both prologue variants occupy kNoCodeAgeSequenceLength bytes.
    Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
  } else {
    push(ebp);  // Caller's frame pointer.
    mov(ebp, esp);
    push(esi);  // Callee's context.
    push(edi);  // Callee's JS function.
  }
}
1079 
1080 
// Loads the current function's type feedback vector into |vector| by
// chasing: frame function slot -> literals array -> feedback vector.
void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  mov(vector, FieldOperand(vector, JSFunction::kLiteralsOffset));
  mov(vector, FieldOperand(vector, LiteralsArray::kFeedbackVectorOffset));
}
1086 
1087 
// Overload taking a constant-pool flag; ia32 has no out-of-line constant
// pool, so this must never be called.
void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on ia32.
  UNREACHABLE();
}
1093 
1094 
// Emits a frame entry: saves ebp, sets the new frame pointer, pushes the
// frame-type smi marker, and for INTERNAL frames also pushes the code
// object. In debug code, verifies the code object slot was patched (it must
// not still hold the undefined placeholder).
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(Immediate(Smi::FromInt(type)));
  if (type == StackFrame::INTERNAL) {
    push(Immediate(CodeObject()));
  }
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
1107 
1108 
// Tears down a frame entered with EnterFrame. In debug code, first checks
// that the frame-type marker on the stack matches |type|.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  leave();  // mov esp, ebp; pop ebp.
}
1117 
// Builds a builtin frame: saves ebp, sets the frame pointer, then pushes
// context, target, and argument count (popped in reverse by
// LeaveBuiltinFrame).
void MacroAssembler::EnterBuiltinFrame(Register context, Register target,
                                       Register argc) {
  Push(ebp);
  Move(ebp, esp);
  Push(context);
  Push(target);
  Push(argc);
}
1126 
// Tears down a frame built by EnterBuiltinFrame, restoring argc, target,
// and context (reverse push order) before leaving the frame.
void MacroAssembler::LeaveBuiltinFrame(Register context, Register target,
                                       Register argc) {
  Pop(argc);
  Pop(target);
  Pop(context);
  leave();
}
1134 
// First half of exit-frame construction: lays out the fixed part of an
// EXIT/BUILTIN_EXIT frame (frame-type marker, saved-sp slot, code object)
// and records ebp, esi (context), and ebx (expected to hold the C function
// address) in the corresponding isolate-level variables.
void MacroAssembler::EnterExitFramePrologue(StackFrame::Type frame_type) {
  DCHECK(frame_type == StackFrame::EXIT ||
         frame_type == StackFrame::BUILTIN_EXIT);

  // Set up the frame structure on the stack.
  DCHECK_EQ(+2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(+1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  push(Immediate(Smi::FromInt(frame_type)));
  DCHECK_EQ(-2 * kPointerSize, ExitFrameConstants::kSPOffset);
  push(Immediate(0));  // Saved entry sp, patched before call.
  DCHECK_EQ(-3 * kPointerSize, ExitFrameConstants::kCodeOffset);
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  ExternalReference c_function_address(Isolate::kCFunctionAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
  mov(Operand::StaticVariable(c_function_address), ebx);
}
1161 
1162 
// Second half of exit-frame construction: reserves |argc| stack slots
// (plus space to spill all XMM registers when |save_doubles| is set),
// aligns esp to the OS frame alignment, and patches the saved entry sp
// slot so the GC can find the frame boundary.
void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kMaxNumRegisters * kDoubleSize +
                argc * kPointerSize;
    sub(esp, Immediate(space));
    // Spill slots sit just below the fixed exit-frame area.
    const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
1188 
// Builds a full exit frame for calling out to C++. On entry eax holds the
// argument count; edi receives argc and esi receives a pointer to the last
// JS argument (argv), both callee-saved across the C call.
void MacroAssembler::EnterExitFrame(int argc, bool save_doubles,
                                    StackFrame::Type frame_type) {
  EnterExitFramePrologue(frame_type);

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(argc, save_doubles);
}
1201 
1202 
// Builds an exit frame for API callbacks: a plain EXIT frame with |argc|
// slots and no XMM spilling (and no argc/argv register setup).
void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue(StackFrame::EXIT);
  EnterExitFrameEpilogue(argc, false);
}
1207 
1208 
// Tears down an exit frame built by EnterExitFrame. |save_doubles| must
// match the entry call so the spilled XMM registers are restored. When
// |pop_arguments| is set, the JS arguments and receiver are popped using
// esi, which is expected to still point at the last argument as set up by
// EnterExitFrame; ecx is clobbered to carry the return address.
void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    mov(ecx, Operand(ebp, 1 * kPointerSize));
    mov(ebp, Operand(ebp, 0 * kPointerSize));

    // Pop the arguments and the receiver from the caller stack.
    lea(esp, Operand(esi, 1 * kPointerSize));

    // Push the return address to get ready to return.
    push(ecx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  LeaveExitFrameEpilogue(true);
}
1236 
1237 
// Final exit-frame teardown: optionally restores esi from the saved
// context slot in the isolate, zeroes that slot in debug builds, and clears
// the isolate's C-entry frame pointer to mark "no exit frame active".
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
1253 
1254 
// Tears down a frame built by EnterApiExitFrame: restores esp/ebp directly
// (no argument popping, no XMM restore), then runs the common epilogue.
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}
1261 
1262 
// Pushes a new stack handler (a single next-pointer slot, per the static
// asserts) linking it into the isolate's handler chain and making it the
// current handler.
void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));

  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}
1275 
1276 
// Unlinks the current stack handler: restores the isolate's handler pointer
// from the handler's next slot and drops the rest of the handler (a no-op
// add here since the handler is one word, per the static assert in
// PushStackHandler).
void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
1283 
1284 
// Compute the hash code from the untagged key.  This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stubs-hydrogen.cc
//
// Note: r0 will contain hash code
// Computes the integer hash of the untagged key in |r0| in place, clobbering
// |scratch|. The key is first mixed with the heap's hash seed (loaded from
// the roots array when serializing so the snapshot stays seed-independent,
// otherwise baked in as an immediate), then scrambled with the standard
// shift/xor/multiply sequence and masked to 30 bits.
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (serializer_enabled()) {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);  // Seed is stored as a smi in the roots array.
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
  // Truncate to 30 bits so the result always fits in a smi.
  and_(r0, 0x3fffffff);
}
1328 
// Loads the current allocation top (new-space or old-space per |flags|)
// into |result|. With RESULT_CONTAINS_TOP the caller already has top in
// |result| (debug code verifies this) and |scratch| must be no_reg. When a
// scratch register is available, it is left holding the address of the
// allocation-top variable for later use by UpdateAllocationTopHelper.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
1355 
1356 
// Stores |result_end| as the new allocation top. If |scratch| is a real
// register it is expected to already hold the address of the allocation-top
// variable (as left there by LoadAllocationTopHelper). Debug code checks
// the new top is object-aligned.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
1375 
1376 
// Allocates a fixed-size object of |object_size| bytes, leaving a tagged
// pointer in |result| and (when valid) the untagged end address in
// |result_end|. Jumps to |gc_required| on failure. |scratch|, if valid,
// caches the allocation-top address across the load/update helpers. With
// inline allocation disabled, debug code trashes the output registers with
// recognizable markers and jumps straight to |gc_required|.
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK(object_size <= kMaxRegularHeapObjectSize);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // In old space the limit is not necessarily aligned, so check before
      // writing the filler word.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    // Fill the alignment gap with a one-word filler object.
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  Register top_reg = result_end.is_valid() ? result_end : result;

  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    UpdateAllocationTopHelper(top_reg, scratch, flags);
  }

  if (top_reg.is(result)) {
    // result currently holds the end; subtract back to the (tagged) start.
    sub(result, Immediate(object_size - kHeapObjectTag));
  } else {
    // Tag the result.
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }
}
1448 
1449 
// Allocates an object of header_size + element_count * element_size bytes
// (the sum is assumed not to overflow). |element_count_type| says whether
// |element_count| holds a smi or a raw int32; for smis the scale factor is
// halved to compensate for the tag shift. On success |result| holds the
// tagged pointer and |result_end| the untagged end address; jumps to
// |gc_required| on failure. |element_count| itself is not modified.
void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              RegisterValueType element_count_type,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  DCHECK((flags & ALLOCATION_FOLDING_DOMINATOR) == 0);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  // We assume that element_count*element_size + header_size does not
  // overflow.
  if (element_count_type == REGISTER_VALUE_IS_SMI) {
    // A smi is the value shifted left by one, so scale down one step.
    STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
    DCHECK(element_size >= times_2);
    DCHECK(kSmiTagSize == 1);
    element_size = static_cast<ScaleFactor>(element_size - 1);
  } else {
    DCHECK(element_count_type == REGISTER_VALUE_IS_INT32);
  }

  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result.
  DCHECK(kHeapObjectTag == 1);
  inc(result);

  UpdateAllocationTopHelper(result_end, scratch, flags);
}
1525 
1526 
// Allocates an object whose byte size is held in register |object_size|.
// On success |result| holds the tagged pointer and |result_end| the
// untagged end address; jumps to |gc_required| on failure. |object_size|
// is left unchanged (unless it aliases |result_end|).
void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result.
  DCHECK(kHeapObjectTag == 1);
  inc(result);

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    UpdateAllocationTopHelper(result_end, scratch, flags);
  }
}
1590 
// Fast-path allocation of |object_size| bytes with no limit check and no
// GC bailout — the caller must guarantee the space exists (e.g. inside an
// allocation-folded region). Leaves a tagged pointer in |result| and the
// untagged end address in |result_end|.
void MacroAssembler::FastAllocate(int object_size, Register result,
                                  Register result_end, AllocationFlags flags) {
  DCHECK(!result.is(result_end));
  // Load address of new object into result.
  LoadAllocationTopHelper(result, no_reg, flags);

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    // Fill the alignment gap with a one-word filler object.
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  lea(result_end, Operand(result, object_size));
  UpdateAllocationTopHelper(result_end, no_reg, flags);

  // Tag the result.
  DCHECK(kHeapObjectTag == 1);
  inc(result);
}
1614 
// Fast-path allocation where the byte size is in register |object_size|.
// Like the fixed-size FastAllocate: no limit check and no GC bailout, so
// the caller must guarantee the space exists. Leaves a tagged pointer in
// |result| and the untagged end address in |result_end|.
void MacroAssembler::FastAllocate(Register object_size, Register result,
                                  Register result_end, AllocationFlags flags) {
  DCHECK(!result.is(result_end));
  // Load address of new object into result.
  LoadAllocationTopHelper(result, no_reg, flags);

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    // Fill the alignment gap with a one-word filler object.
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  lea(result_end, Operand(result, object_size, times_1, 0));
  UpdateAllocationTopHelper(result_end, no_reg, flags);

  // Tag the result.
  DCHECK(kHeapObjectTag == 1);
  inc(result);
}
1638 
1639 
AllocateHeapNumber(Register result,Register scratch1,Register scratch2,Label * gc_required,MutableMode mode)1640 void MacroAssembler::AllocateHeapNumber(Register result,
1641                                         Register scratch1,
1642                                         Register scratch2,
1643                                         Label* gc_required,
1644                                         MutableMode mode) {
1645   // Allocate heap number in new space.
1646   Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
1647            NO_ALLOCATION_FLAGS);
1648 
1649   Handle<Map> map = mode == MUTABLE
1650       ? isolate()->factory()->mutable_heap_number_map()
1651       : isolate()->factory()->heap_number_map();
1652 
1653   // Set the map.
1654   mov(FieldOperand(result, HeapObject::kMapOffset), Immediate(map));
1655 }
1656 
1657 
// Allocates a SeqTwoByteString with room for |length| characters and
// initializes its map, length (stored as a Smi) and hash field. Jumps to
// |gc_required| if new-space allocation fails. All scratch registers are
// clobbered; |length| is preserved.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  // Round down to object alignment.
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize, times_1, scratch1,
           REGISTER_VALUE_IS_INT32, result, scratch2, scratch3, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1686 
1687 
// Allocates a SeqOneByteString with room for |length| characters and
// initializes its map, length (stored as a Smi) and hash field. Jumps to
// |gc_required| if new-space allocation fails. All scratch registers are
// clobbered; |length| is preserved.
void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  DCHECK(kCharSize == 1);
  // Round the byte count up to object alignment.
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::kHeaderSize, times_1, scratch1,
           REGISTER_VALUE_IS_INT32, result, scratch2, scratch3, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1714 
1715 
AllocateOneByteString(Register result,int length,Register scratch1,Register scratch2,Label * gc_required)1716 void MacroAssembler::AllocateOneByteString(Register result, int length,
1717                                            Register scratch1, Register scratch2,
1718                                            Label* gc_required) {
1719   DCHECK(length > 0);
1720 
1721   // Allocate one-byte string in new space.
1722   Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
1723            gc_required, NO_ALLOCATION_FLAGS);
1724 
1725   // Set the map, length and hash field.
1726   mov(FieldOperand(result, HeapObject::kMapOffset),
1727       Immediate(isolate()->factory()->one_byte_string_map()));
1728   mov(FieldOperand(result, String::kLengthOffset),
1729       Immediate(Smi::FromInt(length)));
1730   mov(FieldOperand(result, String::kHashFieldOffset),
1731       Immediate(String::kEmptyHashField));
1732 }
1733 
1734 
// Allocates an uninitialized two-byte ConsString and installs its map.
// Jumps to |gc_required| on allocation failure.
void MacroAssembler::AllocateTwoByteConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}
1747 
1748 
// Allocates an uninitialized one-byte ConsString and installs its map.
// Jumps to |gc_required| on allocation failure.
void MacroAssembler::AllocateOneByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate cons string in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_one_byte_string_map()));
}
1760 
1761 
// Allocates an uninitialized two-byte SlicedString and installs its map.
// Jumps to |gc_required| on allocation failure.
void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                          Register scratch1,
                                          Register scratch2,
                                          Label* gc_required) {
  // Allocate sliced string in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}
1774 
1775 
// Allocates an uninitialized one-byte SlicedString and installs its map.
// Jumps to |gc_required| on allocation failure.
void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_one_byte_string_map()));
}
1788 
1789 
// Allocates a JSValue wrapper object: its map is taken from |constructor|'s
// initial map, properties and elements are set to the empty fixed array, and
// the value slot holds |value|. Jumps to |gc_required| on allocation failure.
// |scratch| is clobbered.
void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch,
                                     Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch, no_reg, gc_required,
           NO_ALLOCATION_FLAGS);

  // Initialize the JSValue.
  LoadGlobalFunctionInitialMap(constructor, scratch);
  mov(FieldOperand(result, HeapObject::kMapOffset), scratch);
  LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
  mov(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
  mov(FieldOperand(result, JSObject::kElementsOffset), scratch);
  mov(FieldOperand(result, JSValue::kValueOffset), value);
  // The four stores above must cover the whole object.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}
1810 
// Stores |filler| into every pointer-sized slot in the half-open range
// [current_address, end_address). |current_address| is advanced and ends
// at or past |end_address|; |end_address| and |filler| are preserved.
void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
                                                Register end_address,
                                                Register filler) {
  Label loop, entry;
  // Jump straight to the loop condition so an empty range stores nothing.
  jmp(&entry, Label::kNear);
  bind(&loop);
  mov(Operand(current_address, 0), filler);
  add(current_address, Immediate(kPointerSize));
  bind(&entry);
  cmp(current_address, end_address);
  j(below, &loop, Label::kNear);
}
1823 
1824 
// Tests a single bit of a Smi-encoded bitfield stored at |field_offset| in
// |object|, leaving the result in the zero flag (zero = bit clear). The bit
// index is shifted past the Smi tag, then the containing byte is tested so
// only one byte of the field is read.
void MacroAssembler::BooleanBitTest(Register object,
                                    int field_offset,
                                    int bit_index) {
  // Skip the Smi tag bits of the encoded field.
  bit_index += kSmiTagSize + kSmiShiftSize;
  DCHECK(base::bits::IsPowerOfTwo32(kBitsPerByte));
  int byte_index = bit_index / kBitsPerByte;
  int byte_bit_index = bit_index & (kBitsPerByte - 1);
  test_b(FieldOperand(object, field_offset + byte_index),
         Immediate(1 << byte_bit_index));
}
1835 
1836 
1837 
// Jumps to |then_label| if |result| is zero and |op| is negative — i.e. the
// operation that produced |result| may have produced a negative zero.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  // Non-zero result: cannot be -0, nothing to do.
  test(result, result);
  j(not_zero, &ok, Label::kNear);
  // Result is zero; branch if the operand's sign bit is set.
  test(op, op);
  j(sign, then_label, Label::kNear);
  bind(&ok);
}
1848 
1849 
// Two-operand variant: jumps to |then_label| if |result| is zero and either
// |op1| or |op2| is negative (their OR has the sign bit set). |scratch| is
// clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  // Non-zero result: cannot be -0, nothing to do.
  test(result, result);
  j(not_zero, &ok, Label::kNear);
  // OR the operands; the sign flag is set if either was negative.
  mov(scratch, op1);
  or_(scratch, op2);
  j(sign, then_label, Label::kNear);
  bind(&ok);
}
1863 
1864 
// Loads |map|'s constructor into |result|. The constructor-or-back-pointer
// field may point at another map (a back pointer); follow the chain until a
// non-map value (Smi or non-Map heap object) is found. |temp| is clobbered.
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp) {
  Label done, loop;
  mov(result, FieldOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  // A Smi or a non-map object terminates the back-pointer chain.
  JumpIfSmi(result, &done, Label::kNear);
  CmpObjectType(result, MAP_TYPE, temp);
  j(not_equal, &done, Label::kNear);
  mov(result, FieldOperand(result, Map::kConstructorOrBackPointerOffset));
  jmp(&loop);
  bind(&done);
}
1877 
1878 
// Loads |function|'s prototype into |result|, jumping to |miss| if the
// prototype has not been materialized yet (the field holds the hole). If the
// field holds an initial map, the prototype is read from that map instead.
// |scratch| is clobbered.
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Register scratch, Label* miss) {
  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}
1902 
1903 
// Emits a call to |stub|'s code object, recording |ast_id| in the reloc info.
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
1908 
1909 
// Emits a tail call (jump) to |stub|'s code object.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1913 
1914 
// Returns from a stub, popping |argc| - 1 stack arguments (the receiver is
// handled by the return itself). Only valid while generating a stub.
void MacroAssembler::StubReturn(int argc) {
  DCHECK(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
1919 
1920 
AllowThisStubCall(CodeStub * stub)1921 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
1922   return has_frame_ || !stub->SometimesSetsUpAFrame();
1923 }
1924 
// Calls the runtime function |f| with |num_arguments| stack arguments via the
// CEntryStub. Clobbers eax (argument count) and ebx (runtime entry address).
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(isolate(), 1, save_doubles);
  CallStub(&ces);
}
1942 
1943 
// Calls the C entry point |ref| with |num_arguments| stack arguments via the
// CEntryStub. Unlike CallRuntime, no argument-count check is performed.
// Clobbers eax and ebx.
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}
1952 
1953 
// Tail-calls the runtime function |fid| via the C entry stub, reusing the
// caller's stack arguments. Clobbers eax (fixed-arity count) and ebx.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  // ----------- S t a t e -------------
  //  -- esp[0]                 : return address
  //  -- esp[4]                 : argument num_arguments - 1
  //  ...
  //  -- esp[4 * num_arguments] : argument 0 (receiver)
  //
  //  For runtime functions with variable arguments:
  //  -- eax                    : number of arguments
  // -----------------------------------

  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant. At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.
    mov(eax, Immediate(function->nargs));
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}
1976 
// Jumps to the C function |ext| through the C entry runtime stub. Clobbers
// ebx (the stub reads the entry point from it).
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             bool builtin_exit_frame) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
                 builtin_exit_frame);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
1985 
// Drops the current frame in preparation for a tail call: the callee's
// arguments (plus return address and receiver) are copied down over the
// caller's arguments, ebp is restored to the caller's frame pointer, and esp
// is moved to the new top of stack. |caller_args_count_reg|, |scratch0| and
// |scratch1| are clobbered.
void MacroAssembler::PrepareForTailCall(
    const ParameterCount& callee_args_count, Register caller_args_count_reg,
    Register scratch0, Register scratch1, ReturnAddressState ra_state,
    int number_of_temp_values_after_return_address) {
#if DEBUG
  if (callee_args_count.is_reg()) {
    DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
                       scratch1));
  } else {
    DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
  }
  DCHECK(ra_state != ReturnAddressState::kNotOnStack ||
         number_of_temp_values_after_return_address == 0);
#endif

  // Calculate the destination address where we will put the return address
  // after we drop current frame.
  Register new_sp_reg = scratch0;
  if (callee_args_count.is_reg()) {
    // caller_args_count_reg becomes the (signed) surplus of caller over
    // callee arguments; new_sp is that many slots above the caller PC slot.
    sub(caller_args_count_reg, callee_args_count.reg());
    lea(new_sp_reg,
        Operand(ebp, caller_args_count_reg, times_pointer_size,
                StandardFrameConstants::kCallerPCOffset -
                    number_of_temp_values_after_return_address * kPointerSize));
  } else {
    lea(new_sp_reg, Operand(ebp, caller_args_count_reg, times_pointer_size,
                            StandardFrameConstants::kCallerPCOffset -
                                (callee_args_count.immediate() +
                                 number_of_temp_values_after_return_address) *
                                    kPointerSize));
  }

  if (FLAG_debug_code) {
    cmp(esp, new_sp_reg);
    Check(below, kStackAccessBelowStackPointer);
  }

  // Copy return address from caller's frame to current frame's return address
  // to avoid its trashing and let the following loop copy it to the right
  // place.
  Register tmp_reg = scratch1;
  if (ra_state == ReturnAddressState::kOnStack) {
    mov(tmp_reg, Operand(ebp, StandardFrameConstants::kCallerPCOffset));
    mov(Operand(esp, number_of_temp_values_after_return_address * kPointerSize),
        tmp_reg);
  } else {
    DCHECK(ReturnAddressState::kNotOnStack == ra_state);
    DCHECK_EQ(0, number_of_temp_values_after_return_address);
    Push(Operand(ebp, StandardFrameConstants::kCallerPCOffset));
  }

  // Restore caller's frame pointer now as it could be overwritten by
  // the copying loop.
  mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // +2 here is to copy both receiver and return address.
  Register count_reg = caller_args_count_reg;
  if (callee_args_count.is_reg()) {
    lea(count_reg, Operand(callee_args_count.reg(),
                           2 + number_of_temp_values_after_return_address));
  } else {
    mov(count_reg, Immediate(callee_args_count.immediate() + 2 +
                             number_of_temp_values_after_return_address));
    // TODO(ishell): Unroll copying loop for small immediate values.
  }

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).
  Label loop, entry;
  jmp(&entry, Label::kNear);
  bind(&loop);
  dec(count_reg);
  mov(tmp_reg, Operand(esp, count_reg, times_pointer_size, 0));
  mov(Operand(new_sp_reg, count_reg, times_pointer_size, 0), tmp_reg);
  bind(&entry);
  cmp(count_reg, Immediate(0));
  j(not_equal, &loop, Label::kNear);

  // Leave current frame.
  mov(esp, new_sp_reg);
}
2067 
// Compares the expected and actual argument counts and, on a mismatch, calls
// or tail-jumps to the ArgumentsAdaptorTrampoline. Sets eax to the actual
// count (and ebx to the expected count where the adaptor needs it). When both
// counts are compile-time immediates and known equal (or expected is the
// don't-adapt sentinel), no code is emitted. *definitely_mismatches is set
// when the counts are known to differ at compile time; in that case control
// never reaches |done| for CALL_FUNCTION.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    mov(eax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      mov(eax, actual.immediate());
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      DCHECK(expected.reg().is(ebx));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      DCHECK(actual.reg().is(eax));
      DCHECK(expected.reg().is(ebx));
    } else {
      // Expected and actual are the same register: trivially a match.
      Move(eax, actual.reg());
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, done_near);
      }
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
2133 
2134 
// If the debugger's last step action is StepIn or stronger, calls
// Runtime::kDebugPrepareStepInIfStepping for |fun|. Register-based
// expected/actual counts and |new_target| are preserved across the runtime
// call by smi-tagging and pushing them (restored in reverse order).
void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  cmpb(Operand::StaticVariable(last_step_action), Immediate(StepIn));
  j(less, &skip_flooding);
  {
    // Only create a new frame if we don't have one already.
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    // Push |fun| twice: once as the runtime argument, once to survive the
    // call and be popped back below.
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_flooding);
}
2176 
2177 
// Invokes the code field of |function| (which must be in edi) after argument
// adaption. |new_target|, if valid, must be in edx; otherwise edx is cleared
// to undefined. With CALL_FUNCTION the code is called, with JUMP_FUNCTION it
// is tail-jumped to.
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(edi));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(edx));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    mov(edx, isolate()->factory()->undefined_value());
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 Label::kNear, call_wrapper);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Operand code = FieldOperand(function, JSFunction::kCodeEntryOffset);
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}
2217 
2218 
// Invokes |fun| (must be in edi) reading the expected argument count from its
// SharedFunctionInfo. Loads the function's context into esi and clobbers ebx
// with the untagged formal parameter count.
void MacroAssembler::InvokeFunction(Register fun,
                                    Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeFunctionCode(edi, new_target, expected, actual, flag, call_wrapper);
}
2236 
2237 
// Invokes |fun| (must be in edi) with a caller-supplied expected argument
// count and no new.target. Loads the function's context into esi.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  InvokeFunctionCode(edi, no_reg, expected, actual, flag, call_wrapper);
}
2251 
2252 
// Handle-based convenience overload: materializes |function| into edi and
// dispatches to the register-based InvokeFunction above.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  LoadHeapObject(edi, function);
  InvokeFunction(edi, expected, actual, flag, call_wrapper);
}
2261 
2262 
// Loads into |dst| the context |context_chain_length| hops up the previous-
// context chain starting from the current context in esi (0 hops = current
// context, copied out of esi).
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context.  Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, kVariableResolvedToWithContext);
  }
}
2287 
2288 
// Loads the global proxy of the current native context into |dst|.
void MacroAssembler::LoadGlobalProxy(Register dst) {
  mov(dst, NativeContextOperand());
  mov(dst, ContextOperand(dst, Context::GLOBAL_PROXY_INDEX));
}
2293 
2294 
// If |map_in_out| is the native context's cached array map for
// |expected_kind|, replaces it with the cached map for |transitioned_kind|;
// otherwise jumps to |no_map_match|. |scratch| is clobbered.
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  mov(scratch, NativeContextOperand());
  cmp(map_in_out,
      ContextOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  mov(map_in_out,
      ContextOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}
2314 
2315 
// Loads the native-context slot |index| (a global function) into |function|.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the native context from the current context.
  mov(function, NativeContextOperand());
  // Load the function from the native context.
  mov(function, ContextOperand(function, index));
}
2322 
2323 
// Loads the initial map of |function| into |map|. In debug builds, aborts
// if the prototype-or-initial-map slot does not actually hold a map.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map.  The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    // A map's own map is the meta map; anything else means the slot held
    // a prototype object instead of an initial map.
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}
2337 
2338 
// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}
2344 
2345 
// Store the immediate |src| in the safepoint register stack slot for |dst|.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}
2349 
2350 
// Load |dst| from the safepoint register stack slot corresponding to |src|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}
2354 
2355 
// Returns the esp-relative operand addressing the safepoint stack slot that
// holds the saved value of |reg|.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
2359 
2360 
SafepointRegisterStackIndex(int reg_code)2361 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
2362   // The registers are pushed starting with the lowest encoding,
2363   // which means that lowest encodings are furthest away from
2364   // the stack pointer.
2365   DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
2366   return kNumSafepointRegisters - reg_code - 1;
2367 }
2368 
2369 
// Loads the (handle-referenced) heap object into |result|.
void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  mov(result, object);
}
2374 
2375 
// Compares |reg| against the (handle-referenced) heap object; sets flags.
void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
  cmp(reg, object);
}
2379 
// Pushes the (handle-referenced) heap object onto the stack.
void MacroAssembler::PushHeapObject(Handle<HeapObject> object) { Push(object); }
2381 
// Compares |value| against the value held by the weak cell; sets flags.
// |scratch| is clobbered (holds the cell pointer).
void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  mov(scratch, cell);
  cmp(value, FieldOperand(scratch, WeakCell::kValueOffset));
}
2387 
2388 
// Loads the value held by the weak cell into |value|. If the cell has been
// cleared, |value| ends up holding a Smi (the cleared sentinel).
void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  mov(value, cell);
  mov(value, FieldOperand(value, WeakCell::kValueOffset));
}
2393 
2394 
// Loads the value held by the weak cell into |value|, jumping to |miss| if
// the cell has been cleared (cleared cells hold a Smi).
void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}
2400 
2401 
// Plain return, dropping no arguments.
void MacroAssembler::Ret() {
  ret(0);
}
2405 
2406 
Ret(int bytes_dropped,Register scratch)2407 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
2408   if (is_uint16(bytes_dropped)) {
2409     ret(bytes_dropped);
2410   } else {
2411     pop(scratch);
2412     add(esp, Immediate(bytes_dropped));
2413     push(scratch);
2414     ret(0);
2415   }
2416 }
2417 
2418 
Drop(int stack_elements)2419 void MacroAssembler::Drop(int stack_elements) {
2420   if (stack_elements > 0) {
2421     add(esp, Immediate(stack_elements * kPointerSize));
2422   }
2423 }
2424 
2425 
Move(Register dst,Register src)2426 void MacroAssembler::Move(Register dst, Register src) {
2427   if (!dst.is(src)) {
2428     mov(dst, src);
2429   }
2430 }
2431 
2432 
Move(Register dst,const Immediate & x)2433 void MacroAssembler::Move(Register dst, const Immediate& x) {
2434   if (x.is_zero() && RelocInfo::IsNone(x.rmode_)) {
2435     xor_(dst, dst);  // Shorter than mov of 32-bit immediate 0.
2436   } else {
2437     mov(dst, x);
2438   }
2439 }
2440 
2441 
// Stores the immediate |x| to the memory operand |dst|.
void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
2445 
2446 
// Materializes the 32-bit constant |src| in XMM register |dst| without a
// memory constant pool. Special-cases zero and contiguous-bit-run values,
// which can be synthesized from all-ones via shifts.
void MacroAssembler::Move(XMMRegister dst, uint32_t src) {
  if (src == 0) {
    pxor(dst, dst);
  } else {
    unsigned cnt = base::bits::CountPopulation32(src);
    unsigned nlz = base::bits::CountLeadingZeros32(src);
    unsigned ntz = base::bits::CountTrailingZeros32(src);
    // nlz + cnt + ntz == 32 iff the set bits form one contiguous run:
    // start from all-ones (pcmpeqd) and shift the run into position.
    if (nlz + cnt + ntz == 32) {
      pcmpeqd(dst, dst);
      if (ntz == 0) {
        psrld(dst, 32 - cnt);
      } else {
        pslld(dst, 32 - cnt);
        if (nlz != 0) psrld(dst, nlz);
      }
    } else {
      // General case: route the immediate through eax (preserved via
      // push/pop) into the XMM register.
      push(eax);
      mov(eax, Immediate(src));
      movd(dst, Operand(eax));
      pop(eax);
    }
  }
}
2470 
2471 
// Materializes the 64-bit constant |src| in XMM register |dst|. Picks the
// cheapest available sequence: zeroing, shifting an all-ones run, a 32-bit
// load shifted up, SSE4.1 pinsrd, or (fallback) a stack round-trip.
void MacroAssembler::Move(XMMRegister dst, uint64_t src) {
  if (src == 0) {
    pxor(dst, dst);
  } else {
    uint32_t lower = static_cast<uint32_t>(src);
    uint32_t upper = static_cast<uint32_t>(src >> 32);
    unsigned cnt = base::bits::CountPopulation64(src);
    unsigned nlz = base::bits::CountLeadingZeros64(src);
    unsigned ntz = base::bits::CountTrailingZeros64(src);
    // Contiguous run of set bits: synthesize from all-ones via shifts.
    if (nlz + cnt + ntz == 64) {
      pcmpeqd(dst, dst);
      if (ntz == 0) {
        psrlq(dst, 64 - cnt);
      } else {
        psllq(dst, 64 - cnt);
        if (nlz != 0) psrlq(dst, nlz);
      }
    } else if (lower == 0) {
      // Only the upper half is non-zero: load it and shift into place.
      Move(dst, upper);
      psllq(dst, 32);
    } else if (CpuFeatures::IsSupported(SSE4_1)) {
      // Insert both halves via eax (preserved with push/pop).
      CpuFeatureScope scope(this, SSE4_1);
      push(eax);
      Move(eax, Immediate(lower));
      movd(dst, Operand(eax));
      Move(eax, Immediate(upper));
      pinsrd(dst, Operand(eax), 1);
      pop(eax);
    } else {
      // No SSE4.1: build the 64-bit value on the stack and load it.
      push(Immediate(upper));
      push(Immediate(lower));
      movsd(dst, Operand(esp, 0));
      add(esp, Immediate(kDoubleSize));
    }
  }
}
2508 
2509 
// Extracts 32-bit lane |imm8| (0 or 1) of |src| into |dst|. Uses SSE4.1
// pextrd when available; otherwise emulates lane 1 via pshufd through xmm0.
// NOTE(review): the fallback clobbers xmm0.
void MacroAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
  if (imm8 == 0) {
    // Lane 0 is just a plain movd.
    movd(dst, src);
    return;
  }
  DCHECK_EQ(1, imm8);
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pextrd(dst, src, imm8);
    return;
  }
  // Shuffle lane 1 down to lane 0, then movd it out.
  pshufd(xmm0, src, 1);
  movd(dst, xmm0);
}
2524 
2525 
// Inserts the 32-bit value at |src| into lane |imm8| (0 or 1) of |dst|.
// Uses SSE4.1 pinsrd when available; otherwise emulates with unpack
// instructions. NOTE(review): the fallback clobbers xmm0.
void MacroAssembler::Pinsrd(XMMRegister dst, const Operand& src, int8_t imm8) {
  DCHECK(imm8 == 0 || imm8 == 1);
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pinsrd(dst, src, imm8);
    return;
  }
  movd(xmm0, src);
  if (imm8 == 1) {
    // Interleave: dst lane 0 stays, new value becomes lane 1.
    punpckldq(dst, xmm0);
  } else {
    DCHECK_EQ(0, imm8);
    // Move dst lane 1 down, then interleave so the new value is lane 0.
    psrlq(dst, 32);
    punpckldq(xmm0, dst);
    movaps(dst, xmm0);
  }
}
2543 
2544 
Lzcnt(Register dst,const Operand & src)2545 void MacroAssembler::Lzcnt(Register dst, const Operand& src) {
2546   if (CpuFeatures::IsSupported(LZCNT)) {
2547     CpuFeatureScope scope(this, LZCNT);
2548     lzcnt(dst, src);
2549     return;
2550   }
2551   Label not_zero_src;
2552   bsr(dst, src);
2553   j(not_zero, &not_zero_src, Label::kNear);
2554   Move(dst, Immediate(63));  // 63^31 == 32
2555   bind(&not_zero_src);
2556   xor_(dst, Immediate(31));  // for x in [0..31], 31^x == 31-x.
2557 }
2558 
2559 
Tzcnt(Register dst,const Operand & src)2560 void MacroAssembler::Tzcnt(Register dst, const Operand& src) {
2561   if (CpuFeatures::IsSupported(BMI1)) {
2562     CpuFeatureScope scope(this, BMI1);
2563     tzcnt(dst, src);
2564     return;
2565   }
2566   Label not_zero_src;
2567   bsf(dst, src);
2568   j(not_zero, &not_zero_src, Label::kNear);
2569   Move(dst, Immediate(32));  // The result of tzcnt is 32 if src = 0.
2570   bind(&not_zero_src);
2571 }
2572 
2573 
Popcnt(Register dst,const Operand & src)2574 void MacroAssembler::Popcnt(Register dst, const Operand& src) {
2575   if (CpuFeatures::IsSupported(POPCNT)) {
2576     CpuFeatureScope scope(this, POPCNT);
2577     popcnt(dst, src);
2578     return;
2579   }
2580   UNREACHABLE();
2581 }
2582 
2583 
SetCounter(StatsCounter * counter,int value)2584 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2585   if (FLAG_native_code_counters && counter->Enabled()) {
2586     mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
2587   }
2588 }
2589 
2590 
IncrementCounter(StatsCounter * counter,int value)2591 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2592   DCHECK(value > 0);
2593   if (FLAG_native_code_counters && counter->Enabled()) {
2594     Operand operand = Operand::StaticVariable(ExternalReference(counter));
2595     if (value == 1) {
2596       inc(operand);
2597     } else {
2598       add(operand, Immediate(value));
2599     }
2600   }
2601 }
2602 
2603 
DecrementCounter(StatsCounter * counter,int value)2604 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2605   DCHECK(value > 0);
2606   if (FLAG_native_code_counters && counter->Enabled()) {
2607     Operand operand = Operand::StaticVariable(ExternalReference(counter));
2608     if (value == 1) {
2609       dec(operand);
2610     } else {
2611       sub(operand, Immediate(value));
2612     }
2613   }
2614 }
2615 
2616 
// Conditionally adds |value| to the stats counter: the update only happens
// when condition |cc| holds. Flags are preserved across the update via
// pushfd/popfd since inc/add would clobber them.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2630 
2631 
// Conditionally subtracts |value| from the stats counter: the update only
// happens when condition |cc| holds. Flags are preserved across the update
// via pushfd/popfd since dec/sub would clobber them.
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2645 
2646 
// Emits a Check (abort unless |cc| holds) only in debug-code builds.
void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}
2650 
2651 
// Debug-code check that |elements| has one of the maps valid for fast
// elements (FixedArray, FixedDoubleArray, or copy-on-write FixedArray);
// aborts otherwise.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}
2669 
2670 
// Aborts with |reason| unless condition |cc| holds at runtime.
void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L);
  Abort(reason);
  // will not return here
  bind(&L);
}
2678 
2679 
// Emits a runtime check that esp satisfies the OS activation frame
// alignment; traps with int3 on misalignment. No-op when the required
// alignment is a single pointer (always satisfied).
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
2693 
2694 
// Emits code that aborts execution with |reason| by calling the Abort
// builtin (reason passed as a Smi in edx). In debug builds the reason is
// recorded as a comment, and --trap-on-abort traps immediately instead.
void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  // Check if Abort() has already been initialized.
  DCHECK(isolate()->builtins()->Abort()->IsHeapObject());

  Move(edx, Smi::FromInt(static_cast<int>(reason)));

  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    // (The call is duplicated in both branches because the FrameScope must
    // only be active when no real frame exists.)
    FrameScope scope(this, StackFrame::NONE);
    Call(isolate()->builtins()->Abort(), RelocInfo::CODE_TARGET);
  } else {
    Call(isolate()->builtins()->Abort(), RelocInfo::CODE_TARGET);
  }
  // will not return here
  int3();
}
2726 
2727 
// Loads the descriptor array of |map| into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}
2732 
2733 
// Extracts the number-of-own-descriptors field of |map| into |dst|.
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
2738 
2739 
LoadAccessor(Register dst,Register holder,int accessor_index,AccessorComponent accessor)2740 void MacroAssembler::LoadAccessor(Register dst, Register holder,
2741                                   int accessor_index,
2742                                   AccessorComponent accessor) {
2743   mov(dst, FieldOperand(holder, HeapObject::kMapOffset));
2744   LoadInstanceDescriptors(dst, dst);
2745   mov(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
2746   int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
2747                                            : AccessorPair::kSetterOffset;
2748   mov(dst, FieldOperand(dst, offset));
2749 }
2750 
2751 
// Loads 2^|power| as a double into |dst| by constructing the IEEE-754 bit
// pattern directly: biased exponent shifted into place, zero mantissa.
// |scratch| is clobbered.
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  // The biased exponent must fit the exponent field.
  DCHECK(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}
2761 
2762 
// Jumps to |failure| unless |instance_type| denotes a sequential one-byte
// string. |scratch| is clobbered (may alias |instance_type|).
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
    Register instance_type, Register scratch, Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Mask to the string-ness, representation and encoding bits, then compare
  // against the sequential one-byte string pattern.
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}
2773 
2774 
// Jumps to |failure| unless both |object1| and |object2| are sequential
// one-byte strings. Both scratch registers are clobbered.
void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register object1,
                                                           Register object2,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that both objects are not smis.
  // (and-ing the two pointers has the smi tag bit clear only if at least
  // one of them is a smi, since kSmiTag == 0.)
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  // The lea computes scratch1 + scratch2 * 8, i.e. scratch2 shifted left by
  // 3 bits; the mask fits in 3 bits so the fields do not overlap.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  and_(scratch1, kFlatOneByteStringMask);
  and_(scratch2, kFlatOneByteStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatOneByteStringTag | (kFlatOneByteStringTag << 3));
  j(not_equal, failure);
}
2805 
2806 
// Jumps to |not_unique_name| unless the instance type at |operand| denotes
// a unique name, i.e. an internalized string or a Symbol.
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  // Internalized strings have both the string and internalized bits zero.
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  // Not an internalized string: only a Symbol still qualifies.
  cmpb(operand, Immediate(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}
2819 
2820 
// Debug checks before writing a character into a sequential string:
// |string| must be a heap object with the expected representation/encoding,
// and the untagged |index| must be within [0, length). |value| is used as a
// scratch register but is preserved (push/pop); |index| is preserved via
// SmiTag/SmiUntag.
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object, Label::kNear);
  Abort(kNonObject);
  bind(&is_object);

  push(value);
  mov(value, FieldOperand(string, HeapObject::kMapOffset));
  movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmp(value, Immediate(encoding_mask));
  pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  SmiTag(index);
  // SmiTag sets the overflow flag if the index was too large to tag.
  Check(no_overflow, kIndexIsTooLarge);

  cmp(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  cmp(index, Immediate(Smi::kZero));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index
  SmiUntag(index);
}
2854 
2855 
// Reserves stack space for |num_arguments| C-call arguments and aligns esp
// to the OS activation frame alignment, saving the original esp just above
// the argument area so CallCFunction can restore it. |scratch| is clobbered.
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
2870 
2871 
// Calls the C function at external reference |function| with
// |num_arguments| already-pushed arguments (see PrepareCallCFunction).
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}
2878 
2879 
// Calls the C function whose address is in |function| and pops the argument
// area set up by PrepareCallCFunction. When the OS requires frame
// alignment, the saved esp (stored above the arguments) is reloaded instead
// of adding, undoing the alignment as well.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (base::OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}
2895 
2896 
#ifdef DEBUG
// Returns true if any two of the valid registers among reg1..reg8 alias
// each other (i.e. fewer distinct registers than valid arguments).
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  const Register all_regs[] = {reg1, reg2, reg3, reg4,
                               reg5, reg6, reg7, reg8};
  int n_of_valid_regs = 0;
  RegList regs = 0;
  for (const Register& reg : all_regs) {
    if (!reg.is_valid()) continue;
    n_of_valid_regs++;
    regs |= reg.bit();
  }
  // Duplicates collapse in the bit set, so a mismatch means aliasing.
  int n_of_non_aliasing_regs = NumRegs(regs);
  return n_of_valid_regs != n_of_non_aliasing_regs;
}
#endif
2924 
2925 
// Creates a macro assembler that writes directly over existing code at
// |address| for |size| bytes, so the code there can be patched in place.
CodePatcher::CodePatcher(Isolate* isolate, byte* address, int size)
    : address_(address),
      size_(size),
      masm_(isolate, address, size + Assembler::kGap, CodeObjectRequired::kNo) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2935 
2936 
// Flushes the instruction cache for the patched region and verifies that
// exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  Assembler::FlushICache(masm_.isolate(), address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2945 
2946 
// Tests the memory-chunk flags of the page containing |object| against
// |mask| and jumps to |condition_met| when the test result matches |cc|
// (zero or not_zero). |scratch| is clobbered (holds the page start).
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  // Compute the page start by masking off the low page-offset bits.
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Use a byte test when the mask fits in one byte (shorter encoding).
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
2968 
2969 
// Like CheckPageFlag, but for a statically-known map: the page flags are
// addressed via a baked-in external reference, which is only valid because
// maps are never compacted (and the serializer is off).
void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  DCHECK(!serializer_enabled());  // Serializer cannot match page_flags.
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  DCHECK(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  // Use a byte test when the mask fits in one byte (shorter encoding).
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), Immediate(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
2991 
2992 
// Jumps to |on_black| if |object| is marked black (bit pattern "11") in the
// incremental-marking bitmap. Both scratch registers (and ecx, via
// HasColor/GetMarkBits) are clobbered.
void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1, on_black, on_black_near, 1,
           1);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
}
3002 
3003 
// Jumps to |has_color| if |object|'s two mark bits equal
// (first_bit, second_bit). The second bit may live in the first byte of the
// next bitmap cell when the first bit is the last bit of its cell, hence
// the word-boundary special case. Clobbers both scratch registers and ecx.
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  // Test the first mark bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  // If the shift overflowed to zero, the second bit is in the next cell.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  // Second bit is bit 0 of the following bitmap cell.
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize),
         Immediate(1));

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}
3031 
3032 
// Computes, for the object at |addr_reg|, the address of its marking bitmap
// cell (into |bitmap_reg|) and a single-bit mask selecting its first mark
// bit within that cell (into |mask_reg|). Clobbers ecx.
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  // bitmap_reg = page start of the object.
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  // Byte offset of the bitmap cell within the bitmap:
  // (object offset / pointer size / bits-per-cell) * bytes-per-cell.
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  // Bit index of the mark bit within the cell.
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}
3053 
3054 
// Jumps to |value_is_white| if |value| is marked white ("00") in the
// incremental-marking bitmap. Clobbers both scratch registers and ecx.
void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Label* value_is_white,
                                 Label::Distance distance) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(zero, value_is_white, Label::kNear);
}
3072 
3073 
// Extracts the enum-cache length from |map|'s bit field 3 into |dst| as a
// Smi.
void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}
3080 
3081 
// Walks the prototype chain of the object in eax and jumps to |call_runtime|
// unless every object has a usable (and, past the receiver, empty) enum
// cache and no elements. Clobbers ebx, ecx, edx.
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::kZero));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
  j(not_equal, call_runtime);

  bind(&no_elements);
  // Advance to the prototype; stop when we hit null (end of the chain).
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}
3122 
3123 
// Checks whether an AllocationMemento directly follows the JSArray in
// |receiver_reg|. Sets the flags for an "equal" comparison against the
// allocation-memento map (caller branches on equal/not_equal); jumps to
// |no_memento_found| when a memento cannot exist. Clobbers |scratch_reg|.
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  Label map_check;
  Label top_check;
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  // The memento, if present, sits immediately after the array.
  const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
  const int kMementoLastWordOffset =
      kMementoMapOffset + AllocationMemento::kSize - kPointerSize;

  // Bail out if the object is not in new space.
  JumpIfNotInNewSpace(receiver_reg, scratch_reg, no_memento_found);
  // If the object is in new space, we need to check whether it is on the same
  // page as the current top.
  lea(scratch_reg, Operand(receiver_reg, kMementoLastWordOffset));
  xor_(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  test(scratch_reg, Immediate(~Page::kPageAlignmentMask));
  j(zero, &top_check);
  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  lea(scratch_reg, Operand(receiver_reg, kMementoLastWordOffset));
  xor_(scratch_reg, receiver_reg);
  test(scratch_reg, Immediate(~Page::kPageAlignmentMask));
  j(not_zero, no_memento_found);
  // Continue with the actual map check.
  jmp(&map_check);
  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  bind(&top_check);
  lea(scratch_reg, Operand(receiver_reg, kMementoLastWordOffset));
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater_equal, no_memento_found);
  // Memento map check.
  bind(&map_check);
  mov(scratch_reg, Operand(receiver_reg, kMementoMapOffset));
  cmp(scratch_reg, Immediate(isolate()->factory()->allocation_memento_map()));
}
3164 
3165 
JumpIfDictionaryInPrototypeChain(Register object,Register scratch0,Register scratch1,Label * found)3166 void MacroAssembler::JumpIfDictionaryInPrototypeChain(
3167     Register object,
3168     Register scratch0,
3169     Register scratch1,
3170     Label* found) {
3171   DCHECK(!scratch1.is(scratch0));
3172   Factory* factory = isolate()->factory();
3173   Register current = scratch0;
3174   Label loop_again, end;
3175 
3176   // scratch contained elements pointer.
3177   mov(current, object);
3178   mov(current, FieldOperand(current, HeapObject::kMapOffset));
3179   mov(current, FieldOperand(current, Map::kPrototypeOffset));
3180   cmp(current, Immediate(factory->null_value()));
3181   j(equal, &end);
3182 
3183   // Loop based on the map going up the prototype chain.
3184   bind(&loop_again);
3185   mov(current, FieldOperand(current, HeapObject::kMapOffset));
3186   STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
3187   STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
3188   CmpInstanceType(current, JS_OBJECT_TYPE);
3189   j(below, found);
3190   mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
3191   DecodeField<Map::ElementsKindBits>(scratch1);
3192   cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
3193   j(equal, found);
3194   mov(current, FieldOperand(current, Map::kPrototypeOffset));
3195   cmp(current, Immediate(factory->null_value()));
3196   j(not_equal, &loop_again);
3197 
3198   bind(&end);
3199 }
3200 
3201 
TruncatingDiv(Register dividend,int32_t divisor)3202 void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
3203   DCHECK(!dividend.is(eax));
3204   DCHECK(!dividend.is(edx));
3205   base::MagicNumbersForDivision<uint32_t> mag =
3206       base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
3207   mov(eax, Immediate(mag.multiplier));
3208   imul(dividend);
3209   bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
3210   if (divisor > 0 && neg) add(edx, dividend);
3211   if (divisor < 0 && !neg && mag.multiplier > 0) sub(edx, dividend);
3212   if (mag.shift > 0) sar(edx, mag.shift);
3213   mov(eax, dividend);
3214   shr(eax, 31);
3215   add(edx, eax);
3216 }
3217 
3218 
3219 }  // namespace internal
3220 }  // namespace v8
3221 
3222 #endif  // V8_TARGET_ARCH_IA32
3223