• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/v8.h"
6 
7 #if V8_TARGET_ARCH_IA32
8 
9 #include "src/base/bits.h"
10 #include "src/base/division-by-constant.h"
11 #include "src/bootstrapper.h"
12 #include "src/codegen.h"
13 #include "src/cpu-profiler.h"
14 #include "src/debug.h"
15 #include "src/isolate-inl.h"
16 #include "src/runtime.h"
17 #include "src/serialize.h"
18 
19 namespace v8 {
20 namespace internal {
21 
22 // -------------------------------------------------------------------------
23 // MacroAssembler implementation.
24 
// Constructs a MacroAssembler emitting into |buffer| of |size| bytes.
// |arg_isolate| may be NULL; the code-object handle is only materialized
// when an isolate is available.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (isolate() != NULL) {
    // TODO(titzer): should we just use a null handle here instead?
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}
35 
36 
Load(Register dst,const Operand & src,Representation r)37 void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
38   DCHECK(!r.IsDouble());
39   if (r.IsInteger8()) {
40     movsx_b(dst, src);
41   } else if (r.IsUInteger8()) {
42     movzx_b(dst, src);
43   } else if (r.IsInteger16()) {
44     movsx_w(dst, src);
45   } else if (r.IsUInteger16()) {
46     movzx_w(dst, src);
47   } else {
48     mov(dst, src);
49   }
50 }
51 
52 
Store(Register src,const Operand & dst,Representation r)53 void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
54   DCHECK(!r.IsDouble());
55   if (r.IsInteger8() || r.IsUInteger8()) {
56     mov_b(dst, src);
57   } else if (r.IsInteger16() || r.IsUInteger16()) {
58     mov_w(dst, src);
59   } else {
60     if (r.IsHeapObject()) {
61       AssertNotSmi(src);
62     } else if (r.IsSmi()) {
63       AssertSmi(src);
64     }
65     mov(dst, src);
66   }
67 }
68 
69 
// Loads the root value at |index| into |destination|. Constant roots are
// embedded directly as an immediate handle; all others are loaded
// indirectly through the isolate's roots array, reusing |destination| as
// the index register.
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
    mov(destination, value);
    return;
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(destination, Immediate(index));
  mov(destination, Operand::StaticArray(destination,
                                        times_pointer_size,
                                        roots_array_start));
}
83 
84 
// Stores |source| into the root-list slot |index|, using |scratch| to form
// the roots-array address. Only roots that may legally be written after
// heap initialization are allowed.
void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}
95 
96 
// Compares |with| against the root at |index|, loading the index into
// |scratch| to address the roots array. Leaves the flags set for a
// subsequent conditional jump.
void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                times_pointer_size,
                                roots_array_start));
}
107 
108 
// Compares |with| against a constant root; valid only for roots that can be
// treated as constants, so the handle can be embedded directly.
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}
114 
115 
// Memory-operand variant: compares |with| against a constant root; valid
// only for roots that can be treated as constants.
void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}
122 
123 
// Computes the page header of |object| into |scratch| and tests the page's
// new-space flag bits. Jumps to |condition_met| on |cc|: with not_equal the
// jump is taken when the object is in new space, with equal when it is not.
void MacroAssembler::InNewSpace(
    Register object,
    Register scratch,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == equal || cc == not_equal);
  // Mask away the low bits to get the containing page's header address.
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check that we can use a test_b (both flags must fit in the low byte).
  DCHECK(MemoryChunk::IN_FROM_SPACE < 8);
  DCHECK(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  // If non-zero, the page belongs to new-space.
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}
147 
148 
// Appends |addr| to the store buffer and, if the buffer overflowed, calls
// the StoreBufferOverflowStub. Depending on |and_then| the code either
// returns from the enclosing stub (kReturnAtEnd) or falls through
// (kFallThroughAtEnd). Clobbers |scratch|.
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr,
    Register scratch,
    SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  // Debug check: |object| must not be in new space (int3 traps if the
  // JumpIfNotInNewSpace branch is not taken).
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  // Check for end of buffer.
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
193 
194 
// Clamps the double in |input_reg| to an integer in [0, 255] in
// |result_reg|: negative inputs and NaN produce 0, inputs above 255 produce
// 255. |scratch_reg| is zeroed and used for the NaN/sign comparison.
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  xorps(scratch_reg, scratch_reg);
  cvtsd2si(result_reg, input_reg);
  // Already within [0, 255]: done.
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  // cvtsd2si produces 0x80000000 when conversion fails (NaN or out of int32
  // range); subtracting 1 from exactly that value overflows.
  cmp(result_reg, Immediate(0x1));
  j(overflow, &conv_failure, Label::kNear);
  // Valid int32 but out of range: sign flag (from the cmp above) picks
  // 0 for negative inputs, 255 for positive ones.
  mov(result_reg, Immediate(0));
  setcc(sign, result_reg);
  sub(result_reg, Immediate(1));
  and_(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  // Conversion failed: compare against +0.0 so NaN and negatives keep 0,
  // large positive values get 255.
  Move(result_reg, Immediate(0));
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);
  Move(result_reg, Immediate(255));
  bind(&done);
}
218 
219 
// Clamps the int32 in |reg| to [0, 255]: negative values become 0, values
// above 255 become 255, in-range values are left untouched.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);  // Already in [0, 255].
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
228 
229 
// Slow-path truncation: calls the DoubleToIStub to truncate the double
// stored at |offset| from |input_reg| into |result_reg|. Used when the
// inline cvttsd2si conversion overflowed.
void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}
236 
237 
// Truncates the double in |input_reg| to an int32 in |result_reg|.
// Fast path: cvttsd2si. On out-of-range input cvttsd2si yields 0x80000000,
// which is detected via the overflow of cmp-with-1; the value is then
// spilled to the stack and handled by the slow-path stub.
void MacroAssembler::TruncateDoubleToI(Register result_reg,
                                       XMMRegister input_reg) {
  Label done;
  cvttsd2si(result_reg, Operand(input_reg));
  cmp(result_reg, 0x1);
  j(no_overflow, &done, Label::kNear);

  sub(esp, Immediate(kDoubleSize));
  movsd(MemOperand(esp, 0), input_reg);
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
  bind(&done);
}
251 
252 
// Converts the double in |input_reg| to an int32 in |result_reg|, jumping
// to |lost_precision| when the value is not exactly representable, to
// |is_nan| for NaN, and (under FAIL_ON_MINUS_ZERO) to |minus_zero| for -0.
// Clobbers |scratch|.
void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
                               XMMRegister scratch,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  DCHECK(!input_reg.is(scratch));
  // Round-trip: convert to int32 and back; a mismatch means precision loss.
  cvttsd2si(result_reg, Operand(input_reg));
  Cvtsi2sd(scratch, Operand(result_reg));
  ucomisd(scratch, input_reg);
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);  // ucomisd sets PF on unordered (NaN).
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    Label done;
    // The integer converted back is equal to the original. We
    // only have to test if we got -0 as an input.
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    movmskpd(result_reg, input_reg);
    // Bit 0 contains the sign of the double in input_reg.
    // If input was positive, we are ok and return 0, otherwise
    // jump to minus_zero.
    and_(result_reg, 1);
    j(not_zero, minus_zero, dst);
    bind(&done);
  }
}
279 
280 
// Truncates the value of the HeapNumber in |input_reg| to an int32 in
// |result_reg|. Uses fisttp when SSE3 is available, otherwise cvttsd2si
// (clobbering xmm0) with a stub-based slow path for out-of-range values.
void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done, slow_case;

  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(this, SSE3);
    Label convert;
    // Use more powerful conversion when sse3 is available.
    // Load x87 register with heap number.
    fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
    // Get exponent alone and check for too-big exponent.
    mov(result_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
    and_(result_reg, HeapNumber::kExponentMask);
    const uint32_t kTooBigExponent =
        (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
    cmp(Operand(result_reg), Immediate(kTooBigExponent));
    j(greater_equal, &slow_case, Label::kNear);

    // Reserve space for 64 bit answer.
    sub(Operand(esp), Immediate(kDoubleSize));
    // Do conversion, which cannot fail because we checked the exponent.
    fisttp_d(Operand(esp, 0));
    mov(result_reg, Operand(esp, 0));  // Low word of answer is the result.
    add(Operand(esp), Immediate(kDoubleSize));
    jmp(&done, Label::kNear);

    // Slow case.
    bind(&slow_case);
    if (input_reg.is(result_reg)) {
      // Input is clobbered. Restore number from fpu stack
      sub(Operand(esp), Immediate(kDoubleSize));
      fstp_d(Operand(esp, 0));
      SlowTruncateToI(result_reg, esp, 0);
      add(esp, Immediate(kDoubleSize));
    } else {
      fstp(0);
      SlowTruncateToI(result_reg, input_reg);
    }
  } else {
    movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    cvttsd2si(result_reg, Operand(xmm0));
    cmp(result_reg, 0x1);
    j(no_overflow, &done, Label::kNear);
    // Check if the input was 0x80000000 (kMinInt), the only value for which
    // the 0x80000000 conversion result is actually correct.
    // If not, the conversion overflowed and we take the slow path.
    ExternalReference min_int = ExternalReference::address_of_min_int();
    ucomisd(xmm0, Operand::StaticVariable(min_int));
    j(not_equal, &slow_case, Label::kNear);
    j(parity_even, &slow_case, Label::kNear);  // NaN.
    jmp(&done, Label::kNear);

    // Slow case.
    bind(&slow_case);
    if (input_reg.is(result_reg)) {
      // Input is clobbered. Restore number from double scratch.
      sub(esp, Immediate(kDoubleSize));
      movsd(MemOperand(esp, 0), xmm0);
      SlowTruncateToI(result_reg, esp, 0);
      add(esp, Immediate(kDoubleSize));
    } else {
      SlowTruncateToI(result_reg, input_reg);
    }
  }
  bind(&done);
}
346 
347 
// Converts the unsigned 32-bit integer in |src| to a double in |dst|.
// Cvtsi2sd interprets the input as signed, so when the sign bit is set the
// result is corrected by adding the uint32 bias constant.
void MacroAssembler::LoadUint32(XMMRegister dst,
                                Register src) {
  Label done;
  cmp(src, Immediate(0));
  ExternalReference uint32_bias =
        ExternalReference::address_of_uint32_bias();
  Cvtsi2sd(dst, src);
  j(not_sign, &done, Label::kNear);
  addsd(dst, Operand::StaticVariable(uint32_bias));
  bind(&done);
}
359 
360 
// Write barrier for a store of |value| into the FixedArray element of
// |object| at smi index |index|. Reuses |index| as the slot-address
// register, so |index| is clobbered; in debug code |value| and |index| are
// additionally zapped afterwards.
void MacroAssembler::RecordWriteArray(
    Register object,
    Register value,
    Register index,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    DCHECK_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(index, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
399 
400 
// Write barrier for a store of |value| into the field of |object| at
// |offset| (a tagged-object-relative byte offset, must be pointer-aligned).
// Computes the slot address into |dst|, which is clobbered; in debug code
// |value| and |dst| are additionally zapped afterwards.
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // Verify the computed slot address is pointer-aligned.
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
444 
445 
// Write barrier specialized for a map store into |object|'s map word.
// Only needed for incremental marking (maps are never in new space, so no
// remembered-set entry is required). Clobbers |scratch1| and |scratch2|;
// in debug code both are zapped afterwards.
void MacroAssembler::RecordWriteForMap(
    Register object,
    Handle<Map> map,
    Register scratch1,
    Register scratch2,
    SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    // Verify the map slot address is pointer-aligned.
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // Compute the address.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set.  This optimization
  // relies on the fact that maps can never be in new space.
  DCHECK(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
                       save_fp);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
506 
507 
// The general write barrier: records that |value| was stored into the slot
// |address| of |object|. Skips all work for smi stores (when requested) and
// for page-flag combinations that prove no barrier is needed; otherwise
// calls the RecordWriteStub. |value| is used as a scratch register for the
// page-flag checks; in debug code |address| and |value| are zapped at the
// end.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // Verify that the slot really holds the value being recorded.
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
575 
576 
// Calls the Runtime::kDebugBreak handler: zero arguments in eax, the
// runtime function's external reference in ebx, dispatched through a
// one-argument CEntryStub with DEBUG_BREAK relocation.
void MacroAssembler::DebugBreak() {
  Move(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(isolate(), 1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
583 
584 
// Converts the int32 at |src| to a double in |dst|. |dst| is zeroed first
// because cvtsi2sd only writes the low 64 bits, which would otherwise leave
// a false dependency on the register's previous contents.
void MacroAssembler::Cvtsi2sd(XMMRegister dst, const Operand& src) {
  xorps(dst, dst);
  cvtsi2sd(dst, src);
}
589 
590 
IsUnsafeImmediate(const Immediate & x)591 bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
592   static const int kMaxImmediateBits = 17;
593   if (!RelocInfo::IsNone(x.rmode_)) return false;
594   return !is_intn(x.x_, kMaxImmediateBits);
595 }
596 
597 
SafeMove(Register dst,const Immediate & x)598 void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
599   if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
600     Move(dst, Immediate(x.x_ ^ jit_cookie()));
601     xor_(dst, jit_cookie());
602   } else {
603     Move(dst, x);
604   }
605 }
606 
607 
SafePush(const Immediate & x)608 void MacroAssembler::SafePush(const Immediate& x) {
609   if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
610     push(Immediate(x.x_ ^ jit_cookie()));
611     xor_(Operand(esp, 0), Immediate(jit_cookie()));
612   } else {
613     push(x);
614   }
615 }
616 
617 
// Loads the map of |heap_object| into |map| and compares its instance type
// against |type|, leaving the flags set for a conditional jump.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
624 
625 
// Compares the instance-type byte of |map| against |type|, leaving the
// flags set for a conditional jump.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
630 
631 
// Jumps to |fail| unless |map| has one of the fast elements kinds (smi,
// object, holey or not). Relies on the fast kinds occupying the low values
// of the BitField2 encoding, as the STATIC_ASSERTs document.
void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}
643 
644 
// Jumps to |fail| unless |map| has fast *object* elements (FAST_ELEMENTS or
// FAST_HOLEY_ELEMENTS): smi-element kinds fail the first comparison, and
// anything beyond the holey-element bound fails the second.
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}
659 
660 
// Jumps to |fail| unless |map| has fast *smi* elements (FAST_SMI_ELEMENTS
// or FAST_HOLEY_SMI_ELEMENTS).
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}
670 
671 
// Stores |maybe_number| into the FixedDoubleArray |elements| at smi index
// |key|. Smis are untagged and converted to doubles; heap numbers are
// stored with NaNs canonicalized; any other object jumps to |fail|.
// Clobbers |scratch1| and |scratch2|.
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch1,
    XMMRegister scratch2,
    Label* fail,
    int elements_offset) {
  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmp(FieldOperand(maybe_number, offset),
      Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  ExternalReference canonical_nan_reference =
      ExternalReference::address_of_canonical_non_hole_nan();
  movsd(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  bind(&have_double_value);
  // Smi keys are shifted by 1, so times_4 scales them to 8-byte slots.
  movsd(FieldOperand(elements, key, times_4,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        scratch2);
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  // Replace any NaN by the single canonical (non-hole) NaN bit pattern.
  movsd(scratch2, Operand::StaticVariable(canonical_nan_reference));
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch1, maybe_number);
  SmiUntag(scratch1);
  Cvtsi2sd(scratch2, scratch1);
  movsd(FieldOperand(elements, key, times_4,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        scratch2);
  bind(&done);
}
725 
726 
// Compares the map of |obj| against the handle |map|, leaving the flags set
// for a conditional jump.
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}
730 
731 
// Jumps to |fail| if |obj| does not have map |map| (or, with DO_SMI_CHECK,
// if it is a smi and therefore has no map at all).
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}
743 
744 
// Jumps to the |success| code object if |obj| has map |map|; otherwise
// falls through (smis fall through as well under DO_SMI_CHECK).
// |unused| is kept for interface compatibility and not read.
void MacroAssembler::DispatchMap(Register obj,
                                 Register unused,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(equal, success);

  bind(&fail);
}
759 
760 
// Loads the map of |heap_object| into |map| and its instance type into
// |instance_type|, then tests the string bit. Returns the condition (zero)
// under which the object is a string.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
770 
771 
// Loads the map of |heap_object| into |map| and its instance type into
// |instance_type|, then compares against LAST_NAME_TYPE. Returns the
// condition (below_equal) under which the object is a name.
Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
  return below_equal;
}
780 
781 
// Loads the map of |heap_object| into |map| and jumps to |fail| unless the
// object's instance type is in the non-callable spec-object range.
// Clobbers |scratch|.
void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}
789 
790 
// Jumps to |fail| unless |map|'s instance type lies within the
// [FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, LAST_NONCALLABLE_SPEC_OBJECT_TYPE]
// range (a single unsigned range check after biasing). Clobbers |scratch|.
void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  cmp(scratch,
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  j(above, fail);
}
800 
801 
// Compares the two values on top of the x87 stack and pops both: fucomip
// compares and pops one operand, fstp(0) discards the other. The result is
// left in EFLAGS.
void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}
806 
807 
// Debug-code check: aborts unless |object| is a smi or a heap number.
// Emits nothing in release builds.
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}
818 
819 
// Debug-code check that |object| is a smi; aborts with kOperandIsNotASmi
// otherwise. Emits nothing in release code.
void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}
826 
827 
// Debug-code check that |object| is a string (not a smi, and instance type
// below FIRST_NONSTRING_TYPE); aborts otherwise. |object| is preserved.
void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    // Temporarily reuse |object| to hold its own map; restore afterwards.
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}
839 
840 
// Debug-code check that |object| is a name (not a smi, and instance type
// <= LAST_NAME_TYPE); aborts otherwise. |object| is preserved.
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    // Temporarily reuse |object| to hold its own map; restore afterwards.
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}
852 
853 
// Debug-code check that |object| is either the undefined value or an
// AllocationSite; aborts with kExpectedUndefinedOrCell otherwise.
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    // Compare the object's map word (field at offset 0) against the
    // allocation-site map.
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}
866 
867 
// Debug-code check that |object| is NOT a smi; aborts with kOperandIsASmi
// otherwise. Emits nothing in release code.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}
874 
875 
// Emits the frame set-up for a stub frame: saved caller ebp, the context,
// and a STUB frame-type marker (stub frames carry no JS function slot).
void MacroAssembler::StubPrologue() {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(esi);  // Callee's context.
  push(Immediate(Smi::FromInt(StackFrame::STUB)));  // Frame-type marker.
}
882 
883 
Prologue(bool code_pre_aging)884 void MacroAssembler::Prologue(bool code_pre_aging) {
885   PredictableCodeSizeScope predictible_code_size_scope(this,
886       kNoCodeAgeSequenceLength);
887   if (code_pre_aging) {
888       // Pre-age the code.
889     call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
890         RelocInfo::CODE_AGE_SEQUENCE);
891     Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
892   } else {
893     push(ebp);  // Caller's frame pointer.
894     mov(ebp, esp);
895     push(esi);  // Callee's context.
896     push(edi);  // Callee's JS function.
897   }
898 }
899 
900 
// Builds a frame of the given |type|: saved ebp, context, frame-type
// marker, and this code object.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    // CodeObject() starts out as the undefined value (see the constructor);
    // it must have been patched to the real code object by now.
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
912 
913 
// Tears down a frame built by EnterFrame. In debug code, first verifies
// that the frame's type marker matches |type|.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  leave();  // mov esp, ebp; pop ebp.
}
922 
923 
// Lays out the fixed part of an exit frame: saved ebp, a slot for the
// entry stack pointer (patched later in EnterExitFrameEpilogue), the code
// object, and records ebp/esi in the isolate's c_entry_fp/context slots.
void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  DCHECK(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  DCHECK(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  DCHECK(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  DCHECK(ExitFrameConstants::kSPOffset  == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
943 
944 
// Reserves stack space for |argc| argument slots (plus, if |save_doubles|,
// a save area for all XMM registers), aligns esp to the OS activation
// frame alignment, and patches the frame's saved-entry-sp slot.
void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kMaxNumRegisters * kDoubleSize +
                argc * kPointerSize;
    sub(esp, Immediate(space));
    // The XMM save area starts below the two fixed slots (entry sp and
    // code object) pushed by EnterExitFramePrologue.
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    // Round esp down to a multiple of the (power-of-two) alignment.
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
970 
971 
// Enters an exit frame for a call into C code. Expects the argument count
// in eax on entry; afterwards edi holds argc and esi points at the first
// caller-stack argument (argv).
void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  // esi = ebp + eax * 4 + offset: address of the last argument.
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}
983 
984 
// Enters an exit frame for an API call with |argc| argument slots.
// XMM registers are never saved on this path.
void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}
989 
990 
// Leaves an exit frame entered with EnterExitFrame: optionally restores
// the XMM registers, drops the arguments and receiver using esi (which
// still points at the last argument), and re-pushes the return address.
// The context is always restored (LeaveExitFrameEpilogue(true)).
void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    // Mirrors the save layout written by EnterExitFrameEpilogue.
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue(true);
}
1013 
1014 
// Restores the context from the isolate's context slot (when
// |restore_context| is set) and clears the isolate's c_entry_fp slot,
// marking that we are no longer inside an exit frame.
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  // Poison the saved context so stale reads are caught in debug builds.
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
1030 
1031 
// Leaves an API exit frame: unwinds esp/ebp and runs the common epilogue,
// optionally restoring the context.
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}
1038 
1039 
// Pushes a new StackHandler of the given |kind| onto the stack and links
// it in as the isolate's current handler. |handler_index| and |kind| are
// packed into the handler's state word.
void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame so we save NULL for
    // ebp. We expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(Smi::FromInt(0)));  // No context.
  } else {
    push(ebp);
    push(esi);
  }
  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));
  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}
1075 
1076 
// Unlinks the current (top) try handler and removes it from the stack.
void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  // Popping the next-handler link restores the chain and removes one word;
  // the rest of the handler is dropped with a single esp adjustment.
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
1083 
1084 
// Computes the handler entry address and jumps to it. The handler table is
// a fixed array of (smi-tagged) code offsets, indexed by the handler index
// stored in the upper bits of the state word.
// Expects: eax = exception, edi = code object, edx = index and state.
// Clobbers ebx, edx, and edi.
void MacroAssembler::JumpToHandlerEntry() {
  mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
  // Shift out the kind bits, leaving the handler-table index.
  shr(edx, StackHandler::kKindWidth);
  mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
  SmiUntag(edx);
  // Address of the handler = code object start + header + code offset.
  lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  jmp(edi);
}
1096 
1097 
// Throws |value| as an exception: unwinds to the topmost stack handler,
// restores its saved context and frame pointer, and jumps to the handler
// entry. Does not return.
void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));
  // Restore the next handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Restore the context and frame pointer.
  pop(esi);  // Context.
  pop(ebp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == ENTRY) == (ebp == 0) == (esi == 0), so we could test either
  // ebp or esi.
  Label skip;
  test(esi, esi);
  j(zero, &skip, Label::kNear);
  mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  bind(&skip);

  JumpToHandlerEntry();
}
1136 
1137 
// Throws |value| past all JS try handlers: unwinds the handler chain until
// the topmost JS_ENTRY handler is found and jumps to its entry. Does not
// return.
void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  // Follow the next-handler link of the current handler.
  mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  // JS_ENTRY is kind 0, so a zero KindField identifies the entry handler.
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  test(Operand(esp, StackHandlerConstants::kStateOffset),
       Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Clear the context pointer and frame pointer (0 was saved in the handler).
  pop(esi);
  pop(ebp);

  JumpToHandlerEntry();
}
1180 
1181 
// Security check for accessing the global proxy in |holder_reg|: passes if
// the current lexical context and the holder share the same native
// context, or if their security tokens match; otherwise jumps to |miss|.
// Clobbers |scratch1| and |scratch2|; |holder_reg| is preserved.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch1));
  DCHECK(!holder_reg.is(scratch2));
  DCHECK(!scratch1.is(scratch2));

  // Load current lexical context from the stack frame.
  mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  int offset =
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, offset));
  mov(scratch1, FieldOperand(scratch1, GlobalObject::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Read the first word and compare to native_context_map.
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map(),
    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
1246 
1247 
// Compute the hash code from the untagged key.  This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stubs-hydrogen.cc
//
// Note: r0 will contain hash code
// Computes the seeded integer hash of the untagged key in |r0|, leaving
// the hash in |r0|. Clobbers |scratch|.
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (serializer_enabled()) {
    // Load the seed from the roots array at runtime so the generated code
    // stays seed-independent when serializing.
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    // Otherwise the seed can be baked in as an immediate.
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);  (i.e. hash *= 5, via lea)
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
}
1290 
1291 
1292 
// Looks up a smi key in a SeededNumberDictionary using an unrolled probe
// sequence. On success falls through with the value in |result|; jumps to
// |miss| if the key is absent or the entry is not a normal property.
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key      - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);  // Capacity is a power of two, so capacity - 1 is the mask.

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      // Last probe: failure to match means the key is absent.
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  DCHECK_EQ(NORMAL, 0);
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
1365 
1366 
// Loads the current allocation top for the space selected by |flags| into
// |result|. If RESULT_CONTAINS_TOP is set, |result| already holds the top
// (verified in debug code) and |scratch| must be no_reg. Otherwise, if
// |scratch| is provided it is left holding the top's address for reuse in
// UpdateAllocationTopHelper.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
1393 
1394 
// Stores |result_end| as the new allocation top for the space selected by
// |flags|. If |scratch| is provided it must already hold the top's address
// (set up by LoadAllocationTopHelper). Debug code checks the new top is
// object-aligned.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
1413 
1414 
// Allocates a fixed-size object of |object_size| bytes, bumping the
// allocation top of the space selected by |flags|. On success |result|
// holds the (optionally tagged) object; on failure jumps to |gc_required|.
// |result_end| and |scratch| are clobbered when valid.
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      // Outside new-space the filler store needs an explicit limit check.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    // Fill the alignment gap with a one-pointer filler object.
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  Register top_reg = result_end.is_valid() ? result_end : result;
  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch, flags);

  // Tag result if requested.
  bool tag_result = (flags & TAG_OBJECT) != 0;
  if (top_reg.is(result)) {
    // result currently holds the new top; recover the object start.
    if (tag_result) {
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if (tag_result) {
    // Tagging is just adding the (single-bit) heap object tag.
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }
}
1489 
1490 
// Allocates an object of |header_size| + |element_count| * |element_size|
// bytes. |element_count_type| says whether |element_count| holds a smi or
// an int32; for smis the scale factor is reduced by one to absorb the smi
// tag shift. On success |result| holds the (optionally tagged) object and
// |result_end| the new top; jumps to |gc_required| on failure.
void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              RegisterValueType element_count_type,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      // Outside new-space the filler store needs an explicit limit check.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    // Fill the alignment gap with a one-pointer filler object.
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  // We assume that element_count*element_size + header_size does not
  // overflow.
  if (element_count_type == REGISTER_VALUE_IS_SMI) {
    STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
    DCHECK(element_size >= times_2);
    DCHECK(kSmiTagSize == 1);
    // A smi is the value shifted left by one, so halving the scale factor
    // compensates for the tag.
    element_size = static_cast<ScaleFactor>(element_size - 1);
  } else {
    DCHECK(element_count_type == REGISTER_VALUE_IS_INT32);
  }
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  if ((flags & TAG_OBJECT) != 0) {
    // Tagging is just adding the (single-bit) heap object tag.
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
1567 
1568 
// Allocates an object whose size in bytes is held in register
// |object_size| (which is preserved). On success |result| holds the
// (optionally tagged) object and |result_end| the new top; jumps to
// |gc_required| on failure.
void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      // Outside new-space the filler store needs an explicit limit check.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    // Fill the alignment gap with a one-pointer filler object.
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    // Tagging is just adding the (single-bit) heap object tag.
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
1633 
1634 
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  // Reverts the most recent new-space allocation by moving the allocation
  // top back down to |object|.  Only valid if no other allocation happened
  // in between; |object| is clobbered (untagged).
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  // The object being undone must lie below the current top.
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, kUndoAllocationOfNonAllocatedMemory);
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}
1647 
1648 
AllocateHeapNumber(Register result,Register scratch1,Register scratch2,Label * gc_required,MutableMode mode)1649 void MacroAssembler::AllocateHeapNumber(Register result,
1650                                         Register scratch1,
1651                                         Register scratch2,
1652                                         Label* gc_required,
1653                                         MutableMode mode) {
1654   // Allocate heap number in new space.
1655   Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
1656            TAG_OBJECT);
1657 
1658   Handle<Map> map = mode == MUTABLE
1659       ? isolate()->factory()->mutable_heap_number_map()
1660       : isolate()->factory()->heap_number_map();
1661 
1662   // Set the map.
1663   mov(FieldOperand(result, HeapObject::kMapOffset), Immediate(map));
1664 }
1665 
1666 
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Allocates a sequential two-byte string of |length| characters (untagged
  // int32 in a register) and initializes its map, length and hash field.
  // Jumps to |gc_required| on allocation failure.
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  // Mask off the low bits: together with the add above this rounds the byte
  // count up to a multiple of kObjectAlignment.
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);  // The length field holds a Smi.
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1701 
1702 
void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Allocates a sequential one-byte string of |length| characters (untagged
  // int32 in a register) and initializes its map, length and hash field.
  // Jumps to |gc_required| on allocation failure.
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  DCHECK(kCharSize == 1);
  // Round the byte count up to a multiple of kObjectAlignment.
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);  // The length field holds a Smi.
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1735 
1736 
void MacroAssembler::AllocateOneByteString(Register result, int length,
                                           Register scratch1, Register scratch2,
                                           Label* gc_required) {
  // Variant for a compile-time-constant |length|: the size is computed
  // statically and the length field is stored as an immediate Smi.
  DCHECK(length > 0);

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
           gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1754 
1755 
void MacroAssembler::AllocateTwoByteConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}
1768 
1769 
AllocateOneByteConsString(Register result,Register scratch1,Register scratch2,Label * gc_required)1770 void MacroAssembler::AllocateOneByteConsString(Register result,
1771                                                Register scratch1,
1772                                                Register scratch2,
1773                                                Label* gc_required) {
1774   Allocate(ConsString::kSize,
1775            result,
1776            scratch1,
1777            scratch2,
1778            gc_required,
1779            TAG_OBJECT);
1780 
1781   // Set the map. The other fields are left uninitialized.
1782   mov(FieldOperand(result, HeapObject::kMapOffset),
1783       Immediate(isolate()->factory()->cons_one_byte_string_map()));
1784 }
1785 
1786 
void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                          Register scratch1,
                                          Register scratch2,
                                          Label* gc_required) {
  // Allocate sliced string in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}
1799 
1800 
void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_one_byte_string_map()));
}
1813 
1814 
// Copy memory, byte-by-byte, from source to destination.  Not optimized for
// long or aligned copies.  The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag(),
// before calling this function.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label short_loop, len4, len8, len12, done, short_string;
  // Register assignment is fixed by the rep_movs instruction (esi/edi/ecx).
  DCHECK(source.is(esi));
  DCHECK(destination.is(edi));
  DCHECK(length.is(ecx));
  cmp(length, Immediate(4));
  j(below, &short_string, Label::kNear);

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);

  // For lengths up to 16 bytes, a few explicit dword moves beat rep_movs.
  cmp(length, Immediate(8));
  j(below_equal, &len4, Label::kNear);
  cmp(length, Immediate(12));
  j(below_equal, &len8, Label::kNear);
  cmp(length, Immediate(16));
  j(below_equal, &len12, Label::kNear);

  // Long copy: move whole dwords with rep_movs, then account for the 0-3
  // trailing bytes (already copied above) when advancing destination.
  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(scratch, Immediate(0x3));
  add(destination, scratch);
  jmp(&done, Label::kNear);

  // 13-16 bytes: fall through len12 -> len8 -> len4, copying a dword each.
  bind(&len12);
  mov(scratch, Operand(source, 8));
  mov(Operand(destination, 8), scratch);
  bind(&len8);
  mov(scratch, Operand(source, 4));
  mov(Operand(destination, 4), scratch);
  bind(&len4);
  mov(scratch, Operand(source, 0));
  mov(Operand(destination, 0), scratch);
  add(destination, length);
  jmp(&done, Label::kNear);

  // Fewer than 4 bytes: plain byte loop (handles length == 0 too).
  bind(&short_string);
  test(length, length);
  j(zero, &done, Label::kNear);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
1880 
1881 
void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  // Stores |filler| into every pointer-sized slot in [start_offset,
  // end_offset).  |start_offset| is advanced to |end_offset|; handles the
  // empty range (start >= end) by doing nothing.
  Label loop, entry;
  jmp(&entry);  // Test the loop condition before the first store.
  bind(&loop);
  mov(Operand(start_offset, 0), filler);
  add(start_offset, Immediate(kPointerSize));
  bind(&entry);
  cmp(start_offset, end_offset);
  j(less, &loop);
}
1894 
1895 
void MacroAssembler::BooleanBitTest(Register object,
                                    int field_offset,
                                    int bit_index) {
  // Tests bit |bit_index| of the Smi stored at |field_offset| in |object|,
  // setting the zero flag accordingly.  Uses a single-byte test, so only the
  // byte containing the bit is read.
  bit_index += kSmiTagSize + kSmiShiftSize;  // Skip past the Smi tag bits.
  DCHECK(base::bits::IsPowerOfTwo32(kBitsPerByte));
  int byte_index = bit_index / kBitsPerByte;
  int byte_bit_index = bit_index & (kBitsPerByte - 1);
  test_b(FieldOperand(object, field_offset + byte_index),
         static_cast<byte>(1 << byte_bit_index));
}
1906 
1907 
1908 
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  // Jumps to |then_label| if |result| is zero and |op| is negative, i.e.
  // when an arithmetic result of 0 actually represents -0.
  Label ok;
  test(result, result);
  j(not_zero, &ok);  // Non-zero result can never be -0.
  test(op, op);
  j(sign, then_label);  // Negative operand with zero result => -0.
  bind(&ok);
}
1919 
1920 
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  // Jumps to |then_label| if |result| is zero and the sign bit of
  // (op1 | op2) is set — i.e. either operand was negative, so a zero
  // result represents -0.  Clobbers |scratch|.
  Label ok;
  test(result, result);
  j(not_zero, &ok);  // Non-zero result can never be -0.
  mov(scratch, op1);
  or_(scratch, op2);  // Sign bit set iff at least one operand is negative.
  j(sign, then_label);
  bind(&ok);
}
1934 
1935 
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss,
                                             bool miss_on_bound_function) {
  // Loads the prototype of |function| into |result|, jumping to |miss| when
  // it cannot be determined inline (smi receiver, non-function, bound
  // function, or uninitialized prototype).  Clobbers |scratch|.
  Label non_instance;
  if (miss_on_bound_function) {
    // Check that the receiver isn't a smi.
    JumpIfSmi(function, miss);

    // Check that the function really is a function.
    CmpObjectType(function, JS_FUNCTION_TYPE, result);
    j(not_equal, miss);

    // If a bound function, go to miss label.
    mov(scratch,
        FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
    BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
                   SharedFunctionInfo::kBoundFunction);
    j(not_zero, miss);

    // Make sure that the function has an instance prototype.
    // Note: |result| still holds the function's map from CmpObjectType.
    movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
    test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
    j(not_zero, &non_instance);
  }

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));

  if (miss_on_bound_function) {
    jmp(&done);

    // Non-instance prototype: Fetch prototype from constructor field
    // in initial map.
    bind(&non_instance);
    mov(result, FieldOperand(result, Map::kConstructorOffset));
  }

  // All done.
  bind(&done);
}
1993 
1994 
// Emits a call to |stub|'s generated code, recording |ast_id| in the
// relocation info for type-feedback purposes.
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
1999 
2000 
// Emits a tail call (jump, no return address pushed) to |stub|'s code.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
2004 
2005 
// Returns from a stub, popping |argc| - 1 arguments off the caller's stack
// (the receiver/first argument is accounted for by the ret itself).
void MacroAssembler::StubReturn(int argc) {
  DCHECK(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
2010 
2011 
AllowThisStubCall(CodeStub * stub)2012 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
2013   return has_frame_ || !stub->SometimesSetsUpAFrame();
2014 }
2015 
2016 
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // Extracts the cached array index from a string hash field in |hash| and
  // stores it, as a Smi, in |index|.  |hash| and |index| may be the same
  // register.
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  if (!index.is(hash)) {
    mov(index, hash);
  }
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
}
2028 
2029 
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // Calls the C++ runtime function |f| with |num_arguments| stack arguments
  // through the CEntry stub.  eax carries the argument count and ebx the
  // runtime entry point, per the CEntry calling convention.
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(isolate(), 1, save_doubles);
  CallStub(&ces);
}
2047 
2048 
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  // Calls the external (C) function at |ref| through the CEntry stub.
  // Same register protocol as CallRuntime: eax = argument count, ebx = entry.
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}
2057 
2058 
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // Tail-calls the external function at |ext|, passing |num_arguments| in
  // eax per the CEntry convention.  |result_size| is unused on ia32.
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}
2069 
2070 
// Convenience wrapper: tail-calls the runtime function identified by |fid|.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}
2078 
2079 
ApiParameterOperand(int index)2080 Operand ApiParameterOperand(int index) {
2081   return Operand(esp, index * kPointerSize);
2082 }
2083 
2084 
void MacroAssembler::PrepareCallApiFunction(int argc) {
  // Sets up an API exit frame with room for |argc| outgoing parameters
  // (accessed afterwards via ApiParameterOperand).
  EnterApiExitFrame(argc);
  if (emit_debug_code()) {
    // Zap esi so debug builds catch accidental context use inside the call.
    mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
2091 
2092 
// Calls an API function (|function_address|, expected in edx), routing
// through the profiler thunk |thunk_ref| when profiling is active.  Manages
// the HandleScope around the call, propagates scheduled exceptions, and
// returns to the JS caller popping |stack_space| slots.  The result is read
// from |return_value_operand|; if |context_restore_operand| is non-NULL the
// context is restored from it before leaving the exit frame.
void MacroAssembler::CallApiFunctionAndReturn(
    Register function_address,
    ExternalReference thunk_ref,
    Operand thunk_last_arg,
    int stack_space,
    Operand return_value_operand,
    Operand* context_restore_operand) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate());
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address(isolate());
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address(isolate());

  DCHECK(edx.is(function_address));
  // Allocate HandleScope in callee-save registers.
  // ebx = saved next, edi = saved limit; level is incremented in memory.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  if (FLAG_log_timer_events) {
    // Log entry into external code for the profiler.
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1, eax);
    mov(Operand(esp, 0),
        Immediate(ExternalReference::isolate_address(isolate())));
    CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
    PopSafepointRegisters();
  }


  // Route through the profiler thunk when is_profiling is set, so the
  // profiler can attribute ticks to the API callback.
  Label profiler_disabled;
  Label end_profiler_check;
  mov(eax, Immediate(ExternalReference::is_profiling_address(isolate())));
  cmpb(Operand(eax, 0), 0);
  j(zero, &profiler_disabled);

  // Additional parameter is the address of the actual getter function.
  mov(thunk_last_arg, function_address);
  // Call the api function.
  mov(eax, Immediate(thunk_ref));
  call(eax);
  jmp(&end_profiler_check);

  bind(&profiler_disabled);
  // Call the api function.
  call(function_address);
  bind(&end_profiler_check);

  if (FLAG_log_timer_events) {
    // Log return from external code.
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1, eax);
    mov(Operand(esp, 0),
        Immediate(ExternalReference::isolate_address(isolate())));
    CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
    PopSafepointRegisters();
  }

  Label prologue;
  // Load the value from ReturnValue
  mov(eax, return_value_operand);

  Label promote_scheduled_exception;
  Label exception_handled;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  // The Assert checks the flags set by the sub above: level must not
  // underflow.
  Assert(above_equal, kInvalidHandleScopeLevel);
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(isolate()->factory()->the_hole_value()));
  j(not_equal, &promote_scheduled_exception);
  bind(&exception_handled);

#if ENABLE_EXTRA_CHECKS
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  JumpIfSmi(return_value, &ok, Label::kNear);
  mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  CmpInstanceType(map, FIRST_NONSTRING_TYPE);
  j(below, &ok, Label::kNear);

  CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
  j(above_equal, &ok, Label::kNear);

  cmp(map, isolate()->factory()->heap_number_map());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->undefined_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->true_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->false_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->null_value());
  j(equal, &ok, Label::kNear);

  Abort(kAPICallReturnedInvalidObject);

  bind(&ok);
#endif

  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    mov(esi, *context_restore_operand);
  }
  LeaveApiExitFrame(!restore_context);
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  {
    FrameScope frame(this, StackFrame::INTERNAL);
    CallRuntime(Runtime::kPromoteScheduledException, 0);
  }
  jmp(&exception_handled);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate());
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  // Preserve the return value (eax) in edi across the C call.
  mov(edi, eax);
  mov(Operand(esp, 0),
      Immediate(ExternalReference::isolate_address(isolate())));
  mov(eax, Immediate(delete_extensions));
  call(eax);
  mov(eax, edi);
  jmp(&leave_exit_frame);
}
2241 
2242 
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  // ebx carries the entry address per the CEntry stub's convention.
  mov(ebx, Immediate(ext));
  CEntryStub ces(isolate(), 1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
2249 
2250 
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper) {
  // Compares expected and actual argument counts and, on mismatch, routes
  // the invocation through the ArgumentsAdaptorTrampoline.  Sets
  // |*definitely_mismatches| when the mismatch is known at compile time, in
  // which case the adaptor handles the whole invocation and control never
  // reaches |done|.  Uses the standard registers: eax = actual count,
  // ebx = expected count, edx = code to invoke.
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      DCHECK(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      DCHECK(actual.reg().is(eax));
      DCHECK(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    // Mismatch (or possible mismatch): go through the arguments adaptor.
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      // The adaptor expects the code entry in edx.
      mov(edx, Immediate(code_constant));
      add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        // The adaptor performed the call; skip the direct invocation.
        jmp(done, done_near);
      }
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
2323 
2324 
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper) {
  // Invokes the code at |code| (call or jump per |flag|), adapting the
  // argument count first via InvokePrologue when expected != actual.
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, &definitely_mismatches, flag, Label::kNear,
                 call_wrapper);
  // On a definite mismatch the adaptor has already done the invocation.
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}
2350 
2351 
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // Invokes the JSFunction in |fun| (must be edi), reading the expected
  // argument count from its SharedFunctionInfo.  Loads the function's
  // context into esi before the call.
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);  // Formal parameter count is stored as a Smi.

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper);
}
2369 
2370 
// Invokes the JSFunction held in edi with a caller-supplied expected
// argument count. Installs the function's context in esi first.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper);
}
2385 
2386 
// Invokes a known JSFunction handle: materializes it into edi (via a cell if
// it lives in new space) and delegates to the register-based overload.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  LoadHeapObject(edi, function);
  InvokeFunction(edi, expected, actual, flag, call_wrapper);
}
2395 
2396 
// Invokes a JavaScript builtin by id. The builtin function is loaded into
// edi and invoked through its code entry.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, call_wrapper);
}
2411 
2412 
// Loads the JSFunction for the given builtin id into |target|, walking
// current context -> global object -> builtins object -> function slot.
void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}
2421 
2422 
// Loads the code entry point of the given builtin into |target|.
// Clobbers edi (used to hold the builtin function), so target must differ.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  DCHECK(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}
2430 
2431 
// Loads into |dst| the context that is |context_chain_length| hops up the
// chain from the current context in esi (0 means the current context).
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context.  Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, kVariableResolvedToWithContext);
  }
}
2456 
2457 
// If |map_in_out| holds the native context's cached JSArray map for
// |expected_kind|, replaces it with the cached map for |transitioned_kind|;
// otherwise jumps to |no_map_match|. |scratch| is clobbered.
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the global or builtins object from the current context.
  mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check that the function's map is the same as the expected cached map.
  mov(scratch, Operand(scratch,
                       Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));

  // The maps array is indexed by elements kind.
  size_t offset = expected_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  cmp(map_in_out, FieldOperand(scratch, offset));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  offset = transitioned_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  mov(map_in_out, FieldOperand(scratch, offset));
}
2482 
2483 
// Loads the native-context function at slot |index| into |function|.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function,
      Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  mov(function,
      FieldOperand(function, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}
2494 
2495 
// Loads |function|'s initial map into |map|. In debug code, verifies that
// the loaded object really is a map (global functions always have one).
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map.  The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    // A map's map is the meta map; anything else means we loaded a non-map.
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}
2509 
2510 
2511 // Store the value in register src in the safepoint register stack
2512 // slot for register dst.
// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}
2516 
2517 
// Store the immediate |src| in the safepoint register stack slot for |dst|.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}
2521 
2522 
// Load into |dst| the value saved in the safepoint register slot for |src|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}
2526 
2527 
// Returns the stack operand addressing the safepoint slot for |reg|,
// relative to esp.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
2531 
2532 
SafepointRegisterStackIndex(int reg_code)2533 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
2534   // The registers are pushed starting with the lowest encoding,
2535   // which means that lowest encodings are furthest away from
2536   // the stack pointer.
2537   DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
2538   return kNumSafepointRegisters - reg_code - 1;
2539 }
2540 
2541 
// Loads a heap object handle into |result|. New-space objects may move, so
// they are referenced indirectly through a cell; old-space objects can be
// embedded directly in the instruction stream.
void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  AllowDeferredHandleDereference embedding_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    mov(result, Operand::ForCell(cell));
  } else {
    mov(result, object);
  }
}
2552 
2553 
// Compares |reg| with a heap object handle. New-space objects are compared
// through a cell (they may move); old-space objects are embedded directly.
void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    cmp(reg, Operand::ForCell(cell));
  } else {
    cmp(reg, object);
  }
}
2563 
2564 
// Pushes a heap object handle onto the stack. New-space objects are pushed
// through a cell (they may move); old-space objects are pushed directly.
void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    push(Operand::ForCell(cell));
  } else {
    Push(object);
  }
}
2574 
2575 
// Emits a plain return (no stack arguments popped).
void MacroAssembler::Ret() {
  ret(0);
}
2579 
2580 
// Returns and drops |bytes_dropped| bytes of arguments from the stack.
// The ret-imm16 form only encodes a 16-bit count, so larger drops move the
// return address down manually using |scratch|.
void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);  // Save the return address.
    add(esp, Immediate(bytes_dropped));
    push(scratch);  // Restore it above the dropped area.
    ret(0);
  }
}
2591 
2592 
// Drops |stack_elements| pointer-sized slots from the stack. No-op for
// zero or negative counts.
void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(esp, Immediate(stack_elements * kPointerSize));
  }
}
2598 
2599 
// Register-to-register move; emits nothing when source and destination
// are the same register.
void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}
2605 
2606 
// Loads an immediate into |dst|, using the shorter xor encoding for zero.
// Note: xor clobbers the flags, unlike mov.
void MacroAssembler::Move(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, dst);  // Shorter than mov of 32-bit immediate 0.
  } else {
    mov(dst, x);
  }
}
2614 
2615 
// Stores an immediate to a memory operand.
void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
2619 
2620 
// Loads the double constant |val| into XMM register |dst|. Zero uses the
// short xorps idiom; other values are materialized by pushing the two
// 32-bit halves and loading through memory.
void MacroAssembler::Move(XMMRegister dst, double val) {
  // TODO(titzer): recognize double constants with ExternalReferences.
  uint64_t int_val = bit_cast<uint64_t, double>(val);
  if (int_val == 0) {
    xorps(dst, dst);
  } else {
    int32_t lower = static_cast<int32_t>(int_val);
    int32_t upper = static_cast<int32_t>(int_val >> kBitsPerInt);
    // Push high word first so the value sits little-endian at [esp].
    push(Immediate(upper));
    push(Immediate(lower));
    movsd(dst, Operand(esp, 0));
    add(esp, Immediate(kDoubleSize));
  }
}
2635 
2636 
// Emits code to set a stats counter to |value| (only when native code
// counters are enabled and the counter is active).
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
2642 
2643 
// Emits code to add |value| (> 0) to a stats counter; uses the shorter
// inc encoding for increments of one.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}
2655 
2656 
// Emits code to subtract |value| (> 0) from a stats counter; uses the
// shorter dec encoding for decrements of one.
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}
2668 
2669 
// Conditionally increments a stats counter when |cc| holds. The flags are
// saved/restored around the update (inc/add clobber them) so the caller's
// condition codes survive.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();  // Preserve flags across the counter update.
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2683 
2684 
// Conditionally decrements a stats counter when |cc| holds. The flags are
// saved/restored around the update (dec/sub clobber them) so the caller's
// condition codes survive.
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();  // Preserve flags across the counter update.
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2698 
2699 
// Emits a Check (abort if |cc| does not hold) only in debug-code builds.
void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}
2703 
2704 
// Debug-code assertion that |elements| is a fast-elements backing store:
// its map must be the fixed-array, fixed-double-array, or COW-array map.
// Aborts otherwise.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}
2722 
2723 
// Emits code that aborts with |reason| unless condition |cc| holds.
void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L);
  Abort(reason);
  // will not return here
  bind(&L);
}
2731 
2732 
// Emits a runtime check that esp is aligned to the OS activation frame
// alignment; traps with int3 if not. No code is emitted when the required
// alignment is a single pointer (always satisfied).
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
2746 
2747 
// Emits code that aborts execution with the given bailout reason by calling
// Runtime::kAbort with the reason as a smi argument. In debug builds with
// --trap-on-abort, emits a bare int3 trap instead.
void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  // Pass the reason as a smi so the runtime can decode it.
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(reason))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 1);
  } else {
    CallRuntime(Runtime::kAbort, 1);
  }
  // will not return here
  int3();
}
2775 
2776 
// Loads the descriptor array of |map| into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}
2781 
2782 
// Extracts the number-of-own-descriptors bit field of |map| into |dst|.
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
2787 
2788 
// Loads 2^|power| into XMM register |dst| by constructing the IEEE-754
// double bit pattern: biased exponent placed via a shift into the exponent
// field, zero mantissa. Clobbers |scratch|.
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  // The biased exponent must fit in the exponent field.
  DCHECK(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}
2798 
2799 
// Looks up |object| (a smi or heap number) in the number-to-string cache.
// On a hit, leaves the cached string in |result| and falls through; on a
// miss (or NaN), jumps to |not_found|. |scratch1|/|scratch2| are clobbered,
// and |result| is used as a temporary during the lookup.
void MacroAssembler::LookupNumberStringCache(Register object,
                                             Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* not_found) {
  // Use of registers. Register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  // Load the number string cache.
  LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
  shr(mask, kSmiTagSize + 1);  // Untag length and divide it by two.
  sub(mask, Immediate(1));  // Make mask.

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
  Label smi_hash_calculated;
  Label load_result_from_cache;
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  JumpIfNotSmi(object, &not_smi, Label::kNear);
  mov(scratch, object);
  SmiUntag(scratch);
  jmp(&smi_hash_calculated, Label::kNear);
  bind(&not_smi);
  // Non-smi, non-heap-number values cannot be in the cache.
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  j(not_equal, not_found);
  STATIC_ASSERT(8 == kDoubleSize);
  // Hash the double by xoring its two 32-bit halves.
  mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
  xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
  // Object is heap number and hash is now in scratch. Calculate cache index.
  and_(scratch, mask);
  Register index = scratch;
  Register probe = mask;
  mov(probe,
      FieldOperand(number_string_cache,
                   index,
                   times_twice_pointer_size,
                   FixedArray::kHeaderSize));
  JumpIfSmi(probe, not_found);
  // Compare the full double values; index by value, not by bit pattern.
  movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
  ucomisd(xmm0, FieldOperand(probe, HeapNumber::kValueOffset));
  j(parity_even, not_found);  // Bail out if NaN is involved.
  j(not_equal, not_found);  // The cache did not contain this value.
  jmp(&load_result_from_cache, Label::kNear);

  bind(&smi_hash_calculated);
  // Object is smi and hash is now in scratch. Calculate cache index.
  and_(scratch, mask);
  // Check if the entry is the smi we are looking for.
  cmp(object,
      FieldOperand(number_string_cache,
                   index,
                   times_twice_pointer_size,
                   FixedArray::kHeaderSize));
  j(not_equal, not_found);

  // Get the result from the cache.
  bind(&load_result_from_cache);
  mov(result,
      FieldOperand(number_string_cache,
                   index,
                   times_twice_pointer_size,
                   FixedArray::kHeaderSize + kPointerSize));
  IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
}
2873 
2874 
// Jumps to |failure| unless |instance_type| describes a sequential one-byte
// string. |scratch| is clobbered (it may alias |instance_type|).
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
    Register instance_type, Register scratch, Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Mask down to string-ness, representation, and encoding bits, then
  // require exactly: string, sequential, one-byte.
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}
2885 
2886 
// Jumps to |failure| unless both |object1| and |object2| are sequential
// one-byte strings. Both scratch registers are clobbered.
void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register object1,
                                                           Register object2,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that both objects are not smis.
  // (and-ing the tag bits: the result is a smi only if at least one is.)
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  and_(scratch1, kFlatOneByteStringMask);
  and_(scratch2, kFlatOneByteStringMask);
  // scratch1 = scratch1 | (scratch2 << 3), combined via lea.
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatOneByteStringTag | (kFlatOneByteStringTag << 3));
  j(not_equal, failure);
}
2917 
2918 
// Jumps to |not_unique_name| unless the instance-type byte at |operand|
// denotes a unique name: an internalized string or a symbol.
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  // Zero here means: is a string AND is internalized.
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  // Otherwise only a symbol qualifies as a unique name.
  cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}
2931 
2932 
// Debug checks for a sequential-string character store: |string| must be a
// heap object whose representation/encoding matches |encoding_mask|, and
// the untagged |index| must be within [0, length). |index| is temporarily
// smi-tagged for the comparison and restored before returning; |value| is
// saved/restored around its use as a scratch register.
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object, Label::kNear);
  Abort(kNonObject);
  bind(&is_object);

  push(value);  // Preserve |value| while using it as scratch.
  mov(value, FieldOperand(string, HeapObject::kMapOffset));
  movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmp(value, Immediate(encoding_mask));
  pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  SmiTag(index);
  // SmiTag sets the overflow flag if the index does not fit in a smi.
  Check(no_overflow, kIndexIsTooLarge);

  cmp(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  cmp(index, Immediate(Smi::FromInt(0)));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index
  SmiUntag(index);
}
2966 
2967 
// Reserves stack space for |num_arguments| C-call arguments and aligns esp
// to the OS activation frame alignment, saving the original esp in the slot
// just above the arguments so CallCFunction can restore it afterwards.
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
2982 
2983 
// Calls a C function by external reference. The address is loaded into eax,
// which is safe to clobber since it will hold the return value anyway.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}
2990 
2991 
// Calls the C function whose address is in |function| and unwinds the
// argument area set up by PrepareCallCFunction: when the OS requires frame
// alignment, the pre-call esp was saved above the arguments and is
// reloaded; otherwise the argument slots are simply popped.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (base::OS::ActivationFrameAlignment() != 0) {
    // Restore the esp saved by PrepareCallCFunction.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}
3007 
3008 
3009 #ifdef DEBUG
AreAliased(Register reg1,Register reg2,Register reg3,Register reg4,Register reg5,Register reg6,Register reg7,Register reg8)3010 bool AreAliased(Register reg1,
3011                 Register reg2,
3012                 Register reg3,
3013                 Register reg4,
3014                 Register reg5,
3015                 Register reg6,
3016                 Register reg7,
3017                 Register reg8) {
3018   int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
3019       reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
3020       reg7.is_valid() + reg8.is_valid();
3021 
3022   RegList regs = 0;
3023   if (reg1.is_valid()) regs |= reg1.bit();
3024   if (reg2.is_valid()) regs |= reg2.bit();
3025   if (reg3.is_valid()) regs |= reg3.bit();
3026   if (reg4.is_valid()) regs |= reg4.bit();
3027   if (reg5.is_valid()) regs |= reg5.bit();
3028   if (reg6.is_valid()) regs |= reg6.bit();
3029   if (reg7.is_valid()) regs |= reg7.bit();
3030   if (reg8.is_valid()) regs |= reg8.bit();
3031   int n_of_non_aliasing_regs = NumRegs(regs);
3032 
3033   return n_of_valid_regs != n_of_non_aliasing_regs;
3034 }
3035 #endif
3036 
3037 
// Creates a patcher whose embedded macro assembler writes directly over the
// existing code at |address| for exactly |size| bytes.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
3047 
3048 
// Flushes the instruction cache for the patched region and (in debug mode)
// verifies that exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CpuFeatures::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
3057 
3058 
// Tests the memory-chunk flags of the page containing |object| against
// |mask| and jumps to |condition_met| if the test satisfies |cc| (zero or
// not_zero). |scratch| receives the page base address.
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  // Mask the object address down to its page start.
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Use the shorter byte test when the mask fits in the low byte.
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
3081 
3082 
// Like CheckPageFlag, but for a statically-known map: the page flag address
// is computed at code-generation time and tested via a static operand. This
// relies on maps never being moved by the GC and is incompatible with the
// serializer (addresses would not relocate).
void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  DCHECK(!serializer_enabled());  // Serializer cannot match page_flags.
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  DCHECK(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
3104 
3105 
// Jumps to |if_deprecated| when |map| is marked deprecated. Emits no code
// at all when the map can never be deprecated. Clobbers |scratch|.
void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
                                        Register scratch,
                                        Label* if_deprecated) {
  if (map->CanBeDeprecated()) {
    mov(scratch, map);
    mov(scratch, FieldOperand(scratch, Map::kBitField3Offset));
    and_(scratch, Immediate(Map::Deprecated::kMask));
    j(not_zero, if_deprecated);
  }
}
3116 
3117 
// Jumps to |on_black| when the incremental-marking color of |object| is
// black (mark-bit pattern "10"). Both scratch registers are clobbered.
void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1,
           on_black, on_black_near,
           1, 0);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
}
3128 
3129 
// Jumps to |has_color| if the two mark bits of |object| match the pattern
// (first_bit, second_bit).  |bitmap_scratch| and |mask_scratch| are set up
// by GetMarkBits and are clobbered, as is ecx.
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  // Test the first mark bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  // If the shift overflowed to zero, the second bit lives in the next cell.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  // Second mark bit is bit 0 of the following bitmap cell.
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}
3156 
3157 
// Computes the marking-bitmap location for the object at |addr_reg|:
// |bitmap_reg| ends up holding the page base plus the byte offset of the
// bitmap cell containing the object's first mark bit, and |mask_reg| holds
// a mask with that bit set.  Clobbers ecx (needed as the shl_cl count).
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  // bitmap_reg = page-aligned base of the page containing the object.
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  // Byte offset of the cell within the page, rounded down to cell size.
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  // Bit index of the object's first mark bit within its cell.
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}
3178 
3179 
// Checks the mark bits of |value|.  Black or grey objects fall through
// unchanged.  A white data object (HeapNumber, external string, or
// sequential string) is marked black and the page's live byte count is
// updated.  Any other white object jumps to |value_is_white_and_not_data|
// so the caller can handle it (e.g. push it on the marking deque).
// Clobbers ecx and both scratch registers.
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(not_zero, &done, Label::kNear);

  if (emit_debug_code()) {
    // Check for impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // shl.  May overflow making the check conservative.
    add(mask_scratch, mask_scratch);
    test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white.  We check whether it is data that doesn't need scanning.
  // Currently only checks for HeapNumber and non-cons strings.
  Register map = ecx;  // Holds map while checking type.
  Register length = ecx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number
  mov(map, FieldOperand(value, HeapObject::kMapOffset));
  cmp(map, isolate()->factory()->heap_number_map());
  j(not_equal, &not_heap_number, Label::kNear);
  mov(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  DCHECK(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  DCHECK(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  Register instance_type = ecx;
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  // Non-strings and indirect (cons/sliced) strings are not data objects.
  test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  DCHECK_EQ(0, kSeqStringTag & kExternalStringTag);
  DCHECK_EQ(0, kConsStringTag & kExternalStringTag);
  test_b(instance_type, kExternalStringTag);
  j(zero, &not_external, Label::kNear);
  mov(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either Latin1 or UC16.
  DCHECK(kOneByteStringTag == 0x04);
  // Turn the encoding bit into 4 (Latin1) or 8 (UC16) in |length|.
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  add(length, Immediate(0x04));
  // Value now either 4 (if Latin1) or 8 (if UC16), i.e., char-size shifted
  // by 2. If we multiply the string length as smi by this, it still
  // won't overflow a 32-bit value.
  DCHECK_EQ(SeqOneByteString::kMaxSize, SeqTwoByteString::kMaxSize);
  DCHECK(SeqOneByteString::kMaxSize <=
         static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, 2 + kSmiTagSize + kSmiShiftSize);
  // Round up to the object-alignment boundary to get the allocated size.
  add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white.  Mark it black.  Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  // Recover the page base and bump its live byte count by the object size.
  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
      length);
  if (emit_debug_code()) {
    // Sanity check: live bytes must never exceed the chunk size.
    mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
    cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
    Check(less_equal, kLiveBytesCountOverflowChunkSize);
  }

  bind(&done);
}
3285 
3286 
// Loads the enum cache length from |map|'s bit field 3 into |dst| as a smi.
void MacroAssembler::EnumLength(Register dst, Register map) {
  // EnumLengthBits occupy the low bits of bit field 3, so a mask alone
  // extracts the value without shifting.
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}
3293 
3294 
// Walks the prototype chain of the object in eax and jumps to |call_runtime|
// unless a fast for-in is possible: the receiver's enum cache must be
// initialized, every other object on the chain must have an empty enum
// cache, and no object on the chain may have elements (other than the empty
// fixed array or the empty slow element dictionary).  Clobbers ebx, ecx and
// edx.
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
  j(not_equal, call_runtime);

  bind(&no_elements);
  // Advance to the prototype; terminate when the chain ends at null.
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}
3335 
3336 
TestJSArrayForAllocationMemento(Register receiver_reg,Register scratch_reg,Label * no_memento_found)3337 void MacroAssembler::TestJSArrayForAllocationMemento(
3338     Register receiver_reg,
3339     Register scratch_reg,
3340     Label* no_memento_found) {
3341   ExternalReference new_space_start =
3342       ExternalReference::new_space_start(isolate());
3343   ExternalReference new_space_allocation_top =
3344       ExternalReference::new_space_allocation_top_address(isolate());
3345 
3346   lea(scratch_reg, Operand(receiver_reg,
3347       JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
3348   cmp(scratch_reg, Immediate(new_space_start));
3349   j(less, no_memento_found);
3350   cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
3351   j(greater, no_memento_found);
3352   cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
3353       Immediate(isolate()->factory()->allocation_memento_map()));
3354 }
3355 
3356 
// Walks the prototype chain starting at |object| and jumps to |found| if
// any object on the chain uses dictionary-mode (slow) elements.  Falls
// through once the chain terminates at null.  Clobbers both scratches.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again;

  // scratch contained elements pointer.
  mov(current, object);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
  // Extract the elements kind from bit field 2.
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(not_equal, &loop_again);
}
3381 
3382 
TruncatingDiv(Register dividend,int32_t divisor)3383 void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
3384   DCHECK(!dividend.is(eax));
3385   DCHECK(!dividend.is(edx));
3386   base::MagicNumbersForDivision<uint32_t> mag =
3387       base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
3388   mov(eax, Immediate(mag.multiplier));
3389   imul(dividend);
3390   bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
3391   if (divisor > 0 && neg) add(edx, dividend);
3392   if (divisor < 0 && !neg && mag.multiplier > 0) sub(edx, dividend);
3393   if (mag.shift > 0) sar(edx, mag.shift);
3394   mov(eax, dividend);
3395   shr(eax, 31);
3396   add(edx, eax);
3397 }
3398 
3399 
3400 } }  // namespace v8::internal
3401 
3402 #endif  // V8_TARGET_ARCH_IA32
3403