// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_IA32

#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/cpu-profiler.h"
#include "src/debug.h"
#include "src/isolate-inl.h"
#include "src/runtime.h"
#include "src/serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (isolate() != NULL) {
    // TODO(titzer): should we just use a null handle here instead?
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
  ASSERT(!r.IsDouble());
  if (r.IsInteger8()) {
    movsx_b(dst, src);
  } else if (r.IsUInteger8()) {
    movzx_b(dst, src);
  } else if (r.IsInteger16()) {
    movsx_w(dst, src);
  } else if (r.IsUInteger16()) {
    movzx_w(dst, src);
  } else {
    mov(dst, src);
  }
}


void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
  ASSERT(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    mov_b(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    mov_w(dst, src);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    mov(dst, src);
  }
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
    mov(destination, value);
    return;
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(destination, Immediate(index));
  mov(destination, Operand::StaticArray(destination,
                                        times_pointer_size,
                                        roots_array_start));
}


void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  ASSERT(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}


void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                 times_pointer_size,
                                 roots_array_start));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


void MacroAssembler::InNewSpace(
    Register object,
    Register scratch,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == equal || cc == not_equal);
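  // Pages are power-of-two aligned, so masking off the low bits of the
  // object's address yields the start of its page, where the MemoryChunk
  // header (and its flags word) lives.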
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check that we can use a test_b.
  ASSERT(MemoryChunk::IN_FROM_SPACE < 8);
  ASSERT(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  // If non-zero, the page belongs to new-space.
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr,
    Register scratch,
    SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  // Check for end of buffer.
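  // The store buffer is laid out so that this bit of the top address is set
  // exactly when the buffer has filled up.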
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}


void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  xorps(scratch_reg, scratch_reg);
  cvtsd2si(result_reg, input_reg);
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
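  // cvtsd2si yields 0x80000000 (kMinInt) when the conversion fails; the
  // compare against 1 sets the overflow flag only in that case.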
  cmp(result_reg, Immediate(0x1));
  j(overflow, &conv_failure, Label::kNear);
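  // The flags still reflect the compare above: the sign flag is set for
  // negative inputs (clamp to 0) and clear for inputs above 255 (clamp
  // to 255).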
  mov(result_reg, Immediate(0));
  setcc(sign, result_reg);
  sub(result_reg, Immediate(1));
  and_(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  Move(result_reg, Immediate(0));
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);
  Move(result_reg, Immediate(255));
  bind(&done);
}


void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}


void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::TruncateDoubleToI(Register result_reg,
                                       XMMRegister input_reg) {
  Label done;
  cvttsd2si(result_reg, Operand(input_reg));
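  // cvttsd2si yields 0x80000000 (kMinInt) when the conversion fails; the
  // compare against 1 overflows only in that case, so no_overflow means
  // the fast conversion succeeded.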
  cmp(result_reg, 0x1);
  j(no_overflow, &done, Label::kNear);

  sub(esp, Immediate(kDoubleSize));
  movsd(MemOperand(esp, 0), input_reg);
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
  bind(&done);
}


void MacroAssembler::DoubleToI(Register result_reg,
                               XMMRegister input_reg,
                               XMMRegister scratch,
                               MinusZeroMode minus_zero_mode,
                               Label* conversion_failed,
                               Label::Distance dst) {
  ASSERT(!input_reg.is(scratch));
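  // Truncate, then convert back: if the round trip reproduces the input,
  // the double was an exact integer. ucomisd also sets the parity flag when
  // the input is NaN, which is checked below.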
  cvttsd2si(result_reg, Operand(input_reg));
  Cvtsi2sd(scratch, Operand(result_reg));
  ucomisd(scratch, input_reg);
  j(not_equal, conversion_failed, dst);
  j(parity_even, conversion_failed, dst);  // NaN.
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    Label done;
    // The integer converted back is equal to the original. We
    // only have to test if we got -0 as an input.
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    movmskpd(result_reg, input_reg);
    // Bit 0 contains the sign of the double in input_reg.
    // If input was positive, we are ok and return 0, otherwise
    // jump to conversion_failed.
    and_(result_reg, 1);
    j(not_zero, conversion_failed, dst);
    bind(&done);
  }
}


void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done, slow_case;

  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(this, SSE3);
    Label convert;
    // Use more powerful conversion when sse3 is available.
    // Load x87 register with heap number.
    fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
    // Get exponent alone and check for too-big exponent.
    mov(result_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
    and_(result_reg, HeapNumber::kExponentMask);
    const uint32_t kTooBigExponent =
        (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
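    // An unbiased exponent of 63 or more means the value is at least 2^63,
    // which does not fit in the signed 64-bit result of fisttp_d.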
    cmp(Operand(result_reg), Immediate(kTooBigExponent));
    j(greater_equal, &slow_case, Label::kNear);

    // Reserve space for 64 bit answer.
    sub(Operand(esp), Immediate(kDoubleSize));
    // Do conversion, which cannot fail because we checked the exponent.
    fisttp_d(Operand(esp, 0));
    mov(result_reg, Operand(esp, 0));  // Low word of answer is the result.
    add(Operand(esp), Immediate(kDoubleSize));
    jmp(&done, Label::kNear);

    // Slow case.
    bind(&slow_case);
    if (input_reg.is(result_reg)) {
      // Input is clobbered. Restore number from fpu stack.
      sub(Operand(esp), Immediate(kDoubleSize));
      fstp_d(Operand(esp, 0));
      SlowTruncateToI(result_reg, esp, 0);
      add(esp, Immediate(kDoubleSize));
    } else {
      fstp(0);
      SlowTruncateToI(result_reg, input_reg);
    }
  } else {
    movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    cvttsd2si(result_reg, Operand(xmm0));
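    // As above, cvttsd2si signals failure by producing 0x80000000 (kMinInt),
    // the only value for which the compare against 1 overflows.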
    cmp(result_reg, 0x1);
    j(no_overflow, &done, Label::kNear);
    // Check if the input was 0x80000000 (kMinInt).
    // If not, the conversion overflowed and we take the slow case.
    ExternalReference min_int = ExternalReference::address_of_min_int();
    ucomisd(xmm0, Operand::StaticVariable(min_int));
    j(not_equal, &slow_case, Label::kNear);
    j(parity_even, &slow_case, Label::kNear);  // NaN.
    jmp(&done, Label::kNear);

    // Slow case.
    bind(&slow_case);
    if (input_reg.is(result_reg)) {
      // Input is clobbered. Restore number from double scratch.
      sub(esp, Immediate(kDoubleSize));
      movsd(MemOperand(esp, 0), xmm0);
      SlowTruncateToI(result_reg, esp, 0);
      add(esp, Immediate(kDoubleSize));
    } else {
      SlowTruncateToI(result_reg, input_reg);
    }
  }
  bind(&done);
}


void MacroAssembler::TaggedToI(Register result_reg,
                               Register input_reg,
                               XMMRegister temp,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision) {
  Label done;
  ASSERT(!temp.is(xmm0));

  cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  j(not_equal, lost_precision, Label::kNear);

  ASSERT(!temp.is(no_xmm_reg));

  movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
  cvttsd2si(result_reg, Operand(xmm0));
  Cvtsi2sd(temp, Operand(result_reg));
  ucomisd(xmm0, temp);
  RecordComment("Deferred TaggedToI: lost precision");
  j(not_equal, lost_precision, Label::kNear);
  RecordComment("Deferred TaggedToI: NaN");
  j(parity_even, lost_precision, Label::kNear);
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    movmskpd(result_reg, xmm0);
    and_(result_reg, 1);
    RecordComment("Deferred TaggedToI: minus zero");
    j(not_zero, lost_precision, Label::kNear);
  }
  bind(&done);
}


void MacroAssembler::LoadUint32(XMMRegister dst,
                                Register src) {
  Label done;
  cmp(src, Immediate(0));
  ExternalReference uint32_bias =
      ExternalReference::address_of_uint32_bias();
  Cvtsi2sd(dst, src);
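  // Cvtsi2sd interpreted src as a signed int32 and leaves the flags from the
  // compare above untouched. If the sign bit was set, src is really a uint32
  // value of 2^31 or more, so add the 2^32 bias to correct the double.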
  j(not_sign, &done, Label::kNear);
  addsd(dst, Operand::StaticVariable(uint32_bias));
  bind(&done);
}


void MacroAssembler::RecordWriteArray(
    Register object,
    Register value,
    Register index,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    ASSERT_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(index, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWriteForMap(
    Register object,
    Handle<Map> map,
    Register scratch1,
    Register scratch2,
    SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // Compute the address.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set.  This optimization
  // relies on the fact that maps can never be in new space.
  ASSERT(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
                       save_fp);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::DebugBreak() {
  Move(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(isolate(), 1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}


void MacroAssembler::Cvtsi2sd(XMMRegister dst, const Operand& src) {
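  // cvtsi2sd writes only the low quadword of dst; zeroing dst first avoids a
  // false dependency on its previous contents.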
  xorps(dst, dst);
  cvtsi2sd(dst, src);
}

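// An immediate wider than 17 bits embedded verbatim in generated code could
// be used to smuggle attacker-chosen instruction bytes into the JIT heap
// (JIT spraying). Such immediates are treated as unsafe and are obfuscated
// with the isolate's jit cookie by SafeMove and SafePush below.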
bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
  static const int kMaxImmediateBits = 17;
  if (!RelocInfo::IsNone(x.rmode_)) return false;
  return !is_intn(x.x_, kMaxImmediateBits);
}


void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Move(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Move(dst, x);
  }
}


void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}


void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch1,
    XMMRegister scratch2,
    Label* fail,
    int elements_offset) {
  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmp(FieldOperand(maybe_number, offset),
      Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  ExternalReference canonical_nan_reference =
      ExternalReference::address_of_canonical_non_hole_nan();
  movsd(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  bind(&have_double_value);
  movsd(FieldOperand(elements, key, times_4,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        scratch2);
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  movsd(scratch2, Operand::StaticVariable(canonical_nan_reference));
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch1, maybe_number);
  SmiUntag(scratch1);
  Cvtsi2sd(scratch2, scratch1);
  movsd(FieldOperand(elements, key, times_4,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        scratch2);
  bind(&done);
}


void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}


void MacroAssembler::DispatchMap(Register obj,
                                 Register unused,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(equal, success);

  bind(&fail);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
  return below_equal;
}


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  cmp(scratch,
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  j(above, fail);
}


void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}


void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}


void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}


void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}


void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}


void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}


void MacroAssembler::StubPrologue() {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(esi);  // Callee's context.
  push(Immediate(Smi::FromInt(StackFrame::STUB)));
}


void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictable_code_size_scope(this,
      kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
    // Pre-age the code.
    call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
        RelocInfo::CODE_AGE_SEQUENCE);
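    // Pad to the full code-age sequence length so that the young and
    // pre-aged prologues can be patched over one another.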
    Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
  } else {
    push(ebp);  // Caller's frame pointer.
    mov(ebp, esp);
    push(esi);  // Callee's context.
    push(edi);  // Callee's JS function.
  }
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  leave();
}


void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset  == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}


void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kMaxNumRegisters * kDoubleSize +
                argc * kPointerSize;
    sub(esp, Immediate(space));
    const int offset = -2 * kPointerSize;
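    // ebp - 2 * kPointerSize skips the saved entry sp and code object slots
    // pushed by EnterExitFramePrologue; the doubles are stored below them.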
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}


void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue(true);
}


void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}


void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame so we save NULL for
    // ebp. We expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(Smi::FromInt(0)));  // No context.
  } else {
    push(ebp);
    push(esi);
  }
  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));
  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}


void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it.  The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // eax = exception, edi = code object, edx = state.
  mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
  shr(edx, StackHandler::kKindWidth);
  mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
  SmiUntag(edx);
  lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  jmp(edi);
}


void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));
  // Restore the next handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Restore the context and frame pointer.
  pop(esi);  // Context.
  pop(ebp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == JS_ENTRY) == (ebp == 0) == (esi == 0), so we could test either
  // ebp or esi.
  Label skip;
  test(esi, esi);
  j(zero, &skip, Label::kNear);
  mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  bind(&skip);

  JumpToHandlerEntry();
}


void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  test(Operand(esp, StackHandlerConstants::kStateOffset),
       Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Clear the context pointer and frame pointer (0 was saved in the handler).
  pop(esi);
  pop(ebp);

  JumpToHandlerEntry();
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch1));
  ASSERT(!holder_reg.is(scratch2));
  ASSERT(!scratch1.is(scratch2));

  // Load current lexical context from the stack frame.
  mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  int offset =
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, offset));
  mov(scratch1, FieldOperand(scratch1, GlobalObject::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Read the first word and compare to native_context_map.
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map().
    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


// Compute the hash code from the untagged key.  This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericElementStub in
// code-stub-hydrogen.cc
//
// Note: r0 will contain hash code
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (serializer_enabled()) {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
}


void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key      - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    ASSERT(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
1387   const int kDetailsOffset =
1388       SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
1389   ASSERT_EQ(NORMAL, 0);
1390   test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
1391        Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
1392   j(not_zero, miss);
1393 
1394   // Get the value at the masked, scaled index.
1395   const int kValueOffset =
1396       SeededNumberDictionary::kElementsStartOffset + kPointerSize;
1397   mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
1398 }
1399 
1400 
LoadAllocationTopHelper(Register result,Register scratch,AllocationFlags flags)1401 void MacroAssembler::LoadAllocationTopHelper(Register result,
1402                                              Register scratch,
1403                                              AllocationFlags flags) {
1404   ExternalReference allocation_top =
1405       AllocationUtils::GetAllocationTopReference(isolate(), flags);
1406 
1407   // Just return if allocation top is already known.
1408   if ((flags & RESULT_CONTAINS_TOP) != 0) {
1409     // No use of scratch if allocation top is provided.
1410     ASSERT(scratch.is(no_reg));
1411 #ifdef DEBUG
1412     // Assert that result actually contains top on entry.
1413     cmp(result, Operand::StaticVariable(allocation_top));
1414     Check(equal, kUnexpectedAllocationTop);
1415 #endif
1416     return;
1417   }
1418 
1419   // Move address of new object to result. Use scratch register if available.
1420   if (scratch.is(no_reg)) {
1421     mov(result, Operand::StaticVariable(allocation_top));
1422   } else {
1423     mov(scratch, Immediate(allocation_top));
1424     mov(result, Operand(scratch, 0));
1425   }
1426 }
1427 
1428 
UpdateAllocationTopHelper(Register result_end,Register scratch,AllocationFlags flags)1429 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
1430                                                Register scratch,
1431                                                AllocationFlags flags) {
1432   if (emit_debug_code()) {
1433     test(result_end, Immediate(kObjectAlignmentMask));
1434     Check(zero, kUnalignedAllocationInNewSpace);
1435   }
1436 
1437   ExternalReference allocation_top =
1438       AllocationUtils::GetAllocationTopReference(isolate(), flags);
1439 
1440   // Update new top. Use scratch if available.
1441   if (scratch.is(no_reg)) {
1442     mov(Operand::StaticVariable(allocation_top), result_end);
1443   } else {
1444     mov(Operand(scratch, 0), result_end);
1445   }
1446 }
1447 
1448 
1449 void MacroAssembler::Allocate(int object_size,
1450                               Register result,
1451                               Register result_end,
1452                               Register scratch,
1453                               Label* gc_required,
1454                               AllocationFlags flags) {
1455   ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
1456   ASSERT(object_size <= Page::kMaxRegularHeapObjectSize);
1457   if (!FLAG_inline_new) {
1458     if (emit_debug_code()) {
1459       // Trash the registers to simulate an allocation failure.
1460       mov(result, Immediate(0x7091));
1461       if (result_end.is_valid()) {
1462         mov(result_end, Immediate(0x7191));
1463       }
1464       if (scratch.is_valid()) {
1465         mov(scratch, Immediate(0x7291));
1466       }
1467     }
1468     jmp(gc_required);
1469     return;
1470   }
1471   ASSERT(!result.is(result_end));
1472 
1473   // Load address of new object into result.
1474   LoadAllocationTopHelper(result, scratch, flags);
1475 
1476   ExternalReference allocation_limit =
1477       AllocationUtils::GetAllocationLimitReference(isolate(), flags);
1478 
1479   // Align the next allocation. Storing the filler map without checking top is
1480   // safe in new-space because the limit of the heap is aligned there.
1481   if ((flags & DOUBLE_ALIGNMENT) != 0) {
1482     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
1483     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
1484     Label aligned;
1485     test(result, Immediate(kDoubleAlignmentMask));
1486     j(zero, &aligned, Label::kNear);
1487     if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
1488       cmp(result, Operand::StaticVariable(allocation_limit));
1489       j(above_equal, gc_required);
1490     }
1491     mov(Operand(result, 0),
1492         Immediate(isolate()->factory()->one_pointer_filler_map()));
1493     add(result, Immediate(kDoubleSize / 2));
1494     bind(&aligned);
1495   }
1496 
1497   // Calculate new top and bail out if space is exhausted.
1498   Register top_reg = result_end.is_valid() ? result_end : result;
1499   if (!top_reg.is(result)) {
1500     mov(top_reg, result);
1501   }
1502   add(top_reg, Immediate(object_size));
1503   j(carry, gc_required);
1504   cmp(top_reg, Operand::StaticVariable(allocation_limit));
1505   j(above, gc_required);
1506 
1507   // Update allocation top.
1508   UpdateAllocationTopHelper(top_reg, scratch, flags);
1509 
1510   // Tag result if requested.
1511   bool tag_result = (flags & TAG_OBJECT) != 0;
1512   if (top_reg.is(result)) {
1513     if (tag_result) {
1514       sub(result, Immediate(object_size - kHeapObjectTag));
1515     } else {
1516       sub(result, Immediate(object_size));
1517     }
1518   } else if (tag_result) {
1519     ASSERT(kHeapObjectTag == 1);
1520     inc(result);
1521   }
1522 }
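
// The fast path above is bump-pointer allocation against an external
// top/limit pair. A hedged C++ sketch of the same protocol (names are
// illustrative; the real state lives behind the allocation-top and
// allocation-limit external references):
//
//   char* Bump(char*& top, char* limit, int size) {
//     char* result = top;
//     if (top + size > limit) return NULL;  // -> j(carry/above, gc_required)
//     top += size;                          // UpdateAllocationTopHelper
//     return result + 1;                    // kHeapObjectTag == 1 (TAG_OBJECT)
//   }
//
// The DOUBLE_ALIGNMENT block additionally writes a one-pointer filler map
// and advances top by kDoubleSize / 2 when top is only pointer-aligned, so
// the object proper starts on an 8-byte boundary.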
1523 
1524 
1525 void MacroAssembler::Allocate(int header_size,
1526                               ScaleFactor element_size,
1527                               Register element_count,
1528                               RegisterValueType element_count_type,
1529                               Register result,
1530                               Register result_end,
1531                               Register scratch,
1532                               Label* gc_required,
1533                               AllocationFlags flags) {
1534   ASSERT((flags & SIZE_IN_WORDS) == 0);
1535   if (!FLAG_inline_new) {
1536     if (emit_debug_code()) {
1537       // Trash the registers to simulate an allocation failure.
1538       mov(result, Immediate(0x7091));
1539       mov(result_end, Immediate(0x7191));
1540       if (scratch.is_valid()) {
1541         mov(scratch, Immediate(0x7291));
1542       }
1543       // Register element_count is not modified by the function.
1544     }
1545     jmp(gc_required);
1546     return;
1547   }
1548   ASSERT(!result.is(result_end));
1549 
1550   // Load address of new object into result.
1551   LoadAllocationTopHelper(result, scratch, flags);
1552 
1553   ExternalReference allocation_limit =
1554       AllocationUtils::GetAllocationLimitReference(isolate(), flags);
1555 
1556   // Align the next allocation. Storing the filler map without checking top is
1557   // safe in new-space because the limit of the heap is aligned there.
1558   if ((flags & DOUBLE_ALIGNMENT) != 0) {
1559     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
1560     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
1561     Label aligned;
1562     test(result, Immediate(kDoubleAlignmentMask));
1563     j(zero, &aligned, Label::kNear);
1564     if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
1565       cmp(result, Operand::StaticVariable(allocation_limit));
1566       j(above_equal, gc_required);
1567     }
1568     mov(Operand(result, 0),
1569         Immediate(isolate()->factory()->one_pointer_filler_map()));
1570     add(result, Immediate(kDoubleSize / 2));
1571     bind(&aligned);
1572   }
1573 
1574   // Calculate new top and bail out if space is exhausted.
1575   // We assume that element_count*element_size + header_size does not
1576   // overflow.
1577   if (element_count_type == REGISTER_VALUE_IS_SMI) {
1578     STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
1579     STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
1580     STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
1581     ASSERT(element_size >= times_2);
1582     ASSERT(kSmiTagSize == 1);
1583     element_size = static_cast<ScaleFactor>(element_size - 1);
1584   } else {
1585     ASSERT(element_count_type == REGISTER_VALUE_IS_INT32);
1586   }
1587   lea(result_end, Operand(element_count, element_size, header_size));
1588   add(result_end, result);
1589   j(carry, gc_required);
1590   cmp(result_end, Operand::StaticVariable(allocation_limit));
1591   j(above, gc_required);
1592 
1593   if ((flags & TAG_OBJECT) != 0) {
1594     ASSERT(kHeapObjectTag == 1);
1595     inc(result);
1596   }
1597 
1598   // Update allocation top.
1599   UpdateAllocationTopHelper(result_end, scratch, flags);
1600 }
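
// Why the scale factor is reduced by one step for smi counts above: on ia32
// a smi is the integer shifted left by one (kSmiTagSize == 1), so the smi
// register already carries a factor of two. For example:
//
//   int n = 5;
//   int smi = n << 1;          // smi encoding of 5
//   // times_2 applied to the smi == times_4 applied to the raw int:
//   assert(smi * 2 == n * 4);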
1601 
1602 
1603 void MacroAssembler::Allocate(Register object_size,
1604                               Register result,
1605                               Register result_end,
1606                               Register scratch,
1607                               Label* gc_required,
1608                               AllocationFlags flags) {
1609   ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
1610   if (!FLAG_inline_new) {
1611     if (emit_debug_code()) {
1612       // Trash the registers to simulate an allocation failure.
1613       mov(result, Immediate(0x7091));
1614       mov(result_end, Immediate(0x7191));
1615       if (scratch.is_valid()) {
1616         mov(scratch, Immediate(0x7291));
1617       }
1618       // object_size is left unchanged by this function.
1619     }
1620     jmp(gc_required);
1621     return;
1622   }
1623   ASSERT(!result.is(result_end));
1624 
1625   // Load address of new object into result.
1626   LoadAllocationTopHelper(result, scratch, flags);
1627 
1628   ExternalReference allocation_limit =
1629       AllocationUtils::GetAllocationLimitReference(isolate(), flags);
1630 
1631   // Align the next allocation. Storing the filler map without checking top is
1632   // safe in new-space because the limit of the heap is aligned there.
1633   if ((flags & DOUBLE_ALIGNMENT) != 0) {
1634     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
1635     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
1636     Label aligned;
1637     test(result, Immediate(kDoubleAlignmentMask));
1638     j(zero, &aligned, Label::kNear);
1639     if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
1640       cmp(result, Operand::StaticVariable(allocation_limit));
1641       j(above_equal, gc_required);
1642     }
1643     mov(Operand(result, 0),
1644         Immediate(isolate()->factory()->one_pointer_filler_map()));
1645     add(result, Immediate(kDoubleSize / 2));
1646     bind(&aligned);
1647   }
1648 
1649   // Calculate new top and bail out if space is exhausted.
1650   if (!object_size.is(result_end)) {
1651     mov(result_end, object_size);
1652   }
1653   add(result_end, result);
1654   j(carry, gc_required);
1655   cmp(result_end, Operand::StaticVariable(allocation_limit));
1656   j(above, gc_required);
1657 
1658   // Tag result if requested.
1659   if ((flags & TAG_OBJECT) != 0) {
1660     ASSERT(kHeapObjectTag == 1);
1661     inc(result);
1662   }
1663 
1664   // Update allocation top.
1665   UpdateAllocationTopHelper(result_end, scratch, flags);
1666 }
1667 
1668 
1669 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
1670   ExternalReference new_space_allocation_top =
1671       ExternalReference::new_space_allocation_top_address(isolate());
1672 
1673   // Make sure the object has no tag before resetting top.
1674   and_(object, Immediate(~kHeapObjectTagMask));
1675 #ifdef DEBUG
1676   cmp(object, Operand::StaticVariable(new_space_allocation_top));
1677   Check(below, kUndoAllocationOfNonAllocatedMemory);
1678 #endif
1679   mov(Operand::StaticVariable(new_space_allocation_top), object);
1680 }
1681 
1682 
1683 void MacroAssembler::AllocateHeapNumber(Register result,
1684                                         Register scratch1,
1685                                         Register scratch2,
1686                                         Label* gc_required) {
1687   // Allocate heap number in new space.
1688   Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
1689            TAG_OBJECT);
1690 
1691   // Set the map.
1692   mov(FieldOperand(result, HeapObject::kMapOffset),
1693       Immediate(isolate()->factory()->heap_number_map()));
1694 }
1695 
1696 
1697 void MacroAssembler::AllocateTwoByteString(Register result,
1698                                            Register length,
1699                                            Register scratch1,
1700                                            Register scratch2,
1701                                            Register scratch3,
1702                                            Label* gc_required) {
1703   // Calculate the number of bytes needed for the characters in the string while
1704   // observing object alignment.
1705   ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
1706   ASSERT(kShortSize == 2);
1707   // scratch1 = length * 2 + kObjectAlignmentMask.
1708   lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
1709   and_(scratch1, Immediate(~kObjectAlignmentMask));
1710 
1711   // Allocate two byte string in new space.
1712   Allocate(SeqTwoByteString::kHeaderSize,
1713            times_1,
1714            scratch1,
1715            REGISTER_VALUE_IS_INT32,
1716            result,
1717            scratch2,
1718            scratch3,
1719            gc_required,
1720            TAG_OBJECT);
1721 
1722   // Set the map, length and hash field.
1723   mov(FieldOperand(result, HeapObject::kMapOffset),
1724       Immediate(isolate()->factory()->string_map()));
1725   mov(scratch1, length);
1726   SmiTag(scratch1);
1727   mov(FieldOperand(result, String::kLengthOffset), scratch1);
1728   mov(FieldOperand(result, String::kHashFieldOffset),
1729       Immediate(String::kEmptyHashField));
1730 }
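
// Worked example of the size computation above, assuming the 4-byte object
// alignment of ia32 (kObjectAlignmentMask == 3): for length == 5 the lea
// yields 5 + 5 + 3 == 13, and masking rounds down to 12, i.e. 10 bytes of
// two-byte characters rounded up to alignment:
//
//   int bytes = (length * 2 + kObjectAlignmentMask) & ~kObjectAlignmentMask;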
1731 
1732 
1733 void MacroAssembler::AllocateAsciiString(Register result,
1734                                          Register length,
1735                                          Register scratch1,
1736                                          Register scratch2,
1737                                          Register scratch3,
1738                                          Label* gc_required) {
1739   // Calculate the number of bytes needed for the characters in the string while
1740   // observing object alignment.
1741   ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
1742   mov(scratch1, length);
1743   ASSERT(kCharSize == 1);
1744   add(scratch1, Immediate(kObjectAlignmentMask));
1745   and_(scratch1, Immediate(~kObjectAlignmentMask));
1746 
1747   // Allocate ASCII string in new space.
1748   Allocate(SeqOneByteString::kHeaderSize,
1749            times_1,
1750            scratch1,
1751            REGISTER_VALUE_IS_INT32,
1752            result,
1753            scratch2,
1754            scratch3,
1755            gc_required,
1756            TAG_OBJECT);
1757 
1758   // Set the map, length and hash field.
1759   mov(FieldOperand(result, HeapObject::kMapOffset),
1760       Immediate(isolate()->factory()->ascii_string_map()));
1761   mov(scratch1, length);
1762   SmiTag(scratch1);
1763   mov(FieldOperand(result, String::kLengthOffset), scratch1);
1764   mov(FieldOperand(result, String::kHashFieldOffset),
1765       Immediate(String::kEmptyHashField));
1766 }
1767 
1768 
1769 void MacroAssembler::AllocateAsciiString(Register result,
1770                                          int length,
1771                                          Register scratch1,
1772                                          Register scratch2,
1773                                          Label* gc_required) {
1774   ASSERT(length > 0);
1775 
1776   // Allocate ASCII string in new space.
1777   Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
1778            gc_required, TAG_OBJECT);
1779 
1780   // Set the map, length and hash field.
1781   mov(FieldOperand(result, HeapObject::kMapOffset),
1782       Immediate(isolate()->factory()->ascii_string_map()));
1783   mov(FieldOperand(result, String::kLengthOffset),
1784       Immediate(Smi::FromInt(length)));
1785   mov(FieldOperand(result, String::kHashFieldOffset),
1786       Immediate(String::kEmptyHashField));
1787 }
1788 
1789 
1790 void MacroAssembler::AllocateTwoByteConsString(Register result,
1791                                         Register scratch1,
1792                                         Register scratch2,
1793                                         Label* gc_required) {
1794   // Allocate cons string in new space.
1795   Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
1796            TAG_OBJECT);
1797 
1798   // Set the map. The other fields are left uninitialized.
1799   mov(FieldOperand(result, HeapObject::kMapOffset),
1800       Immediate(isolate()->factory()->cons_string_map()));
1801 }
1802 
1803 
1804 void MacroAssembler::AllocateAsciiConsString(Register result,
1805                                              Register scratch1,
1806                                              Register scratch2,
1807                                              Label* gc_required) {
1808   Allocate(ConsString::kSize,
1809            result,
1810            scratch1,
1811            scratch2,
1812            gc_required,
1813            TAG_OBJECT);
1814 
1815   // Set the map. The other fields are left uninitialized.
1816   mov(FieldOperand(result, HeapObject::kMapOffset),
1817       Immediate(isolate()->factory()->cons_ascii_string_map()));
1818 }
1819 
1820 
1821 void MacroAssembler::AllocateTwoByteSlicedString(Register result,
1822                                           Register scratch1,
1823                                           Register scratch2,
1824                                           Label* gc_required) {
1825   // Allocate sliced string in new space.
1826   Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
1827            TAG_OBJECT);
1828 
1829   // Set the map. The other fields are left uninitialized.
1830   mov(FieldOperand(result, HeapObject::kMapOffset),
1831       Immediate(isolate()->factory()->sliced_string_map()));
1832 }
1833 
1834 
1835 void MacroAssembler::AllocateAsciiSlicedString(Register result,
1836                                                Register scratch1,
1837                                                Register scratch2,
1838                                                Label* gc_required) {
1839   // Allocate sliced string in new space.
1840   Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
1841            TAG_OBJECT);
1842 
1843   // Set the map. The other fields are left uninitialized.
1844   mov(FieldOperand(result, HeapObject::kMapOffset),
1845       Immediate(isolate()->factory()->sliced_ascii_string_map()));
1846 }
1847 
1848 
1849 // Copy memory, byte-by-byte, from source to destination.  Not optimized for
1850 // long or aligned copies.  The contents of scratch and length are destroyed.
1851 // Source and destination are incremented by length.
1852 // Many variants of movsb, loop unrolling, word moves, and indexed operands
1853 // have been tried here already, and this is fastest.
1854 // A simpler loop is faster on small copies, but 30% slower on large ones.
1855 // The cld() instruction must have been emitted, to clear the direction
1856 // flag, before calling this function.
1857 void MacroAssembler::CopyBytes(Register source,
1858                                Register destination,
1859                                Register length,
1860                                Register scratch) {
1861   Label short_loop, len4, len8, len12, done, short_string;
1862   ASSERT(source.is(esi));
1863   ASSERT(destination.is(edi));
1864   ASSERT(length.is(ecx));
1865   cmp(length, Immediate(4));
1866   j(below, &short_string, Label::kNear);
1867 
1868   // Because source is 4-byte aligned in our uses of this function,
1869   // we keep source aligned for the rep_movs call by copying the odd bytes
1870   // at the end of the ranges.
1871   mov(scratch, Operand(source, length, times_1, -4));
1872   mov(Operand(destination, length, times_1, -4), scratch);
1873 
1874   cmp(length, Immediate(8));
1875   j(below_equal, &len4, Label::kNear);
1876   cmp(length, Immediate(12));
1877   j(below_equal, &len8, Label::kNear);
1878   cmp(length, Immediate(16));
1879   j(below_equal, &len12, Label::kNear);
1880 
1881   mov(scratch, ecx);
1882   shr(ecx, 2);
1883   rep_movs();
1884   and_(scratch, Immediate(0x3));
1885   add(destination, scratch);
1886   jmp(&done, Label::kNear);
1887 
1888   bind(&len12);
1889   mov(scratch, Operand(source, 8));
1890   mov(Operand(destination, 8), scratch);
1891   bind(&len8);
1892   mov(scratch, Operand(source, 4));
1893   mov(Operand(destination, 4), scratch);
1894   bind(&len4);
1895   mov(scratch, Operand(source, 0));
1896   mov(Operand(destination, 0), scratch);
1897   add(destination, length);
1898   jmp(&done, Label::kNear);
1899 
1900   bind(&short_string);
1901   test(length, length);
1902   j(zero, &done, Label::kNear);
1903 
1904   bind(&short_loop);
1905   mov_b(scratch, Operand(source, 0));
1906   mov_b(Operand(destination, 0), scratch);
1907   inc(source);
1908   inc(destination);
1909   dec(length);
1910   j(not_zero, &short_loop);
1911 
1912   bind(&done);
1913 }
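
// Worked example of the tail-first trick above, for length == 11: the first
// pair of movs copies bytes [7..10] (the last four), then 11 <= 12 routes to
// len8, which copies bytes [4..7] and [0..3]; every byte is covered, with a
// harmless overlap at byte 7. For length >= 17 the rep_movs path copies
// length / 4 dwords and the pre-copied tail covers the length % 4 remainder.
// A hedged C sketch of that long path:
//
//   memcpy(dst + len - 4, src + len - 4, 4);  // pre-copy unaligned tail
//   memcpy(dst, src, len & ~3u);              // rep_movs of len / 4 dwords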
1914 
1915 
1916 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
1917                                                 Register end_offset,
1918                                                 Register filler) {
1919   Label loop, entry;
1920   jmp(&entry);
1921   bind(&loop);
1922   mov(Operand(start_offset, 0), filler);
1923   add(start_offset, Immediate(kPointerSize));
1924   bind(&entry);
1925   cmp(start_offset, end_offset);
1926   j(less, &loop);
1927 }
1928 
1929 
1930 void MacroAssembler::BooleanBitTest(Register object,
1931                                     int field_offset,
1932                                     int bit_index) {
1933   bit_index += kSmiTagSize + kSmiShiftSize;
1934   ASSERT(IsPowerOf2(kBitsPerByte));
1935   int byte_index = bit_index / kBitsPerByte;
1936   int byte_bit_index = bit_index & (kBitsPerByte - 1);
1937   test_b(FieldOperand(object, field_offset + byte_index),
1938          static_cast<byte>(1 << byte_bit_index));
1939 }
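
// Worked example: with kSmiTagSize == 1 and kSmiShiftSize == 0 (the 32-bit
// smi layout), a requested bit_index k actually tests bit k + 1 of the
// smi-encoded field:
//
//   int adjusted = bit_index + 1;   // skip the smi tag bit
//   int byte_index = adjusted / 8;  // kBitsPerByte == 8
//   int bit_in_byte = adjusted & 7; // bit tested within that byte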
1940 
1941 
1942 
1943 void MacroAssembler::NegativeZeroTest(Register result,
1944                                       Register op,
1945                                       Label* then_label) {
1946   Label ok;
1947   test(result, result);
1948   j(not_zero, &ok);
1949   test(op, op);
1950   j(sign, then_label);
1951   bind(&ok);
1952 }
1953 
1954 
1955 void MacroAssembler::NegativeZeroTest(Register result,
1956                                       Register op1,
1957                                       Register op2,
1958                                       Register scratch,
1959                                       Label* then_label) {
1960   Label ok;
1961   test(result, result);
1962   j(not_zero, &ok);
1963   mov(scratch, op1);
1964   or_(scratch, op2);
1965   j(sign, then_label);
1966   bind(&ok);
1967 }
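
// Example of what the sign check above catches: a zero result produced from
// op1 == -4 and op2 == 0 (say, their product) must be -0 in JavaScript.
// result is zero and (op1 | op2) has the sign bit set, so control reaches
// then_label and the caller can handle the -0 case; the one-register
// variant above applies the same test to a single operand.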
1968 
1969 
1970 void MacroAssembler::TryGetFunctionPrototype(Register function,
1971                                              Register result,
1972                                              Register scratch,
1973                                              Label* miss,
1974                                              bool miss_on_bound_function) {
1975   // Check that the receiver isn't a smi.
1976   JumpIfSmi(function, miss);
1977 
1978   // Check that the function really is a function.
1979   CmpObjectType(function, JS_FUNCTION_TYPE, result);
1980   j(not_equal, miss);
1981 
1982   if (miss_on_bound_function) {
1983     // If a bound function, go to miss label.
1984     mov(scratch,
1985         FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
1986     BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
1987                    SharedFunctionInfo::kBoundFunction);
1988     j(not_zero, miss);
1989   }
1990 
1991   // Make sure that the function has an instance prototype.
1992   Label non_instance;
1993   movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
1994   test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
1995   j(not_zero, &non_instance);
1996 
1997   // Get the prototype or initial map from the function.
1998   mov(result,
1999       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2000 
2001   // If the prototype or initial map is the hole, don't return it and
2002   // simply miss the cache instead. This will allow us to allocate a
2003   // prototype object on-demand in the runtime system.
2004   cmp(result, Immediate(isolate()->factory()->the_hole_value()));
2005   j(equal, miss);
2006 
2007   // If the function does not have an initial map, we're done.
2008   Label done;
2009   CmpObjectType(result, MAP_TYPE, scratch);
2010   j(not_equal, &done);
2011 
2012   // Get the prototype from the initial map.
2013   mov(result, FieldOperand(result, Map::kPrototypeOffset));
2014   jmp(&done);
2015 
2016   // Non-instance prototype: Fetch prototype from constructor field
2017   // in initial map.
2018   bind(&non_instance);
2019   mov(result, FieldOperand(result, Map::kConstructorOffset));
2020 
2021   // All done.
2022   bind(&done);
2023 }
2024 
2025 
2026 void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
2027   ASSERT(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
2028   call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
2029 }
2030 
2031 
2032 void MacroAssembler::TailCallStub(CodeStub* stub) {
2033   jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
2034 }
2035 
2036 
2037 void MacroAssembler::StubReturn(int argc) {
2038   ASSERT(argc >= 1 && generating_stub());
2039   ret((argc - 1) * kPointerSize);
2040 }
2041 
2042 
2043 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
2044   return has_frame_ || !stub->SometimesSetsUpAFrame();
2045 }
2046 
2047 
2048 void MacroAssembler::IndexFromHash(Register hash, Register index) {
2049   // The assert checks that the constants for the maximum number of digits
2050   // for an array index cached in the hash field and the number of bits
2051   // reserved for it do not conflict.
2052   ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
2053          (1 << String::kArrayIndexValueBits));
2054   if (!index.is(hash)) {
2055     mov(index, hash);
2056   }
2057   DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
2058 }
2059 
2060 
2061 void MacroAssembler::CallRuntime(const Runtime::Function* f,
2062                                  int num_arguments,
2063                                  SaveFPRegsMode save_doubles) {
2064   // If the expected number of arguments of the runtime function is
2065   // constant, we check that the actual number of arguments matches the
2066   // expectation.
2067   CHECK(f->nargs < 0 || f->nargs == num_arguments);
2068 
2069   // TODO(1236192): Most runtime routines don't need the number of
2070   // arguments passed in because it is constant. At some point we
2071   // should remove this need and make the runtime routine entry code
2072   // smarter.
2073   Move(eax, Immediate(num_arguments));
2074   mov(ebx, Immediate(ExternalReference(f, isolate())));
2075   CEntryStub ces(isolate(), 1, save_doubles);
2076   CallStub(&ces);
2077 }
2078 
2079 
2080 void MacroAssembler::CallExternalReference(ExternalReference ref,
2081                                            int num_arguments) {
2082   mov(eax, Immediate(num_arguments));
2083   mov(ebx, Immediate(ref));
2084 
2085   CEntryStub stub(isolate(), 1);
2086   CallStub(&stub);
2087 }
2088 
2089 
2090 void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
2091                                                int num_arguments,
2092                                                int result_size) {
2093   // TODO(1236192): Most runtime routines don't need the number of
2094   // arguments passed in because it is constant. At some point we
2095   // should remove this need and make the runtime routine entry code
2096   // smarter.
2097   Move(eax, Immediate(num_arguments));
2098   JumpToExternalReference(ext);
2099 }
2100 
2101 
2102 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
2103                                      int num_arguments,
2104                                      int result_size) {
2105   TailCallExternalReference(ExternalReference(fid, isolate()),
2106                             num_arguments,
2107                             result_size);
2108 }
2109 
2110 
2111 Operand ApiParameterOperand(int index) {
2112   return Operand(esp, index * kPointerSize);
2113 }
2114 
2115 
2116 void MacroAssembler::PrepareCallApiFunction(int argc) {
2117   EnterApiExitFrame(argc);
2118   if (emit_debug_code()) {
2119     mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
2120   }
2121 }
2122 
2123 
2124 void MacroAssembler::CallApiFunctionAndReturn(
2125     Register function_address,
2126     ExternalReference thunk_ref,
2127     Operand thunk_last_arg,
2128     int stack_space,
2129     Operand return_value_operand,
2130     Operand* context_restore_operand) {
2131   ExternalReference next_address =
2132       ExternalReference::handle_scope_next_address(isolate());
2133   ExternalReference limit_address =
2134       ExternalReference::handle_scope_limit_address(isolate());
2135   ExternalReference level_address =
2136       ExternalReference::handle_scope_level_address(isolate());
2137 
2138   ASSERT(edx.is(function_address));
2139   // Allocate HandleScope in callee-save registers.
2140   mov(ebx, Operand::StaticVariable(next_address));
2141   mov(edi, Operand::StaticVariable(limit_address));
2142   add(Operand::StaticVariable(level_address), Immediate(1));
2143 
2144   if (FLAG_log_timer_events) {
2145     FrameScope frame(this, StackFrame::MANUAL);
2146     PushSafepointRegisters();
2147     PrepareCallCFunction(1, eax);
2148     mov(Operand(esp, 0),
2149         Immediate(ExternalReference::isolate_address(isolate())));
2150     CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
2151     PopSafepointRegisters();
2152   }
2153 
2154 
2155   Label profiler_disabled;
2156   Label end_profiler_check;
2157   mov(eax, Immediate(ExternalReference::is_profiling_address(isolate())));
2158   cmpb(Operand(eax, 0), 0);
2159   j(zero, &profiler_disabled);
2160 
2161   // Additional parameter is the address of the actual getter function.
2162   mov(thunk_last_arg, function_address);
2163   // Call the api function.
2164   mov(eax, Immediate(thunk_ref));
2165   call(eax);
2166   jmp(&end_profiler_check);
2167 
2168   bind(&profiler_disabled);
2169   // Call the api function.
2170   call(function_address);
2171   bind(&end_profiler_check);
2172 
2173   if (FLAG_log_timer_events) {
2174     FrameScope frame(this, StackFrame::MANUAL);
2175     PushSafepointRegisters();
2176     PrepareCallCFunction(1, eax);
2177     mov(Operand(esp, 0),
2178         Immediate(ExternalReference::isolate_address(isolate())));
2179     CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
2180     PopSafepointRegisters();
2181   }
2182 
2183   Label prologue;
2184   // Load the value from ReturnValue
2185   mov(eax, return_value_operand);
2186 
2187   Label promote_scheduled_exception;
2188   Label exception_handled;
2189   Label delete_allocated_handles;
2190   Label leave_exit_frame;
2191 
2192   bind(&prologue);
2193   // No more valid handles (the result handle was the last one). Restore
2194   // previous handle scope.
2195   mov(Operand::StaticVariable(next_address), ebx);
2196   sub(Operand::StaticVariable(level_address), Immediate(1));
2197   Assert(above_equal, kInvalidHandleScopeLevel);
2198   cmp(edi, Operand::StaticVariable(limit_address));
2199   j(not_equal, &delete_allocated_handles);
2200   bind(&leave_exit_frame);
2201 
2202   // Check if the function scheduled an exception.
2203   ExternalReference scheduled_exception_address =
2204       ExternalReference::scheduled_exception_address(isolate());
2205   cmp(Operand::StaticVariable(scheduled_exception_address),
2206       Immediate(isolate()->factory()->the_hole_value()));
2207   j(not_equal, &promote_scheduled_exception);
2208   bind(&exception_handled);
2209 
2210 #if ENABLE_EXTRA_CHECKS
2211   // Check if the function returned a valid JavaScript value.
2212   Label ok;
2213   Register return_value = eax;
2214   Register map = ecx;
2215 
2216   JumpIfSmi(return_value, &ok, Label::kNear);
2217   mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
2218 
2219   CmpInstanceType(map, FIRST_NONSTRING_TYPE);
2220   j(below, &ok, Label::kNear);
2221 
2222   CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
2223   j(above_equal, &ok, Label::kNear);
2224 
2225   cmp(map, isolate()->factory()->heap_number_map());
2226   j(equal, &ok, Label::kNear);
2227 
2228   cmp(return_value, isolate()->factory()->undefined_value());
2229   j(equal, &ok, Label::kNear);
2230 
2231   cmp(return_value, isolate()->factory()->true_value());
2232   j(equal, &ok, Label::kNear);
2233 
2234   cmp(return_value, isolate()->factory()->false_value());
2235   j(equal, &ok, Label::kNear);
2236 
2237   cmp(return_value, isolate()->factory()->null_value());
2238   j(equal, &ok, Label::kNear);
2239 
2240   Abort(kAPICallReturnedInvalidObject);
2241 
2242   bind(&ok);
2243 #endif
2244 
2245   bool restore_context = context_restore_operand != NULL;
2246   if (restore_context) {
2247     mov(esi, *context_restore_operand);
2248   }
2249   LeaveApiExitFrame(!restore_context);
2250   ret(stack_space * kPointerSize);
2251 
2252   bind(&promote_scheduled_exception);
2253   {
2254     FrameScope frame(this, StackFrame::INTERNAL);
2255     CallRuntime(Runtime::kHiddenPromoteScheduledException, 0);
2256   }
2257   jmp(&exception_handled);
2258 
2259   // HandleScope limit has changed. Delete allocated extensions.
2260   ExternalReference delete_extensions =
2261       ExternalReference::delete_handle_scope_extensions(isolate());
2262   bind(&delete_allocated_handles);
2263   mov(Operand::StaticVariable(limit_address), edi);
2264   mov(edi, eax);
2265   mov(Operand(esp, 0),
2266       Immediate(ExternalReference::isolate_address(isolate())));
2267   mov(eax, Immediate(delete_extensions));
2268   call(eax);
2269   mov(eax, edi);
2270   jmp(&leave_exit_frame);
2271 }
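
// A hedged sketch of the HandleScope bookkeeping above, in plain C++ (the
// struct is illustrative; the real fields live behind the next, limit and
// level external references):
//
//   struct HandleScopeData { Object** next; Object** limit; int level; };
//   // Entry: saved_next = next; saved_limit = limit; ++level;
//   // Exit:  next = saved_next; --level;
//   //        if (limit != saved_limit) DeleteExtensions();  // slow path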
2272 
2273 
2274 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
2275   // Set the entry point and jump to the C entry runtime stub.
2276   mov(ebx, Immediate(ext));
2277   CEntryStub ces(isolate(), 1);
2278   jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
2279 }
2280 
2281 
2282 void MacroAssembler::InvokePrologue(const ParameterCount& expected,
2283                                     const ParameterCount& actual,
2284                                     Handle<Code> code_constant,
2285                                     const Operand& code_operand,
2286                                     Label* done,
2287                                     bool* definitely_mismatches,
2288                                     InvokeFlag flag,
2289                                     Label::Distance done_near,
2290                                     const CallWrapper& call_wrapper) {
2291   bool definitely_matches = false;
2292   *definitely_mismatches = false;
2293   Label invoke;
2294   if (expected.is_immediate()) {
2295     ASSERT(actual.is_immediate());
2296     if (expected.immediate() == actual.immediate()) {
2297       definitely_matches = true;
2298     } else {
2299       mov(eax, actual.immediate());
2300       const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
2301       if (expected.immediate() == sentinel) {
2302         // Don't worry about adapting arguments for builtins that
2303         // don't want that done. Skip adaptation code by making it look
2304         // like we have a match between expected and actual number of
2305         // arguments.
2306         definitely_matches = true;
2307       } else {
2308         *definitely_mismatches = true;
2309         mov(ebx, expected.immediate());
2310       }
2311     }
2312   } else {
2313     if (actual.is_immediate()) {
2314       // Expected is in register, actual is immediate. This is the
2315       // case when we invoke function values without going through the
2316       // IC mechanism.
2317       cmp(expected.reg(), actual.immediate());
2318       j(equal, &invoke);
2319       ASSERT(expected.reg().is(ebx));
2320       mov(eax, actual.immediate());
2321     } else if (!expected.reg().is(actual.reg())) {
2322       // Both expected and actual are in (different) registers. This
2323       // is the case when we invoke functions using call and apply.
2324       cmp(expected.reg(), actual.reg());
2325       j(equal, &invoke);
2326       ASSERT(actual.reg().is(eax));
2327       ASSERT(expected.reg().is(ebx));
2328     }
2329   }
2330 
2331   if (!definitely_matches) {
2332     Handle<Code> adaptor =
2333         isolate()->builtins()->ArgumentsAdaptorTrampoline();
2334     if (!code_constant.is_null()) {
2335       mov(edx, Immediate(code_constant));
2336       add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2337     } else if (!code_operand.is_reg(edx)) {
2338       mov(edx, code_operand);
2339     }
2340 
2341     if (flag == CALL_FUNCTION) {
2342       call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
2343       call(adaptor, RelocInfo::CODE_TARGET);
2344       call_wrapper.AfterCall();
2345       if (!*definitely_mismatches) {
2346         jmp(done, done_near);
2347       }
2348     } else {
2349       jmp(adaptor, RelocInfo::CODE_TARGET);
2350     }
2351     bind(&invoke);
2352   }
2353 }
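
// The cases handled above, summarized (eax carries actual, ebx expected):
//
//   expected        actual      outcome
//   immediate n     same n      definitely_matches, no adaptation
//   sentinel        any         definitely_matches (callee adapts itself)
//   immediate n     other m     eax = m, ebx = n, ArgumentsAdaptorTrampoline
//   register (ebx)  immediate   compare; on mismatch eax = m, adaptor
//   register (ebx)  eax         compare; on mismatch, adaptor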
2354 
2355 
2356 void MacroAssembler::InvokeCode(const Operand& code,
2357                                 const ParameterCount& expected,
2358                                 const ParameterCount& actual,
2359                                 InvokeFlag flag,
2360                                 const CallWrapper& call_wrapper) {
2361   // You can't call a function without a valid frame.
2362   ASSERT(flag == JUMP_FUNCTION || has_frame());
2363 
2364   Label done;
2365   bool definitely_mismatches = false;
2366   InvokePrologue(expected, actual, Handle<Code>::null(), code,
2367                  &done, &definitely_mismatches, flag, Label::kNear,
2368                  call_wrapper);
2369   if (!definitely_mismatches) {
2370     if (flag == CALL_FUNCTION) {
2371       call_wrapper.BeforeCall(CallSize(code));
2372       call(code);
2373       call_wrapper.AfterCall();
2374     } else {
2375       ASSERT(flag == JUMP_FUNCTION);
2376       jmp(code);
2377     }
2378     bind(&done);
2379   }
2380 }
2381 
2382 
2383 void MacroAssembler::InvokeFunction(Register fun,
2384                                     const ParameterCount& actual,
2385                                     InvokeFlag flag,
2386                                     const CallWrapper& call_wrapper) {
2387   // You can't call a function without a valid frame.
2388   ASSERT(flag == JUMP_FUNCTION || has_frame());
2389 
2390   ASSERT(fun.is(edi));
2391   mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2392   mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2393   mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2394   SmiUntag(ebx);
2395 
2396   ParameterCount expected(ebx);
2397   InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2398              expected, actual, flag, call_wrapper);
2399 }
2400 
2401 
2402 void MacroAssembler::InvokeFunction(Register fun,
2403                                     const ParameterCount& expected,
2404                                     const ParameterCount& actual,
2405                                     InvokeFlag flag,
2406                                     const CallWrapper& call_wrapper) {
2407   // You can't call a function without a valid frame.
2408   ASSERT(flag == JUMP_FUNCTION || has_frame());
2409 
2410   ASSERT(fun.is(edi));
2411   mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2412 
2413   InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2414              expected, actual, flag, call_wrapper);
2415 }
2416 
2417 
2418 void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
2419                                     const ParameterCount& expected,
2420                                     const ParameterCount& actual,
2421                                     InvokeFlag flag,
2422                                     const CallWrapper& call_wrapper) {
2423   LoadHeapObject(edi, function);
2424   InvokeFunction(edi, expected, actual, flag, call_wrapper);
2425 }
2426 
2427 
2428 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
2429                                    InvokeFlag flag,
2430                                    const CallWrapper& call_wrapper) {
2431   // You can't call a builtin without a valid frame.
2432   ASSERT(flag == JUMP_FUNCTION || has_frame());
2433 
2434   // Rely on the assertion to check that the number of provided
2435   // arguments matches the expected number of arguments. Fake a
2436   // parameter count to avoid emitting code to do the check.
2437   ParameterCount expected(0);
2438   GetBuiltinFunction(edi, id);
2439   InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2440              expected, expected, flag, call_wrapper);
2441 }
2442 
2443 
2444 void MacroAssembler::GetBuiltinFunction(Register target,
2445                                         Builtins::JavaScript id) {
2446   // Load the JavaScript builtin function from the builtins object.
2447   mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2448   mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
2449   mov(target, FieldOperand(target,
2450                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
2451 }
2452 
2453 
2454 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
2455   ASSERT(!target.is(edi));
2456   // Load the JavaScript builtin function from the builtins object.
2457   GetBuiltinFunction(edi, id);
2458   // Load the code entry point from the function into the target register.
2459   mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2460 }
2461 
2462 
2463 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2464   if (context_chain_length > 0) {
2465     // Move up the chain of contexts to the context containing the slot.
2466     mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2467     for (int i = 1; i < context_chain_length; i++) {
2468       mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2469     }
2470   } else {
2471     // Slot is in the current function context.  Move it into the
2472     // destination register in case we store into it (the write barrier
2473     // cannot be allowed to destroy the context in esi).
2474     mov(dst, esi);
2475   }
2476 
2477   // We should not have found a with context by walking the context chain
2478   // (i.e., the static scope chain and runtime context chain do not agree).
2479   // A variable occurring in such a scope should have slot type LOOKUP and
2480   // not CONTEXT.
2481   if (emit_debug_code()) {
2482     cmp(FieldOperand(dst, HeapObject::kMapOffset),
2483         isolate()->factory()->with_context_map());
2484     Check(not_equal, kVariableResolvedToWithContext);
2485   }
2486 }
2487 
2488 
2489 void MacroAssembler::LoadTransitionedArrayMapConditional(
2490     ElementsKind expected_kind,
2491     ElementsKind transitioned_kind,
2492     Register map_in_out,
2493     Register scratch,
2494     Label* no_map_match) {
2495   // Load the global or builtins object from the current context.
2496   mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2497   mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
2498 
2499   // Check that the function's map is the same as the expected cached map.
2500   mov(scratch, Operand(scratch,
2501                        Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
2502 
2503   size_t offset = expected_kind * kPointerSize +
2504       FixedArrayBase::kHeaderSize;
2505   cmp(map_in_out, FieldOperand(scratch, offset));
2506   j(not_equal, no_map_match);
2507 
2508   // Use the transitioned cached map.
2509   offset = transitioned_kind * kPointerSize +
2510       FixedArrayBase::kHeaderSize;
2511   mov(map_in_out, FieldOperand(scratch, offset));
2512 }
2513 
2514 
2515 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
2516   // Load the global or builtins object from the current context.
2517   mov(function,
2518       Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2519   // Load the native context from the global or builtins object.
2520   mov(function,
2521       FieldOperand(function, GlobalObject::kNativeContextOffset));
2522   // Load the function from the native context.
2523   mov(function, Operand(function, Context::SlotOffset(index)));
2524 }
2525 
2526 
2527 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
2528                                                   Register map) {
2529   // Load the initial map.  The global functions all have initial maps.
2530   mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2531   if (emit_debug_code()) {
2532     Label ok, fail;
2533     CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
2534     jmp(&ok);
2535     bind(&fail);
2536     Abort(kGlobalFunctionsMustHaveInitialMap);
2537     bind(&ok);
2538   }
2539 }
2540 
2541 
2542 // Store the value in register src in the safepoint register stack
2543 // slot for register dst.
2544 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
2545   mov(SafepointRegisterSlot(dst), src);
2546 }
2547 
2548 
2549 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
2550   mov(SafepointRegisterSlot(dst), src);
2551 }
2552 
2553 
2554 void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
2555   mov(dst, SafepointRegisterSlot(src));
2556 }
2557 
2558 
2559 Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
2560   return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
2561 }
2562 
2563 
2564 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
2565   // The registers are pushed starting with the lowest encoding,
2566   // which means that lowest encodings are furthest away from
2567   // the stack pointer.
2568   ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
2569   return kNumSafepointRegisters - reg_code - 1;
2570 }
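
// Worked example: with kNumSafepointRegisters == 8 (the ia32 pushad layout),
// eax (code 0) is pushed first and so sits deepest, at slot 8 - 0 - 1 == 7,
// while edi (code 7) is pushed last and sits at slot 0, next to esp.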
2571 
2572 
2573 void MacroAssembler::LoadHeapObject(Register result,
2574                                     Handle<HeapObject> object) {
2575   AllowDeferredHandleDereference embedding_raw_address;
2576   if (isolate()->heap()->InNewSpace(*object)) {
2577     Handle<Cell> cell = isolate()->factory()->NewCell(object);
2578     mov(result, Operand::ForCell(cell));
2579   } else {
2580     mov(result, object);
2581   }
2582 }
2583 
2584 
2585 void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
2586   AllowDeferredHandleDereference using_raw_address;
2587   if (isolate()->heap()->InNewSpace(*object)) {
2588     Handle<Cell> cell = isolate()->factory()->NewCell(object);
2589     cmp(reg, Operand::ForCell(cell));
2590   } else {
2591     cmp(reg, object);
2592   }
2593 }
2594 
2595 
2596 void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
2597   AllowDeferredHandleDereference using_raw_address;
2598   if (isolate()->heap()->InNewSpace(*object)) {
2599     Handle<Cell> cell = isolate()->factory()->NewCell(object);
2600     push(Operand::ForCell(cell));
2601   } else {
2602     Push(object);
2603   }
2604 }
2605 
2606 
2607 void MacroAssembler::Ret() {
2608   ret(0);
2609 }
2610 
2611 
2612 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
2613   if (is_uint16(bytes_dropped)) {
2614     ret(bytes_dropped);
2615   } else {
2616     pop(scratch);
2617     add(esp, Immediate(bytes_dropped));
2618     push(scratch);
2619     ret(0);
2620   }
2621 }
2622 
2623 
2624 void MacroAssembler::Drop(int stack_elements) {
2625   if (stack_elements > 0) {
2626     add(esp, Immediate(stack_elements * kPointerSize));
2627   }
2628 }
2629 
2630 
2631 void MacroAssembler::Move(Register dst, Register src) {
2632   if (!dst.is(src)) {
2633     mov(dst, src);
2634   }
2635 }
2636 
2637 
2638 void MacroAssembler::Move(Register dst, const Immediate& x) {
2639   if (x.is_zero()) {
2640     xor_(dst, dst);  // Shorter than mov of 32-bit immediate 0.
2641   } else {
2642     mov(dst, x);
2643   }
2644 }
2645 
2646 
2647 void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
2648   mov(dst, x);
2649 }
2650 
2651 
2652 void MacroAssembler::Move(XMMRegister dst, double val) {
2653   // TODO(titzer): recognize double constants with ExternalReferences.
2654   uint64_t int_val = BitCast<uint64_t, double>(val);
2655   if (int_val == 0) {
2656     xorps(dst, dst);
2657   } else {
2658     int32_t lower = static_cast<int32_t>(int_val);
2659     int32_t upper = static_cast<int32_t>(int_val >> kBitsPerInt);
2660     push(Immediate(upper));
2661     push(Immediate(lower));
2662     movsd(dst, Operand(esp, 0));
2663     add(esp, Immediate(kDoubleSize));
2664   }
2665 }
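
// Worked example of the bit splitting above, for val == 1.0 (IEEE-754 bits
// 0x3FF0000000000000):
//
//   uint64_t bits = 0x3FF0000000000000ull;             // BitCast of 1.0
//   int32_t lower = static_cast<int32_t>(bits);        // 0x00000000
//   int32_t upper = static_cast<int32_t>(bits >> 32);  // 0x3FF00000
//
// upper is pushed first and lower second, giving the little-endian layout
// movsd expects at Operand(esp, 0).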
2666 
2667 
2668 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2669   if (FLAG_native_code_counters && counter->Enabled()) {
2670     mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
2671   }
2672 }
2673 
2674 
2675 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2676   ASSERT(value > 0);
2677   if (FLAG_native_code_counters && counter->Enabled()) {
2678     Operand operand = Operand::StaticVariable(ExternalReference(counter));
2679     if (value == 1) {
2680       inc(operand);
2681     } else {
2682       add(operand, Immediate(value));
2683     }
2684   }
2685 }
2686 
2687 
2688 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2689   ASSERT(value > 0);
2690   if (FLAG_native_code_counters && counter->Enabled()) {
2691     Operand operand = Operand::StaticVariable(ExternalReference(counter));
2692     if (value == 1) {
2693       dec(operand);
2694     } else {
2695       sub(operand, Immediate(value));
2696     }
2697   }
2698 }
2699 
2700 
2701 void MacroAssembler::IncrementCounter(Condition cc,
2702                                       StatsCounter* counter,
2703                                       int value) {
2704   ASSERT(value > 0);
2705   if (FLAG_native_code_counters && counter->Enabled()) {
2706     Label skip;
2707     j(NegateCondition(cc), &skip);
2708     pushfd();
2709     IncrementCounter(counter, value);
2710     popfd();
2711     bind(&skip);
2712   }
2713 }
2714 
2715 
2716 void MacroAssembler::DecrementCounter(Condition cc,
2717                                       StatsCounter* counter,
2718                                       int value) {
2719   ASSERT(value > 0);
2720   if (FLAG_native_code_counters && counter->Enabled()) {
2721     Label skip;
2722     j(NegateCondition(cc), &skip);
2723     pushfd();
2724     DecrementCounter(counter, value);
2725     popfd();
2726     bind(&skip);
2727   }
2728 }
2729 
2730 
2731 void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
2732   if (emit_debug_code()) Check(cc, reason);
2733 }
2734 
2735 
2736 void MacroAssembler::AssertFastElements(Register elements) {
2737   if (emit_debug_code()) {
2738     Factory* factory = isolate()->factory();
2739     Label ok;
2740     cmp(FieldOperand(elements, HeapObject::kMapOffset),
2741         Immediate(factory->fixed_array_map()));
2742     j(equal, &ok);
2743     cmp(FieldOperand(elements, HeapObject::kMapOffset),
2744         Immediate(factory->fixed_double_array_map()));
2745     j(equal, &ok);
2746     cmp(FieldOperand(elements, HeapObject::kMapOffset),
2747         Immediate(factory->fixed_cow_array_map()));
2748     j(equal, &ok);
2749     Abort(kJSObjectWithFastElementsMapHasSlowElements);
2750     bind(&ok);
2751   }
2752 }
2753 
2754 
2755 void MacroAssembler::Check(Condition cc, BailoutReason reason) {
2756   Label L;
2757   j(cc, &L);
2758   Abort(reason);
2759   // will not return here
2760   bind(&L);
2761 }
2762 
2763 
2764 void MacroAssembler::CheckStackAlignment() {
2765   int frame_alignment = OS::ActivationFrameAlignment();
2766   int frame_alignment_mask = frame_alignment - 1;
2767   if (frame_alignment > kPointerSize) {
2768     ASSERT(IsPowerOf2(frame_alignment));
2769     Label alignment_as_expected;
2770     test(esp, Immediate(frame_alignment_mask));
2771     j(zero, &alignment_as_expected);
2772     // Abort if stack is not aligned.
2773     int3();
2774     bind(&alignment_as_expected);
2775   }
2776 }
2777 
2778 
2779 void MacroAssembler::Abort(BailoutReason reason) {
2780 #ifdef DEBUG
2781   const char* msg = GetBailoutReason(reason);
2782   if (msg != NULL) {
2783     RecordComment("Abort message: ");
2784     RecordComment(msg);
2785   }
2786 
2787   if (FLAG_trap_on_abort) {
2788     int3();
2789     return;
2790   }
2791 #endif
2792 
2793   push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(reason))));
2794   // Disable stub call restrictions to always allow calls to abort.
2795   if (!has_frame_) {
2796     // We don't actually want to generate a pile of code for this, so just
2797     // claim there is a stack frame, without generating one.
2798     FrameScope scope(this, StackFrame::NONE);
2799     CallRuntime(Runtime::kAbort, 1);
2800   } else {
2801     CallRuntime(Runtime::kAbort, 1);
2802   }
2803   // will not return here
2804   int3();
2805 }


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}
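
// Note: this builds 2^power directly from its IEEE-754 bit pattern: the
// biased exponent (power + kExponentBias) lands in the low 32 bits of
// the XMM register and is shifted up into the exponent field, leaving a
// zero mantissa. For example, power = 3 gives a biased exponent of
// 1026 = 0x402 and the double bit pattern 0x4020000000000000, i.e. 8.0.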


void MacroAssembler::LookupNumberStringCache(Register object,
                                             Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* not_found) {
  // Use of registers. Register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  // Load the number string cache.
  LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
  shr(mask, kSmiTagSize + 1);  // Untag length and divide it by two.
  sub(mask, Immediate(1));  // Make mask.

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
  Label smi_hash_calculated;
  Label load_result_from_cache;
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  JumpIfNotSmi(object, &not_smi, Label::kNear);
  mov(scratch, object);
  SmiUntag(scratch);
  jmp(&smi_hash_calculated, Label::kNear);
  bind(&not_smi);
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  j(not_equal, not_found);
  STATIC_ASSERT(8 == kDoubleSize);
  mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
  xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
  // Object is heap number and hash is now in scratch. Calculate cache index.
  and_(scratch, mask);
  Register index = scratch;
  Register probe = mask;
  mov(probe,
      FieldOperand(number_string_cache,
                   index,
                   times_twice_pointer_size,
                   FixedArray::kHeaderSize));
  JumpIfSmi(probe, not_found);
  movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
  ucomisd(xmm0, FieldOperand(probe, HeapNumber::kValueOffset));
  j(parity_even, not_found);  // Bail out if NaN is involved.
  j(not_equal, not_found);  // The cache did not contain this value.
  jmp(&load_result_from_cache, Label::kNear);

  bind(&smi_hash_calculated);
  // Object is smi and hash is now in scratch. Calculate cache index.
  and_(scratch, mask);
  // Check if the entry is the smi we are looking for.
  cmp(object,
      FieldOperand(number_string_cache,
                   index,
                   times_twice_pointer_size,
                   FixedArray::kHeaderSize));
  j(not_equal, not_found);

  // Get the result from the cache.
  bind(&load_result_from_cache);
  mov(result,
      FieldOperand(number_string_cache,
                   index,
                   times_twice_pointer_size,
                   FixedArray::kHeaderSize + kPointerSize));
  IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
}
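
// Note: the cache is a FixedArray of (number, string) pairs, so a hash
// h indexes the number at element 2*h and its string at 2*h + 1; the
// times_twice_pointer_size scale and the extra kPointerSize in the
// final load encode exactly that layout.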


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}
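
// Note: the mask isolates three independent fields of the instance
// type byte: string vs non-string (kIsNotStringMask), representation
// (sequential/cons/sliced/external), and encoding (one-byte vs
// two-byte). Comparing the masked value against
// kStringTag | kSeqStringTag | kOneByteStringTag therefore accepts
// exactly sequential one-byte strings.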


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
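
// Note two tricks above: and-ing the two tagged pointers leaves the low
// (smi) bit set only if both are heap objects, so one JumpIfSmi covers
// both operands; and the lea packs both masked type bytes into a single
// word as scratch1 + scratch2 * 8, which is collision-free because the
// ASSERT_EQ verifies that the mask and its 3-bit-shifted copy do not
// overlap, letting a single cmp test both strings at once.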


void MacroAssembler::JumpIfNotUniqueName(Operand operand,
                                         Label* not_unique_name,
                                         Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}
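
// Note: a unique name is either an internalized string (both the
// kIsNotStringMask and kIsNotInternalizedMask bits clear, hence the
// test against zero) or a Symbol, which the fallback cmpb against
// SYMBOL_TYPE handles.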


void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object, Label::kNear);
  Abort(kNonObject);
  bind(&is_object);

  push(value);
  mov(value, FieldOperand(string, HeapObject::kMapOffset));
  movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmp(value, Immediate(encoding_mask));
  pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to come in untagged. Tag it to compare with the
  // string length without using a temp register; it is restored at the end
  // of this function.
  SmiTag(index);
  Check(no_overflow, kIndexIsTooLarge);

  cmp(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  cmp(index, Immediate(Smi::FromInt(0)));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index.
  SmiUntag(index);
}


void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
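
// Note: with frame alignment active, the stack after PrepareCallCFunction
// looks like (low to high): [arg0 .. argN-1][saved esp], with esp rounded
// down to the alignment boundary; CallCFunction later restores esp by
// reloading the saved value from slot num_arguments rather than by adding
// the argument byte count back.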


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  ASSERT(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}


bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
  if (r1.is(r2)) return true;
  if (r1.is(r3)) return true;
  if (r1.is(r4)) return true;
  if (r2.is(r3)) return true;
  if (r2.is(r4)) return true;
  if (r3.is(r4)) return true;
  return false;
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
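
// A minimal usage sketch (pc and target are hypothetical; assumes the
// masm() accessor declared alongside CodePatcher in the header):
// overwrite five bytes in place and let the destructor flush the
// instruction cache and verify the patch size.
//
//   {
//     CodePatcher patcher(pc, 5);
//     patcher.masm()->call(target, RelocInfo::CODE_TARGET);
//   }  // ~CodePatcher runs here.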


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
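
// Note: and-ing an address with ~Page::kPageAlignmentMask rounds it
// down to the start of its page, where the MemoryChunk header (and so
// kFlagsOffset) lives; the byte-sized test_b is simply a shorter
// encoding used when the mask fits in the low eight flag bits.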


void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  ASSERT(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
                                        Register scratch,
                                        Label* if_deprecated) {
  if (map->CanBeDeprecated()) {
    mov(scratch, map);
    mov(scratch, FieldOperand(scratch, Map::kBitField3Offset));
    and_(scratch, Immediate(Map::Deprecated::kMask));
    j(not_zero, if_deprecated);
  }
}


void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1,
           on_black, on_black_near,
           1, 0);  // kBlackBitPattern.
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
}


void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}
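
// Note: mark bits come in adjacent pairs per object, so after testing
// the first bit the mask is doubled (a left shift by adding it to
// itself) to reach the second bit; if the pair straddles a 32-bit
// bitmap cell, the shift overflows to zero and the second bit is read
// from bit 0 of the next cell via the word_boundary path.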


void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}
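
// A sketch of the math above, assuming one mark bit per pointer-sized
// word, grouped into 32-bit cells:
//
//   page   = addr & ~Page::kPageAlignmentMask
//   bitmap = page + cell_byte_offset, where cell_byte_offset is the
//            page offset scaled from words to mark bits and rounded
//            down to a whole cell
//   mask   = 1 << ((addr >> kPointerSizeLog2) % kBitsPerCell)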


void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(not_zero, &done, Label::kNear);

  if (emit_debug_code()) {
    // Check for impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // shl.  May overflow making the check conservative.
    add(mask_scratch, mask_scratch);
    test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white.  We check whether it is data that doesn't need scanning.
  // Currently only checks for HeapNumber and non-cons strings.
  Register map = ecx;  // Holds map while checking type.
  Register length = ecx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number.
  mov(map, FieldOperand(value, HeapObject::kMapOffset));
  cmp(map, isolate()->factory()->heap_number_map());
  j(not_equal, &not_heap_number, Label::kNear);
  mov(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  Register instance_type = ecx;
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
  test_b(instance_type, kExternalStringTag);
  j(zero, &not_external, Label::kNear);
  mov(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  ASSERT(kOneByteStringTag == 0x04);
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  add(length, Immediate(0x04));
  // Value now either 4 (if ASCII) or 8 (if UC16), i.e., char-size shifted
  // by 2. If we multiply the string length as smi by this, it still
  // won't overflow a 32-bit value.
  ASSERT_EQ(SeqOneByteString::kMaxSize, SeqTwoByteString::kMaxSize);
  ASSERT(SeqOneByteString::kMaxSize <=
         static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, 2 + kSmiTagSize + kSmiShiftSize);
  add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white.  Mark it black.  Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
      length);
  if (emit_debug_code()) {
    mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
    cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
    Check(less_equal, kLiveBytesCountOverflowChunkSize);
  }

  bind(&done);
}
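
// Note on the sequential-string size computation: the and/xor/add maps
// the encoding bit (kOneByteStringTag == 0x04) to the character size
// premultiplied by 4, i.e. 4 for one-byte and 8 for two-byte strings.
// Multiplying by the smi-tagged length (length << 1) and shifting right
// by 2 + kSmiTagSize (+ kSmiShiftSize, zero on ia32) then yields
// chars * char_size, which is rounded up to the object alignment
// granularity before being added to the page's live byte count.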


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}


void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
  j(not_equal, call_runtime);

  bind(&no_elements);
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}
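
// Note: eax holds the receiver on entry. The receiver only needs a
// properly initialized enum cache, while every prototype must in
// addition have an enum cache length of zero; each object on the
// chain, receiver included, must also have no elements (or only the
// empty slow-element dictionary).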


void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  lea(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  cmp(scratch_reg, Immediate(new_space_start));
  j(less, no_memento_found);
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater, no_memento_found);
  cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
      Immediate(isolate()->factory()->allocation_memento_map()));
}
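
// Note: the final cmp deliberately leaves its result in the flags
// rather than branching: "equal" afterwards means an allocation memento
// directly follows the JSArray in new space, so callers branch on the
// flags themselves (the no_memento_found label only catches the
// out-of-new-space early exits).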


void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  ASSERT(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again;

  // Start the prototype-chain walk at the object itself.
  mov(current, object);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(not_equal, &loop_again);
}


void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  ASSERT(!dividend.is(eax));
  ASSERT(!dividend.is(edx));
  MultiplierAndShift ms(divisor);
  mov(eax, Immediate(ms.multiplier()));
  imul(dividend);
  if (divisor > 0 && ms.multiplier() < 0) add(edx, dividend);
  if (divisor < 0 && ms.multiplier() > 0) sub(edx, dividend);
  if (ms.shift() > 0) sar(edx, ms.shift());
  mov(eax, dividend);
  shr(eax, 31);
  add(edx, eax);
}
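
// A rough C sketch of the magic-number division above, assuming
// MultiplierAndShift yields a multiplier m and shift s for divisor d
// (n, q are hypothetical names for the dividend and quotient):
//
//   int32_t q = (int32_t)(((int64_t)m * n) >> 32);  // imul: high half
//   if (d > 0 && m < 0) q += n;
//   if (d < 0 && m > 0) q -= n;
//   q >>= s;                   // sar
//   q += (uint32_t)n >> 31;    // bump negative results toward zero
//
// leaving the truncated quotient n / d in edx (q), with eax clobbered.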


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32