1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_X64)
31 
32 #include "codegen.h"
33 #include "ic-inl.h"
34 #include "runtime.h"
35 #include "stub-cache.h"
36 
37 namespace v8 {
38 namespace internal {
39 
40 // ----------------------------------------------------------------------------
41 // Static IC stub generators.
42 //
43 
44 #define __ ACCESS_MASM(masm)
45 
46 
47 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
48                                             Register type,
49                                             Label* global_object) {
50   // Register usage:
51   //   type: holds the receiver instance type on entry.
52   __ cmpb(type, Immediate(JS_GLOBAL_OBJECT_TYPE));
53   __ j(equal, global_object);
54   __ cmpb(type, Immediate(JS_BUILTINS_OBJECT_TYPE));
55   __ j(equal, global_object);
56   __ cmpb(type, Immediate(JS_GLOBAL_PROXY_TYPE));
57   __ j(equal, global_object);
58 }
59 
60 
61 // Generated code falls through if the receiver is a regular non-global
62 // JS object with slow properties and no interceptors.
63 static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
64                                                   Register receiver,
65                                                   Register r0,
66                                                   Register r1,
67                                                   Label* miss) {
68   // Register usage:
69   //   receiver: holds the receiver on entry and is unchanged.
70   //   r0: used to hold receiver instance type.
71   //       Holds the property dictionary on fall through.
72   //   r1: used to hold the receiver's map.
73 
74   __ JumpIfSmi(receiver, miss);
75 
76   // Check that the receiver is a valid JS object.
77   __ movq(r1, FieldOperand(receiver, HeapObject::kMapOffset));
78   __ movb(r0, FieldOperand(r1, Map::kInstanceTypeOffset));
79   __ cmpb(r0, Immediate(FIRST_JS_OBJECT_TYPE));
80   __ j(below, miss);
81 
82   // If this assert fails, we have to check upper bound too.
83   ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
84 
85   GenerateGlobalInstanceTypeCheck(masm, r0, miss);
86 
87   // Check for non-global object that requires access check.
88   __ testb(FieldOperand(r1, Map::kBitFieldOffset),
89            Immediate((1 << Map::kIsAccessCheckNeeded) |
90                      (1 << Map::kHasNamedInterceptor)));
91   __ j(not_zero, miss);
92 
93   __ movq(r0, FieldOperand(receiver, JSObject::kPropertiesOffset));
94   __ CompareRoot(FieldOperand(r0, HeapObject::kMapOffset),
95                  Heap::kHashTableMapRootIndex);
96   __ j(not_equal, miss);
97 }
98 
99 
100 // Probe the string dictionary in the |elements| register. Jump to the
101 // |done| label if a property with the given name is found, leaving the
102 // index into the dictionary in |r1|. Jump to the |miss| label
103 // otherwise.
104 static void GenerateStringDictionaryProbes(MacroAssembler* masm,
105                                            Label* miss,
106                                            Label* done,
107                                            Register elements,
108                                            Register name,
109                                            Register r0,
110                                            Register r1) {
111   // Assert that name contains a string.
112   if (FLAG_debug_code) __ AbortIfNotString(name);
113 
114   // Compute the capacity mask.
115   const int kCapacityOffset =
116       StringDictionary::kHeaderSize +
117       StringDictionary::kCapacityIndex * kPointerSize;
118   __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
119   __ decl(r0);
120 
121   // Generate an unrolled loop that performs a few probes before
122   // giving up. Measurements done on Gmail indicate that 2 probes
123   // cover ~93% of loads from dictionaries.
124   static const int kProbes = 4;
125   const int kElementsStartOffset =
126       StringDictionary::kHeaderSize +
127       StringDictionary::kElementsStartIndex * kPointerSize;
128   for (int i = 0; i < kProbes; i++) {
129     // Compute the masked index: (hash + i + i * i) & mask.
130     __ movl(r1, FieldOperand(name, String::kHashFieldOffset));
131     __ shrl(r1, Immediate(String::kHashShift));
132     if (i > 0) {
133       __ addl(r1, Immediate(StringDictionary::GetProbeOffset(i)));
134     }
135     __ and_(r1, r0);
136 
137     // Scale the index by multiplying by the entry size.
138     ASSERT(StringDictionary::kEntrySize == 3);
139     __ lea(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3
140 
141     // Check if the key is identical to the name.
142     __ cmpq(name, Operand(elements, r1, times_pointer_size,
143                           kElementsStartOffset - kHeapObjectTag));
144     if (i != kProbes - 1) {
145       __ j(equal, done);
146     } else {
147       __ j(not_equal, miss);
148     }
149   }
150 }
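// Illustrative sketch of the probe sequence generated above (the names mirror
// the constants used in the code; this is a description, not authoritative):
// for probe i the masked slot index is
//   index = ((hash_field >> String::kHashShift) +
//            StringDictionary::GetProbeOffset(i)) & (capacity - 1)
// and, since each entry spans kEntrySize == 3 pointers, the probed key is read
// from elements + kElementsStartOffset + index * 3 * kPointerSize
// - kHeapObjectTag.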
151 
152 
153 // Helper function used to load a property from a dictionary backing storage.
154 // This function may return false negatives, so miss_label
155 // must always call a backup property load that is complete.
156 // This function is safe to call if name is not a symbol, and will jump to
157 // the miss_label in that case.
158 // The generated code assumes that the receiver has slow properties,
159 // is not a global object and does not have interceptors.
160 static void GenerateDictionaryLoad(MacroAssembler* masm,
161                                    Label* miss_label,
162                                    Register elements,
163                                    Register name,
164                                    Register r0,
165                                    Register r1,
166                                    Register result) {
167   // Register use:
168   //
169   // elements - holds the property dictionary on entry and is unchanged.
170   //
171   // name - holds the name of the property on entry and is unchanged.
172   //
173   // r0   - used to hold the capacity of the property dictionary.
174   //
175   // r1   - used to hold the index into the property dictionary.
176   //
177   // result - holds the result on exit if the load succeeded.
178 
179   Label done;
180 
181   // Probe the dictionary.
182   GenerateStringDictionaryProbes(masm,
183                                  miss_label,
184                                  &done,
185                                  elements,
186                                  name,
187                                  r0,
188                                  r1);
189 
190   // If probing finds an entry in the dictionary, r1 contains the
191   // index into the dictionary. Check that the value is a normal
192   // property.
193   __ bind(&done);
194   const int kElementsStartOffset =
195       StringDictionary::kHeaderSize +
196       StringDictionary::kElementsStartIndex * kPointerSize;
197   const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
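  // Layout note (descriptive, inferred from the kEntrySize == 3 assertion in
  // the probe generator above): each dictionary entry spans three pointers,
  // with the key at +0, the value at +kPointerSize, and a details smi at
  // +2 * kPointerSize, which is what kDetailsOffset here and kValueOffset
  // below encode.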
198   __ Test(Operand(elements, r1, times_pointer_size,
199                   kDetailsOffset - kHeapObjectTag),
200           Smi::FromInt(PropertyDetails::TypeField::mask()));
201   __ j(not_zero, miss_label);
202 
203   // Get the value at the masked, scaled index.
204   const int kValueOffset = kElementsStartOffset + kPointerSize;
205   __ movq(result,
206           Operand(elements, r1, times_pointer_size,
207                   kValueOffset - kHeapObjectTag));
208 }
209 
210 
211 // Helper function used to store a property to a dictionary backing
212 // storage. This function may fail to store a property even though it
213 // is in the dictionary, so code at miss_label must always call a
214 // backup property store that is complete. This function is safe to
215 // call if name is not a symbol, and will jump to the miss_label in
216 // that case. The generated code assumes that the receiver has slow
217 // properties, is not a global object and does not have interceptors.
218 static void GenerateDictionaryStore(MacroAssembler* masm,
219                                     Label* miss_label,
220                                     Register elements,
221                                     Register name,
222                                     Register value,
223                                     Register scratch0,
224                                     Register scratch1) {
225   // Register use:
226   //
227   // elements - holds the property dictionary on entry and is clobbered.
228   //
229   // name - holds the name of the property on entry and is unchanged.
230   //
231   // value - holds the value to store and is unchanged.
232   //
233   // scratch0 - used to hold the capacity of the property dictionary and is
234   //            clobbered.
235   // scratch1 - used for the index into the property dictionary and is
236   //            clobbered.
237   Label done;
238 
239   // Probe the dictionary.
240   GenerateStringDictionaryProbes(masm,
241                                  miss_label,
242                                  &done,
243                                  elements,
244                                  name,
245                                  scratch0,
246                                  scratch1);
247 
248   // If probing finds an entry in the dictionary, scratch1 contains the
249   // index into the dictionary. Check that the value is a normal
250   // property that is not read only.
251   __ bind(&done);
252   const int kElementsStartOffset =
253       StringDictionary::kHeaderSize +
254       StringDictionary::kElementsStartIndex * kPointerSize;
255   const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
256   const int kTypeAndReadOnlyMask
257       = (PropertyDetails::TypeField::mask() |
258          PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
259   __ Test(Operand(elements,
260                   scratch1,
261                   times_pointer_size,
262                   kDetailsOffset - kHeapObjectTag),
263           Smi::FromInt(kTypeAndReadOnlyMask));
264   __ j(not_zero, miss_label);
265 
266   // Store the value at the masked, scaled index.
267   const int kValueOffset = kElementsStartOffset + kPointerSize;
268   __ lea(scratch1, Operand(elements,
269                            scratch1,
270                            times_pointer_size,
271                            kValueOffset - kHeapObjectTag));
272   __ movq(Operand(scratch1, 0), value);
273 
274   // Update write barrier. Make sure not to clobber the value.
275   __ movq(scratch0, value);
276   __ RecordWrite(elements, scratch1, scratch0);
277 }
278 
279 
280 static void GenerateNumberDictionaryLoad(MacroAssembler* masm,
281                                          Label* miss,
282                                          Register elements,
283                                          Register key,
284                                          Register r0,
285                                          Register r1,
286                                          Register r2,
287                                          Register result) {
288   // Register use:
289   //
290   // elements - holds the slow-case elements of the receiver on entry.
291   //            Unchanged unless 'result' is the same register.
292   //
293   // key      - holds the smi key on entry.
294   //            Unchanged unless 'result' is the same register.
295   //
296   // Scratch registers:
297   //
298   // r0 - holds the untagged key on entry and holds the hash once computed.
299   //
300   // r1 - used to hold the capacity mask of the dictionary
301   //
302   // r2 - used for the index into the dictionary.
303   //
304   // result - holds the result on exit if the load succeeded.
305   //          Allowed to be the same as 'key' or 'elements'.
306   //          Unchanged on bailout so 'key' and 'elements' can be used
307   //          in further computation.
308 
309   Label done;
310 
311   // Compute the hash code from the untagged key.  This must be kept in sync
312   // with ComputeIntegerHash in utils.h.
313   //
314   // hash = ~hash + (hash << 15);
315   __ movl(r1, r0);
316   __ notl(r0);
317   __ shll(r1, Immediate(15));
318   __ addl(r0, r1);
319   // hash = hash ^ (hash >> 12);
320   __ movl(r1, r0);
321   __ shrl(r1, Immediate(12));
322   __ xorl(r0, r1);
323   // hash = hash + (hash << 2);
324   __ leal(r0, Operand(r0, r0, times_4, 0));
325   // hash = hash ^ (hash >> 4);
326   __ movl(r1, r0);
327   __ shrl(r1, Immediate(4));
328   __ xorl(r0, r1);
329   // hash = hash * 2057;
330   __ imull(r0, r0, Immediate(2057));
331   // hash = hash ^ (hash >> 16);
332   __ movl(r1, r0);
333   __ shrl(r1, Immediate(16));
334   __ xorl(r0, r1);
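  // Taken together, the steps above amount to the following sketch, assumed
  // to match ComputeIntegerHash in utils.h:
  //   uint32_t hash = key;
  //   hash = ~hash + (hash << 15);
  //   hash = hash ^ (hash >> 12);
  //   hash = hash + (hash << 2);
  //   hash = hash ^ (hash >> 4);
  //   hash = hash * 2057;
  //   hash = hash ^ (hash >> 16);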
335 
336   // Compute capacity mask.
337   __ SmiToInteger32(r1,
338                     FieldOperand(elements, NumberDictionary::kCapacityOffset));
339   __ decl(r1);
340 
341   // Generate an unrolled loop that performs a few probes before giving up.
342   const int kProbes = 4;
343   for (int i = 0; i < kProbes; i++) {
344     // Use r2 for index calculations and keep the hash intact in r0.
345     __ movq(r2, r0);
346     // Compute the masked index: (hash + i + i * i) & mask.
347     if (i > 0) {
348       __ addl(r2, Immediate(NumberDictionary::GetProbeOffset(i)));
349     }
350     __ and_(r2, r1);
351 
352     // Scale the index by multiplying by the entry size.
353     ASSERT(NumberDictionary::kEntrySize == 3);
354     __ lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3
355 
356     // Check if the key matches.
357     __ cmpq(key, FieldOperand(elements,
358                               r2,
359                               times_pointer_size,
360                               NumberDictionary::kElementsStartOffset));
361     if (i != (kProbes - 1)) {
362       __ j(equal, &done);
363     } else {
364       __ j(not_equal, miss);
365     }
366   }
367 
368   __ bind(&done);
369   // Check that the value is a normal property.
370   const int kDetailsOffset =
371       NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
372   ASSERT_EQ(NORMAL, 0);
373   __ Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
374           Smi::FromInt(PropertyDetails::TypeField::mask()));
375   __ j(not_zero, miss);
376 
377   // Get the value at the masked, scaled index.
378   const int kValueOffset =
379       NumberDictionary::kElementsStartOffset + kPointerSize;
380   __ movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
381 }
382 
383 
384 // The offset from the inlined patch site to the start of the inlined
385 // load instruction.
386 const int LoadIC::kOffsetToLoadInstruction = 20;
387 
388 
389 void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
390   // ----------- S t a t e -------------
391   //  -- rax    : receiver
392   //  -- rcx    : name
393   //  -- rsp[0] : return address
394   // -----------------------------------
395   Label miss;
396 
397   StubCompiler::GenerateLoadArrayLength(masm, rax, rdx, &miss);
398   __ bind(&miss);
399   StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
400 }
401 
402 
403 void LoadIC::GenerateStringLength(MacroAssembler* masm, bool support_wrappers) {
404   // ----------- S t a t e -------------
405   //  -- rax    : receiver
406   //  -- rcx    : name
407   //  -- rsp[0] : return address
408   // -----------------------------------
409   Label miss;
410 
411   StubCompiler::GenerateLoadStringLength(masm, rax, rdx, rbx, &miss,
412                                          support_wrappers);
413   __ bind(&miss);
414   StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
415 }
416 
417 
418 void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
419   // ----------- S t a t e -------------
420   //  -- rax    : receiver
421   //  -- rcx    : name
422   //  -- rsp[0] : return address
423   // -----------------------------------
424   Label miss;
425 
426   StubCompiler::GenerateLoadFunctionPrototype(masm, rax, rdx, rbx, &miss);
427   __ bind(&miss);
428   StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
429 }
430 
431 
432 // Checks the receiver for special cases (value type, slow case bits).
433 // Falls through for regular JS object.
434 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
435                                            Register receiver,
436                                            Register map,
437                                            int interceptor_bit,
438                                            Label* slow) {
439   // Register use:
440   //   receiver - holds the receiver and is unchanged.
441   // Scratch registers:
442   //   map - used to hold the map of the receiver.
443 
444   // Check that the object isn't a smi.
445   __ JumpIfSmi(receiver, slow);
446 
447   // Check that the object is some kind of JS object EXCEPT JS Value type.
448   // In the case that the object is a value-wrapper object,
449   // we enter the runtime system to make sure that indexing
450   // into string objects works as intended.
451   ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
452   __ CmpObjectType(receiver, JS_OBJECT_TYPE, map);
453   __ j(below, slow);
454 
455   // Check bit field.
456   __ testb(FieldOperand(map, Map::kBitFieldOffset),
457            Immediate((1 << Map::kIsAccessCheckNeeded) |
458                      (1 << interceptor_bit)));
459   __ j(not_zero, slow);
460 }
461 
462 
463 // Loads an indexed element from a fast case array.
464 // If not_fast_array is NULL, doesn't perform the elements map check.
465 static void GenerateFastArrayLoad(MacroAssembler* masm,
466                                   Register receiver,
467                                   Register key,
468                                   Register elements,
469                                   Register scratch,
470                                   Register result,
471                                   Label* not_fast_array,
472                                   Label* out_of_range) {
473   // Register use:
474   //
475   // receiver - holds the receiver on entry.
476   //            Unchanged unless 'result' is the same register.
477   //
478   // key      - holds the smi key on entry.
479   //            Unchanged unless 'result' is the same register.
480   //
481   // elements - holds the elements of the receiver on exit.
482   //
483   // result   - holds the result on exit if the load succeeded.
484   //            Allowed to be the same as 'receiver' or 'key'.
485   //            Unchanged on bailout so 'receiver' and 'key' can be safely
486   //            used by further computation.
487   //
488   // Scratch registers:
489   //
490   //   scratch - used to hold elements of the receiver and the loaded value.
491 
492   __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
493   if (not_fast_array != NULL) {
494     // Check that the object is in fast mode and writable.
495     __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
496                    Heap::kFixedArrayMapRootIndex);
497     __ j(not_equal, not_fast_array);
498   } else {
499     __ AssertFastElements(elements);
500   }
501   // Check that the key (index) is within bounds.
502   __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset));
503   // Unsigned comparison rejects negative indices.
504   __ j(above_equal, out_of_range);
505   // Fast case: Do the load.
506   SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2);
507   __ movq(scratch, FieldOperand(elements,
508                                 index.reg,
509                                 index.scale,
510                                 FixedArray::kHeaderSize));
511   __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
512   // In case the loaded value is the_hole we have to consult GetProperty
513   // to ensure the prototype chain is searched.
514   __ j(equal, out_of_range);
515   if (!result.is(scratch)) {
516     __ movq(result, scratch);
517   }
518 }
519 
520 
521 // Checks whether a key is an array index string or a symbol string.
522 // Falls through if the key is a symbol.
523 static void GenerateKeyStringCheck(MacroAssembler* masm,
524                                    Register key,
525                                    Register map,
526                                    Register hash,
527                                    Label* index_string,
528                                    Label* not_symbol) {
529   // Register use:
530   //   key - holds the key and is unchanged. Assumed to be non-smi.
531   // Scratch registers:
532   //   map - used to hold the map of the key.
533   //   hash - used to hold the hash of the key.
534   __ CmpObjectType(key, FIRST_NONSTRING_TYPE, map);
535   __ j(above_equal, not_symbol);
536   // Is the string an array index, with cached numeric value?
537   __ movl(hash, FieldOperand(key, String::kHashFieldOffset));
538   __ testl(hash, Immediate(String::kContainsCachedArrayIndexMask));
539   __ j(zero, index_string);  // The value in hash is used at jump target.
540 
541   // Is the string a symbol?
542   ASSERT(kSymbolTag != 0);
543   __ testb(FieldOperand(map, Map::kInstanceTypeOffset),
544            Immediate(kIsSymbolMask));
545   __ j(zero, not_symbol);
546 }
547 
548 
549 
550 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
551   // ----------- S t a t e -------------
552   //  -- rax    : key
553   //  -- rdx    : receiver
554   //  -- rsp[0] : return address
555   // -----------------------------------
556   Label slow, check_string, index_smi, index_string, property_array_property;
557   Label probe_dictionary, check_number_dictionary;
558 
559   // Check that the key is a smi.
560   __ JumpIfNotSmi(rax, &check_string);
561   __ bind(&index_smi);
562   // Now the key is known to be a smi. This place is also jumped to from below
563   // where a numeric string is converted to a smi.
564 
565   GenerateKeyedLoadReceiverCheck(
566       masm, rdx, rcx, Map::kHasIndexedInterceptor, &slow);
567 
568   // Check the "has fast elements" bit in the receiver's map which is
569   // now in rcx.
570   __ testb(FieldOperand(rcx, Map::kBitField2Offset),
571            Immediate(1 << Map::kHasFastElements));
572   __ j(zero, &check_number_dictionary);
573 
574   GenerateFastArrayLoad(masm,
575                         rdx,
576                         rax,
577                         rcx,
578                         rbx,
579                         rax,
580                         NULL,
581                         &slow);
582   Counters* counters = masm->isolate()->counters();
583   __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
584   __ ret(0);
585 
586   __ bind(&check_number_dictionary);
587   __ SmiToInteger32(rbx, rax);
588   __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
589 
590   // Check whether the elements object is a number dictionary.
591   // rdx: receiver
592   // rax: key
593   // rbx: key as untagged int32
594   // rcx: elements
595   __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
596                  Heap::kHashTableMapRootIndex);
597   __ j(not_equal, &slow);
598   GenerateNumberDictionaryLoad(masm, &slow, rcx, rax, rbx, r9, rdi, rax);
599   __ ret(0);
600 
601   __ bind(&slow);
602   // Slow case: Jump to runtime.
603   // rdx: receiver
604   // rax: key
605   __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
606   GenerateRuntimeGetProperty(masm);
607 
608   __ bind(&check_string);
609   GenerateKeyStringCheck(masm, rax, rcx, rbx, &index_string, &slow);
610 
611   GenerateKeyedLoadReceiverCheck(
612       masm, rdx, rcx, Map::kHasNamedInterceptor, &slow);
613 
614   // If the receiver is a fast-case object, check the keyed lookup
615   // cache. Otherwise probe the dictionary leaving result in rcx.
616   __ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset));
617   __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
618                  Heap::kHashTableMapRootIndex);
619   __ j(equal, &probe_dictionary);
620 
621   // Load the map of the receiver, compute the keyed lookup cache hash
622   // based on 32 bits of the map pointer and the string hash.
623   __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
624   __ movl(rcx, rbx);
625   __ shr(rcx, Immediate(KeyedLookupCache::kMapHashShift));
626   __ movl(rdi, FieldOperand(rax, String::kHashFieldOffset));
627   __ shr(rdi, Immediate(String::kHashShift));
628   __ xor_(rcx, rdi);
629   __ and_(rcx, Immediate(KeyedLookupCache::kCapacityMask));
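  // Sketch of the cache index computed above (illustrative only):
  //   index = ((map_low32 >> KeyedLookupCache::kMapHashShift) ^
  //            (hash_field >> String::kHashShift)) &
  //           KeyedLookupCache::kCapacityMask
  // Each cache entry is a (map, symbol) pair, hence the
  // index * 2 * kPointerSize byte offset used when probing cache_keys below.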
630 
631   // Load the key (consisting of map and symbol) from the cache and
632   // check for match.
633   ExternalReference cache_keys
634       = ExternalReference::keyed_lookup_cache_keys(masm->isolate());
635   __ movq(rdi, rcx);
636   __ shl(rdi, Immediate(kPointerSizeLog2 + 1));
637   __ LoadAddress(kScratchRegister, cache_keys);
638   __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, 0));
639   __ j(not_equal, &slow);
640   __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, kPointerSize));
641   __ j(not_equal, &slow);
642 
643   // Get field offset, which is a 32-bit integer.
644   ExternalReference cache_field_offsets
645       = ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
646   __ LoadAddress(kScratchRegister, cache_field_offsets);
647   __ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0));
648   __ movzxbq(rcx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
649   __ subq(rdi, rcx);
650   __ j(above_equal, &property_array_property);
651 
652   // Load in-object property.
653   __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
654   __ addq(rcx, rdi);
655   __ movq(rax, FieldOperand(rdx, rcx, times_pointer_size, 0));
656   __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
657   __ ret(0);
658 
659   // Load property array property.
660   __ bind(&property_array_property);
661   __ movq(rax, FieldOperand(rdx, JSObject::kPropertiesOffset));
662   __ movq(rax, FieldOperand(rax, rdi, times_pointer_size,
663                             FixedArray::kHeaderSize));
664   __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
665   __ ret(0);
666 
667   // Do a quick inline probe of the receiver's dictionary, if it
668   // exists.
669   __ bind(&probe_dictionary);
670   // rdx: receiver
671   // rax: key
672   // rbx: elements
673 
674   __ movq(rcx, FieldOperand(rdx, JSObject::kMapOffset));
675   __ movb(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
676   GenerateGlobalInstanceTypeCheck(masm, rcx, &slow);
677 
678   GenerateDictionaryLoad(masm, &slow, rbx, rax, rcx, rdi, rax);
679   __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
680   __ ret(0);
681 
682   __ bind(&index_string);
683   __ IndexFromHash(rbx, rax);
684   __ jmp(&index_smi);
685 }
686 
687 
688 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
689   // ----------- S t a t e -------------
690   //  -- rax    : key
691   //  -- rdx    : receiver
692   //  -- rsp[0] : return address
693   // -----------------------------------
694   Label miss;
695 
696   Register receiver = rdx;
697   Register index = rax;
698   Register scratch1 = rbx;
699   Register scratch2 = rcx;
700   Register result = rax;
701 
702   StringCharAtGenerator char_at_generator(receiver,
703                                           index,
704                                           scratch1,
705                                           scratch2,
706                                           result,
707                                           &miss,  // When not a string.
708                                           &miss,  // When not a number.
709                                           &miss,  // When index out of range.
710                                           STRING_INDEX_IS_ARRAY_INDEX);
711   char_at_generator.GenerateFast(masm);
712   __ ret(0);
713 
714   StubRuntimeCallHelper call_helper;
715   char_at_generator.GenerateSlow(masm, call_helper);
716 
717   __ bind(&miss);
718   GenerateMiss(masm);
719 }
720 
721 
722 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
723   // ----------- S t a t e -------------
724   //  -- rax    : key
725   //  -- rdx    : receiver
726   //  -- rsp[0] : return address
727   // -----------------------------------
728   Label slow;
729 
730   // Check that the receiver isn't a smi.
731   __ JumpIfSmi(rdx, &slow);
732 
733   // Check that the key is an array index, that is Uint32.
734   STATIC_ASSERT(kSmiValueSize <= 32);
735   __ JumpUnlessNonNegativeSmi(rax, &slow);
736 
737   // Get the map of the receiver.
738   __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
739 
740   // Check that it has indexed interceptor and access checks
741   // are not enabled for this object.
742   __ movb(rcx, FieldOperand(rcx, Map::kBitFieldOffset));
743   __ andb(rcx, Immediate(kSlowCaseBitFieldMask));
744   __ cmpb(rcx, Immediate(1 << Map::kHasIndexedInterceptor));
745   __ j(not_zero, &slow);
746 
747   // Everything is fine, call runtime.
748   __ pop(rcx);
749   __ push(rdx);  // receiver
750   __ push(rax);  // key
751   __ push(rcx);  // return address
752 
753   // Perform tail call to the entry.
754   __ TailCallExternalReference(
755       ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
756                         masm->isolate()),
757       2,
758       1);
759 
760   __ bind(&slow);
761   GenerateMiss(masm);
762 }
763 
764 
765 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
766                                    StrictModeFlag strict_mode) {
767   // ----------- S t a t e -------------
768   //  -- rax     : value
769   //  -- rcx     : key
770   //  -- rdx     : receiver
771   //  -- rsp[0]  : return address
772   // -----------------------------------
773   Label slow, slow_with_tagged_index, fast, array, extra;
774 
775   // Check that the object isn't a smi.
776   __ JumpIfSmi(rdx, &slow_with_tagged_index);
777   // Get the map from the receiver.
778   __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
779   // Check that the receiver does not require access checks.  We need
780   // to do this because this generic stub does not perform map checks.
781   __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
782            Immediate(1 << Map::kIsAccessCheckNeeded));
783   __ j(not_zero, &slow_with_tagged_index);
784   // Check that the key is a smi.
785   __ JumpIfNotSmi(rcx, &slow_with_tagged_index);
786   __ SmiToInteger32(rcx, rcx);
787 
788   __ CmpInstanceType(rbx, JS_ARRAY_TYPE);
789   __ j(equal, &array);
790   // Check that the object is some kind of JS object.
791   __ CmpInstanceType(rbx, FIRST_JS_OBJECT_TYPE);
792   __ j(below, &slow);
793 
794   // Object case: Check key against length in the elements array.
795   // rax: value
796   // rdx: JSObject
797   // rcx: index
798   __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
799   // Check that the object is in fast mode and writable.
800   __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
801                  Heap::kFixedArrayMapRootIndex);
802   __ j(not_equal, &slow);
803   __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx);
804   // rax: value
805   // rbx: FixedArray
806   // rcx: index
807   __ j(above, &fast);
808 
809   // Slow case: call runtime.
810   __ bind(&slow);
811   __ Integer32ToSmi(rcx, rcx);
812   __ bind(&slow_with_tagged_index);
813   GenerateRuntimeSetProperty(masm, strict_mode);
814   // Never returns to here.
815 
816   // Extra capacity case: Check if there is extra capacity to
817   // perform the store and update the length. Used for adding one
818   // element to the array by writing to array[array.length].
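  // Illustrative example: with a = [1, 2, 3], the store a[3] = 4 takes this
  // path when the backing FixedArray still has spare capacity; the length is
  // bumped to 4 and the store then continues on the fast path.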
819   __ bind(&extra);
820   // rax: value
821   // rdx: receiver (a JSArray)
822   // rbx: receiver's elements array (a FixedArray)
823   // rcx: index
824   // flags: smicompare (rdx.length(), rcx)
825   __ j(not_equal, &slow);  // do not leave holes in the array
826   __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx);
827   __ j(below_equal, &slow);
828   // Increment index to get new length.
829   __ leal(rdi, Operand(rcx, 1));
830   __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
831   __ jmp(&fast);
832 
833   // Array case: Get the length and the elements array from the JS
834   // array. Check that the array is in fast mode (and writable); if it
835   // is the length is always a smi.
836   __ bind(&array);
837   // rax: value
838   // rdx: receiver (a JSArray)
839   // rcx: index
840   __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
841   __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
842                  Heap::kFixedArrayMapRootIndex);
843   __ j(not_equal, &slow);
844 
845   // Check the key against the length in the array, compute the
846   // address to store into and fall through to fast case.
847   __ SmiCompareInteger32(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
848   __ j(below_equal, &extra);
849 
850   // Fast case: Do the store.
851   __ bind(&fast);
852   // rax: value
853   // rbx: receiver's elements array (a FixedArray)
854   // rcx: index
855   NearLabel non_smi_value;
856   __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
857           rax);
858   __ JumpIfNotSmi(rax, &non_smi_value);
859   __ ret(0);
860   __ bind(&non_smi_value);
861   // Slow case that needs to retain rcx for use by RecordWrite.
862   // Update write barrier for the elements array address.
863   __ movq(rdx, rax);
864   __ RecordWriteNonSmi(rbx, 0, rdx, rcx);
865   __ ret(0);
866 }
867 
868 
869 // The generated code does not accept smi keys.
870 // The generated code falls through if both probes miss.
871 static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
872                                           int argc,
873                                           Code::Kind kind) {
874   // ----------- S t a t e -------------
875   // rcx                      : function name
876   // rdx                      : receiver
877   // -----------------------------------
878   Label number, non_number, non_string, boolean, probe, miss;
879 
880   // Probe the stub cache.
881   Code::Flags flags = Code::ComputeFlags(kind,
882                                          NOT_IN_LOOP,
883                                          MONOMORPHIC,
884                                          Code::kNoExtraICState,
885                                          NORMAL,
886                                          argc);
887   Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
888                                                   rax);
889 
890   // If the stub cache probing failed, the receiver might be a value.
891   // For value objects, we use the map of the prototype objects for
892   // the corresponding JSValue for the cache and that is what we need
893   // to probe.
894   //
895   // Check for number.
896   __ JumpIfSmi(rdx, &number);
897   __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rbx);
898   __ j(not_equal, &non_number);
899   __ bind(&number);
900   StubCompiler::GenerateLoadGlobalFunctionPrototype(
901       masm, Context::NUMBER_FUNCTION_INDEX, rdx);
902   __ jmp(&probe);
903 
904   // Check for string.
905   __ bind(&non_number);
906   __ CmpInstanceType(rbx, FIRST_NONSTRING_TYPE);
907   __ j(above_equal, &non_string);
908   StubCompiler::GenerateLoadGlobalFunctionPrototype(
909       masm, Context::STRING_FUNCTION_INDEX, rdx);
910   __ jmp(&probe);
911 
912   // Check for boolean.
913   __ bind(&non_string);
914   __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
915   __ j(equal, &boolean);
916   __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
917   __ j(not_equal, &miss);
918   __ bind(&boolean);
919   StubCompiler::GenerateLoadGlobalFunctionPrototype(
920       masm, Context::BOOLEAN_FUNCTION_INDEX, rdx);
921 
922   // Probe the stub cache for the value object.
923   __ bind(&probe);
924   Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
925                                                   no_reg);
926 
927   __ bind(&miss);
928 }
929 
930 
931 static void GenerateFunctionTailCall(MacroAssembler* masm,
932                                      int argc,
933                                      Label* miss) {
934   // ----------- S t a t e -------------
935   // rcx                    : function name
936   // rdi                    : function
937   // rsp[0]                 : return address
938   // rsp[8]                 : argument argc
939   // rsp[16]                : argument argc - 1
940   // ...
941   // rsp[argc * 8]          : argument 1
942   // rsp[(argc + 1) * 8]    : argument 0 = receiver
943   // -----------------------------------
944   __ JumpIfSmi(rdi, miss);
945   // Check that the value is a JavaScript function.
946   __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rdx);
947   __ j(not_equal, miss);
948 
949   // Invoke the function.
950   ParameterCount actual(argc);
951   __ InvokeFunction(rdi, actual, JUMP_FUNCTION);
952 }
953 
954 
955 // The generated code falls through if the call should be handled by runtime.
956 static void GenerateCallNormal(MacroAssembler* masm, int argc) {
957   // ----------- S t a t e -------------
958   // rcx                    : function name
959   // rsp[0]                 : return address
960   // rsp[8]                 : argument argc
961   // rsp[16]                : argument argc - 1
962   // ...
963   // rsp[argc * 8]          : argument 1
964   // rsp[(argc + 1) * 8]    : argument 0 = receiver
965   // -----------------------------------
966   Label miss;
967 
968   // Get the receiver of the function from the stack.
969   __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
970 
971   GenerateStringDictionaryReceiverCheck(masm, rdx, rax, rbx, &miss);
972 
973   // rax: elements
974   // Search the dictionary placing the result in rdi.
975   GenerateDictionaryLoad(masm, &miss, rax, rcx, rbx, rdi, rdi);
976 
977   GenerateFunctionTailCall(masm, argc, &miss);
978 
979   __ bind(&miss);
980 }
981 
982 
983 static void GenerateCallMiss(MacroAssembler* masm, int argc, IC::UtilityId id) {
984   // ----------- S t a t e -------------
985   // rcx                      : function name
986   // rsp[0]                   : return address
987   // rsp[8]                   : argument argc
988   // rsp[16]                  : argument argc - 1
989   // ...
990   // rsp[argc * 8]            : argument 1
991   // rsp[(argc + 1) * 8]      : argument 0 = receiver
992   // -----------------------------------
993 
994   Counters* counters = masm->isolate()->counters();
995   if (id == IC::kCallIC_Miss) {
996     __ IncrementCounter(counters->call_miss(), 1);
997   } else {
998     __ IncrementCounter(counters->keyed_call_miss(), 1);
999   }
1000 
1001   // Get the receiver of the function from the stack; 1 ~ return address.
1002   __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1003 
1004   // Enter an internal frame.
1005   __ EnterInternalFrame();
1006 
1007   // Push the receiver and the name of the function.
1008   __ push(rdx);
1009   __ push(rcx);
1010 
1011   // Call the entry.
1012   CEntryStub stub(1);
1013   __ Set(rax, 2);
1014   __ LoadAddress(rbx, ExternalReference(IC_Utility(id), masm->isolate()));
1015   __ CallStub(&stub);
1016 
1017   // Move result to rdi and exit the internal frame.
1018   __ movq(rdi, rax);
1019   __ LeaveInternalFrame();
1020 
1021   // Check if the receiver is a global object of some sort.
1022   // This can happen only for regular CallIC but not KeyedCallIC.
1023   if (id == IC::kCallIC_Miss) {
1024     Label invoke, global;
1025     __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));  // receiver
1026     __ JumpIfSmi(rdx, &invoke);
1027     __ CmpObjectType(rdx, JS_GLOBAL_OBJECT_TYPE, rcx);
1028     __ j(equal, &global);
1029     __ CmpInstanceType(rcx, JS_BUILTINS_OBJECT_TYPE);
1030     __ j(not_equal, &invoke);
1031 
1032     // Patch the receiver on the stack.
1033     __ bind(&global);
1034     __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
1035     __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
1036     __ bind(&invoke);
1037   }
1038 
1039   // Invoke the function.
1040   ParameterCount actual(argc);
1041   __ InvokeFunction(rdi, actual, JUMP_FUNCTION);
1042 }
1043 
1044 
1045 void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
1046   // ----------- S t a t e -------------
1047   // rcx                      : function name
1048   // rsp[0]                   : return address
1049   // rsp[8]                   : argument argc
1050   // rsp[16]                  : argument argc - 1
1051   // ...
1052   // rsp[argc * 8]            : argument 1
1053   // rsp[(argc + 1) * 8]      : argument 0 = receiver
1054   // -----------------------------------
1055 
1056   // Get the receiver of the function from the stack; 1 ~ return address.
1057   __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1058   GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC);
1059   GenerateMiss(masm, argc);
1060 }
1061 
1062 
1063 void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
1064   // ----------- S t a t e -------------
1065   // rcx                      : function name
1066   // rsp[0]                   : return address
1067   // rsp[8]                   : argument argc
1068   // rsp[16]                  : argument argc - 1
1069   // ...
1070   // rsp[argc * 8]            : argument 1
1071   // rsp[(argc + 1) * 8]      : argument 0 = receiver
1072   // -----------------------------------
1073 
1074   GenerateCallNormal(masm, argc);
1075   GenerateMiss(masm, argc);
1076 }
1077 
1078 
1079 void CallIC::GenerateMiss(MacroAssembler* masm, int argc) {
1080   // ----------- S t a t e -------------
1081   // rcx                      : function name
1082   // rsp[0]                   : return address
1083   // rsp[8]                   : argument argc
1084   // rsp[16]                  : argument argc - 1
1085   // ...
1086   // rsp[argc * 8]            : argument 1
1087   // rsp[(argc + 1) * 8]      : argument 0 = receiver
1088   // -----------------------------------
1089 
1090   GenerateCallMiss(masm, argc, IC::kCallIC_Miss);
1091 }
1092 
1093 
1094 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
1095   // ----------- S t a t e -------------
1096   // rcx                      : function name
1097   // rsp[0]                   : return address
1098   // rsp[8]                   : argument argc
1099   // rsp[16]                  : argument argc - 1
1100   // ...
1101   // rsp[argc * 8]            : argument 1
1102   // rsp[(argc + 1) * 8]      : argument 0 = receiver
1103   // -----------------------------------
1104 
1105   // Get the receiver of the function from the stack; 1 ~ return address.
1106   __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1107 
1108   Label do_call, slow_call, slow_load;
1109   Label check_number_dictionary, check_string, lookup_monomorphic_cache;
1110   Label index_smi, index_string;
1111 
1112   // Check that the key is a smi.
1113   __ JumpIfNotSmi(rcx, &check_string);
1114 
1115   __ bind(&index_smi);
1116   // Now the key is known to be a smi. This place is also jumped to from below
1117   // where a numeric string is converted to a smi.
1118 
1119   GenerateKeyedLoadReceiverCheck(
1120       masm, rdx, rax, Map::kHasIndexedInterceptor, &slow_call);
1121 
1122   GenerateFastArrayLoad(
1123       masm, rdx, rcx, rax, rbx, rdi, &check_number_dictionary, &slow_load);
1124   Counters* counters = masm->isolate()->counters();
1125   __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1);
1126 
1127   __ bind(&do_call);
1128   // receiver in rdx is not used after this point.
1129   // rcx: key
1130   // rdi: function
1131   GenerateFunctionTailCall(masm, argc, &slow_call);
1132 
1133   __ bind(&check_number_dictionary);
1134   // rax: elements
1135   // rcx: smi key
1136   // Check whether the elements object is a number dictionary.
1137   __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
1138                  Heap::kHashTableMapRootIndex);
1139   __ j(not_equal, &slow_load);
1140   __ SmiToInteger32(rbx, rcx);
1141   // rbx: untagged index
1142   GenerateNumberDictionaryLoad(masm, &slow_load, rax, rcx, rbx, r9, rdi, rdi);
1143   __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1);
1144   __ jmp(&do_call);
1145 
1146   __ bind(&slow_load);
1147   // This branch is taken when calling KeyedCallIC_Miss is neither required
1148   // nor beneficial.
1149   __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
1150   __ EnterInternalFrame();
1151   __ push(rcx);  // save the key
1152   __ push(rdx);  // pass the receiver
1153   __ push(rcx);  // pass the key
1154   __ CallRuntime(Runtime::kKeyedGetProperty, 2);
1155   __ pop(rcx);  // restore the key
1156   __ LeaveInternalFrame();
1157   __ movq(rdi, rax);
1158   __ jmp(&do_call);
1159 
1160   __ bind(&check_string);
1161   GenerateKeyStringCheck(masm, rcx, rax, rbx, &index_string, &slow_call);
1162 
1163   // The key is known to be a symbol.
1164   // If the receiver is a regular JS object with slow properties then do
1165   // a quick inline probe of the receiver's dictionary.
1166   // Otherwise do the monomorphic cache probe.
1167   GenerateKeyedLoadReceiverCheck(
1168       masm, rdx, rax, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
1169 
1170   __ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset));
1171   __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1172                  Heap::kHashTableMapRootIndex);
1173   __ j(not_equal, &lookup_monomorphic_cache);
1174 
1175   GenerateDictionaryLoad(masm, &slow_load, rbx, rcx, rax, rdi, rdi);
1176   __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1);
1177   __ jmp(&do_call);
1178 
1179   __ bind(&lookup_monomorphic_cache);
1180   __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1);
1181   GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC);
1182   // Fall through on miss.
1183 
1184   __ bind(&slow_call);
1185   // This branch is taken if:
1186   // - the receiver requires boxing or access check,
1187   // - the key is neither smi nor symbol,
1188   // - the value loaded is not a function,
1189   // - there is hope that the runtime will create a monomorphic call stub
1190   //   that will get fetched next time.
1191   __ IncrementCounter(counters->keyed_call_generic_slow(), 1);
1192   GenerateMiss(masm, argc);
1193 
1194   __ bind(&index_string);
1195   __ IndexFromHash(rbx, rcx);
1196   // Now jump to the place where smi keys are handled.
1197   __ jmp(&index_smi);
1198 }
1199 
1200 
1201 void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
1202   // ----------- S t a t e -------------
1203   // rcx                      : function name
1204   // rsp[0]                   : return address
1205   // rsp[8]                   : argument argc
1206   // rsp[16]                  : argument argc - 1
1207   // ...
1208   // rsp[argc * 8]            : argument 1
1209   // rsp[(argc + 1) * 8]      : argument 0 = receiver
1210   // -----------------------------------
1211 
1212   // Check if the name is a string.
1213   Label miss;
1214   __ JumpIfSmi(rcx, &miss);
1215   Condition cond = masm->IsObjectStringType(rcx, rax, rax);
1216   __ j(NegateCondition(cond), &miss);
1217   GenerateCallNormal(masm, argc);
1218   __ bind(&miss);
1219   GenerateMiss(masm, argc);
1220 }
1221 
1222 
1223 void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) {
1224   // ----------- S t a t e -------------
1225   // rcx                      : function name
1226   // rsp[0]                   : return address
1227   // rsp[8]                   : argument argc
1228   // rsp[16]                  : argument argc - 1
1229   // ...
1230   // rsp[argc * 8]            : argument 1
1231   // rsp[(argc + 1) * 8]      : argument 0 = receiver
1232   // -----------------------------------
1233 
1234   GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss);
1235 }
1236 
1237 
1238 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
1239   // ----------- S t a t e -------------
1240   //  -- rax    : receiver
1241   //  -- rcx    : name
1242   //  -- rsp[0] : return address
1243   // -----------------------------------
1244 
1245   // Probe the stub cache.
1246   Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC,
1247                                          NOT_IN_LOOP,
1248                                          MONOMORPHIC);
1249   Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rax, rcx, rbx,
1250                                                   rdx);
1251 
1252   // Cache miss: Jump to runtime.
1253   StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
1254 }
1255 
1256 
1257 void LoadIC::GenerateNormal(MacroAssembler* masm) {
1258   // ----------- S t a t e -------------
1259   //  -- rax    : receiver
1260   //  -- rcx    : name
1261   //  -- rsp[0] : return address
1262   // -----------------------------------
1263   Label miss;
1264 
1265   GenerateStringDictionaryReceiverCheck(masm, rax, rdx, rbx, &miss);
1266 
1267   //  rdx: elements
1268   // Search the dictionary placing the result in rax.
1269   GenerateDictionaryLoad(masm, &miss, rdx, rcx, rbx, rdi, rax);
1270   __ ret(0);
1271 
1272   // Cache miss: Jump to runtime.
1273   __ bind(&miss);
1274   GenerateMiss(masm);
1275 }
1276 
1277 
1278 void LoadIC::GenerateMiss(MacroAssembler* masm) {
1279   // ----------- S t a t e -------------
1280   //  -- rax    : receiver
1281   //  -- rcx    : name
1282   //  -- rsp[0] : return address
1283   // -----------------------------------
1284 
1285   Counters* counters = masm->isolate()->counters();
1286   __ IncrementCounter(counters->load_miss(), 1);
1287 
1288   __ pop(rbx);
1289   __ push(rax);  // receiver
1290   __ push(rcx);  // name
1291   __ push(rbx);  // return address
1292 
1293   // Perform tail call to the entry.
1294   ExternalReference ref =
1295       ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
1296   __ TailCallExternalReference(ref, 2, 1);
1297 }
1298 
1299 
1300 bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
1301   if (V8::UseCrankshaft()) return false;
1302 
1303   // The address of the instruction following the call.
1304   Address test_instruction_address =
1305       address + Assembler::kCallTargetAddressOffset;
1306   // If the instruction following the call is not a test rax, nothing
1307   // was inlined.
1308   if (*test_instruction_address != Assembler::kTestEaxByte) return false;
1309 
1310   Address delta_address = test_instruction_address + 1;
1311   // The delta to the start of the map check instruction.
1312   int delta = *reinterpret_cast<int*>(delta_address);
1313 
1314   // The map address is the last 8 bytes of the 10-byte
1315   // immediate move instruction, so we add 2 to get the
1316   // offset to the last 8 bytes.
1317   Address map_address = test_instruction_address + delta + 2;
1318   *(reinterpret_cast<Object**>(map_address)) = map;
1319 
1320   // The offset is in the 32-bit displacement of a seven byte
1321   // memory-to-register move instruction (REX.W 0x88 ModR/M disp32),
1322   // so we add 3 to get the offset of the displacement.
1323   Address offset_address =
1324       test_instruction_address + delta + kOffsetToLoadInstruction + 3;
1325   *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
1326   return true;
1327 }
1328 
1329 
1330 bool LoadIC::PatchInlinedContextualLoad(Address address,
1331                                         Object* map,
1332                                         Object* cell,
1333                                         bool is_dont_delete) {
1334   // TODO(<bug#>): implement this.
1335   return false;
1336 }
1337 
1338 
1339 bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) {
1340   if (V8::UseCrankshaft()) return false;
1341 
1342   // The address of the instruction following the call.
1343   Address test_instruction_address =
1344       address + Assembler::kCallTargetAddressOffset;
1345 
1346   // If the instruction following the call is not a test rax, nothing
1347   // was inlined.
1348   if (*test_instruction_address != Assembler::kTestEaxByte) return false;
1349 
1350   // Extract the encoded deltas from the test rax instruction.
1351   Address encoded_offsets_address = test_instruction_address + 1;
1352   int encoded_offsets = *reinterpret_cast<int*>(encoded_offsets_address);
1353   int delta_to_map_check = -(encoded_offsets & 0xFFFF);
1354   int delta_to_record_write = encoded_offsets >> 16;
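  // Illustrative decoding (numbers invented for this example): the two
  // deltas are presumably packed into the 32-bit test immediate as
  //   encoded_offsets = (delta_to_record_write << 16) |
  //                     (-delta_to_map_check & 0xFFFF)
  // so an immediate of 0x00250012 decodes to delta_to_map_check == -18
  // (the map check starts 18 bytes before the test instruction) and
  // delta_to_record_write == 37.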

  // Patch the map to check. The map address is the last 8 bytes of
  // the 10-byte immediate move instruction.
  Address map_check_address = test_instruction_address + delta_to_map_check;
  Address map_address = map_check_address + 2;
  *(reinterpret_cast<Object**>(map_address)) = map;

  // Patch the offset in the store instruction. The offset is in the
  // last 4 bytes of a 7 byte register-to-memory move instruction.
  Address offset_address =
      map_check_address + StoreIC::kOffsetToStoreInstruction + 3;
  // The offset should have initial value (kMaxInt - 1), cleared value
  // (-1) or we should be clearing the inlined version.
  ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt - 1 ||
         *reinterpret_cast<int*>(offset_address) == -1 ||
         (offset == 0 && map == HEAP->null_value()));
  *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;

  // Patch the offset in the write-barrier code. The offset is the
  // last 4 bytes of a 7 byte lea instruction.
  offset_address = map_check_address + delta_to_record_write + 3;
  // The offset should have initial value (kMaxInt), cleared value
  // (-1) or we should be clearing the inlined version.
  ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt ||
         *reinterpret_cast<int*>(offset_address) == -1 ||
         (offset == 0 && map == HEAP->null_value()));
  *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;

  return true;
}


static bool PatchInlinedMapCheck(Address address, Object* map) {
  if (V8::UseCrankshaft()) return false;

  // The argument is the address of the start of the call sequence
  // that called the IC.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;
  // The keyed load has a fast inlined case if the IC call instruction
  // is immediately followed by a test instruction.
  if (*test_instruction_address != Assembler::kTestEaxByte) return false;

  // Fetch the offset from the test instruction to the map compare
  // instructions (starting with the 64-bit immediate mov of the map
  // address). This offset is stored in the last 4 bytes of the 5
  // byte test instruction.
  Address delta_address = test_instruction_address + 1;
  int delta = *reinterpret_cast<int*>(delta_address);
  // Compute the map address.  The map address is in the last 8 bytes
  // of the 10-byte immediate mov instruction (incl. REX prefix), so we add 2
  // to the offset to get the map address.
  Address map_address = test_instruction_address + delta + 2;
  // Patch the map check.
  *(reinterpret_cast<Object**>(map_address)) = map;
  return true;
}


bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
  return PatchInlinedMapCheck(address, map);
}


bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
  return PatchInlinedMapCheck(address, map);
}


void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0]  : return address
  // -----------------------------------

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_miss(), 1);

  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rax);  // key
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}


void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0]  : return address
  // -----------------------------------

  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rax);  // key
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}


void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  // Probe the stub cache (the receiver is in rdx and the name in rcx).
  Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
                                         NOT_IN_LOOP,
                                         MONOMORPHIC,
                                         strict_mode);
  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
                                                  no_reg);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}


void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // name
  __ push(rax);  // value
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


// The offset from the inlined patch site to the start of the inlined
// store instruction.
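// (As used by PatchInlinedStore above: the patch site is assumed to be
// the start of the 10-byte map-check move, and the store a 7-byte
// register-to-memory move starting 20 bytes after it.)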
const int StoreIC::kOffsetToStoreInstruction = 20;


void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  //
  // This accepts as a receiver anything JSObject::SetElementsLength accepts
  // (currently anything except external and pixel arrays, i.e. anything with
  // elements of FixedArray type), but is currently restricted to JSArray.
  // The value must be a number; only smis are accepted, as they are the most
  // common case.

  Label miss;

  Register receiver = rdx;
  Register value = rax;
  Register scratch = rbx;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &miss);

  // Check that elements are FixedArray.
  // We rely on StoreIC_ArrayLength below to deal with all types of
  // fast elements (including COW).
  __ movq(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
  __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
  __ j(not_equal, &miss);

  // Check that value is a smi.
  __ JumpIfNotSmi(value, &miss);

  // Prepare tail call to StoreIC_ArrayLength.
  __ pop(scratch);
  __ push(receiver);
  __ push(value);
  __ push(scratch);  // return address

  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);

  __ bind(&miss);

  GenerateMiss(masm);
}


void StoreIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  Label miss;

  GenerateStringDictionaryReceiverCheck(masm, rdx, rbx, rdi, &miss);

  GenerateDictionaryStore(masm, &miss, rbx, rcx, rax, r8, r9);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1);
  __ ret(0);

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1);
  GenerateMiss(masm);
}


void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
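  // Five arguments are passed to the runtime call below: receiver, name,
  // value, property attributes and the strict mode flag, pushed in that
  // order beneath the re-pushed return address.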
  __ pop(rbx);
  __ push(rdx);
  __ push(rcx);
  __ push(rax);
  __ Push(Smi::FromInt(NONE));  // PropertyAttributes
  __ Push(Smi::FromInt(strict_mode));
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}


void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- rax     : value
  //  -- rcx     : key
  //  -- rdx     : receiver
  //  -- rsp[0]  : return address
  // -----------------------------------

  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ Push(Smi::FromInt(NONE));          // PropertyAttributes
  __ Push(Smi::FromInt(strict_mode));   // Strict mode.
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}


void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : value
  //  -- rcx     : key
  //  -- rdx     : receiver
  //  -- rsp[0]  : return address
  // -----------------------------------

  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


#undef __


Condition CompareIC::ComputeCondition(Token::Value op) {
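  // The generated comparison code reverses the operands for GT and LTE
  // (see the per-case comments below) so that the left operand is still
  // converted first, as ECMA-262 requires; e.g. 'a > b' is tested as
  // 'b < a', hence GT maps to 'less' and LTE to 'greater_equal'.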
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return equal;
    case Token::LT:
      return less;
    case Token::GT:
      // Reverse left and right operands to obtain ECMA-262 conversion order.
      return less;
    case Token::LTE:
      // Reverse left and right operands to obtain ECMA-262 conversion order.
      return greater_equal;
    case Token::GTE:
      return greater_equal;
    default:
      UNREACHABLE();
      return no_condition;
  }
}


static bool HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  return *test_instruction_address == Assembler::kTestAlByte;
}


void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
  HandleScope scope;
  Handle<Code> rewritten;
  State previous_state = GetState();

  State state = TargetState(previous_state, HasInlinedSmiCode(address()), x, y);
  if (state == GENERIC) {
    CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
    rewritten = stub.GetCode();
  } else {
    ICCompareStub stub(op_, state);
    rewritten = stub.GetCode();
  }
  set_target(*rewritten);

#ifdef DEBUG
  if (FLAG_trace_ic) {
    PrintF("[CompareIC (%s->%s)#%s]\n",
           GetStateName(previous_state),
           GetStateName(state),
           Token::Name(op_));
  }
#endif

  // Activate inlined smi code.
  if (previous_state == UNINITIALIZED) {
    PatchInlinedSmiCode(address());
  }
}

void PatchInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  if (*test_instruction_address != Assembler::kTestAlByte) {
    ASSERT(*test_instruction_address == Assembler::kNopByte);
    return;
  }

  Address delta_address = test_instruction_address + 1;
  // The delta from the test instruction back to the short conditional
  // jump that is patched below.
  int8_t delta = *reinterpret_cast<int8_t*>(delta_address);
  if (FLAG_trace_ic) {
    PrintF("[  patching ic at %p, test=%p, delta=%d\n",
           address, test_instruction_address, delta);
  }

  // Patch with a short conditional jump. There must be a
  // short jump-if-carry/not-carry at this position.
  Address jmp_address = test_instruction_address - delta;
  ASSERT(*jmp_address == Assembler::kJncShortOpcode ||
         *jmp_address == Assembler::kJcShortOpcode);
  Condition cc = *jmp_address == Assembler::kJncShortOpcode
      ? not_zero
      : zero;
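  // Example (assuming the Assembler constants and Condition values mirror
  // the x86 encodings): a short jnc (0x73) becomes jnz (0x70 | 0x5 == 0x75)
  // and a short jc (0x72) becomes jz (0x70 | 0x4 == 0x74).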
  *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64