// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/x87/codegen-x87.h"

#if V8_TARGET_ARCH_X87

#include "src/codegen.h"
#include "src/heap/heap.h"
#include "src/macro-assembler.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

BeforeCall(MacroAssembler * masm) const20 void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
21   masm->EnterFrame(StackFrame::INTERNAL);
22   DCHECK(!masm->has_frame());
23   masm->set_has_frame(true);
24 }
AfterCall(MacroAssembler * masm) const27 void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
28   masm->LeaveFrame(StackFrame::INTERNAL);
29   DCHECK(masm->has_frame());
30   masm->set_has_frame(false);
31 }

#define __ masm.

CreateExpFunction(Isolate * isolate)36 UnaryMathFunctionWithIsolate CreateExpFunction(Isolate* isolate) {
37   return nullptr;
38 }
CreateSqrtFunction(Isolate * isolate)41 UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
42   size_t actual_size;
43   // Allocate buffer in executable space.
44   byte* buffer =
45       static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
46   if (buffer == nullptr) return nullptr;
47 
48   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
49                       CodeObjectRequired::kNo);
50   // Load double input into registers.
51   __ fld_d(MemOperand(esp, 4));
52   __ X87SetFPUCW(0x027F);
53   __ fsqrt();
54   __ X87SetFPUCW(0x037F);
55   __ Ret();
56 
57   CodeDesc desc;
58   masm.GetCode(&desc);
59   DCHECK(!RelocInfo::RequiresRelocation(desc));
60 
61   Assembler::FlushICache(isolate, buffer, actual_size);
62   base::OS::ProtectCode(buffer, actual_size);
63   return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
64 }

// Helper functions for CreateMemMoveFunction.
#undef __
#define __ ACCESS_MASM(masm)

71 enum Direction { FORWARD, BACKWARD };
72 enum Alignment { MOVE_ALIGNED, MOVE_UNALIGNED };
73 
74 
MemMoveEmitPopAndReturn(MacroAssembler * masm)75 void MemMoveEmitPopAndReturn(MacroAssembler* masm) {
76   __ pop(esi);
77   __ pop(edi);
78   __ ret(0);
79 }

#undef __
#define __ masm.

86 class LabelConverter {
87  public:
LabelConverter(byte * buffer)88   explicit LabelConverter(byte* buffer) : buffer_(buffer) {}
address(Label * l) const89   int32_t address(Label* l) const {
90     return reinterpret_cast<int32_t>(buffer_) + l->pos();
91   }
92  private:
93   byte* buffer_;
94 };
95 
96 
CreateMemMoveFunction(Isolate * isolate)97 MemMoveFunction CreateMemMoveFunction(Isolate* isolate) {
98   size_t actual_size;
99   // Allocate buffer in executable space.
100   byte* buffer =
101       static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
102   if (buffer == nullptr) return nullptr;
103   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
104                       CodeObjectRequired::kNo);
105   LabelConverter conv(buffer);
106 
107   // Generated code is put into a fixed, unmovable buffer, and not into
108   // the V8 heap. We can't, and don't, refer to any relocatable addresses
109   // (e.g. the JavaScript nan-object).
110 
111   // 32-bit C declaration function calls pass arguments on stack.
112 
113   // Stack layout:
114   // esp[12]: Third argument, size.
115   // esp[8]: Second argument, source pointer.
116   // esp[4]: First argument, destination pointer.
117   // esp[0]: return address
118 
119   const int kDestinationOffset = 1 * kPointerSize;
120   const int kSourceOffset = 2 * kPointerSize;
121   const int kSizeOffset = 3 * kPointerSize;
122 
123   int stack_offset = 0;  // Update if we change the stack height.
124 
125   Label backward, backward_much_overlap;
126   Label forward_much_overlap, small_size, medium_size, pop_and_return;
127   __ push(edi);
128   __ push(esi);
129   stack_offset += 2 * kPointerSize;
130   Register dst = edi;
131   Register src = esi;
132   Register count = ecx;
133   __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
134   __ mov(src, Operand(esp, stack_offset + kSourceOffset));
135   __ mov(count, Operand(esp, stack_offset + kSizeOffset));
136 
137   __ cmp(dst, src);
138   __ j(equal, &pop_and_return);
139 
140   // No SSE2.
141   Label forward;
142   __ cmp(count, 0);
143   __ j(equal, &pop_and_return);
144   __ cmp(dst, src);
145   __ j(above, &backward);
146   __ jmp(&forward);
147   {
148     // Simple forward copier.
149     Label forward_loop_1byte, forward_loop_4byte;
150     __ bind(&forward_loop_4byte);
151     __ mov(eax, Operand(src, 0));
152     __ sub(count, Immediate(4));
153     __ add(src, Immediate(4));
154     __ mov(Operand(dst, 0), eax);
155     __ add(dst, Immediate(4));
156     __ bind(&forward);  // Entry point.
157     __ cmp(count, 3);
158     __ j(above, &forward_loop_4byte);
159     __ bind(&forward_loop_1byte);
160     __ cmp(count, 0);
161     __ j(below_equal, &pop_and_return);
162     __ mov_b(eax, Operand(src, 0));
163     __ dec(count);
164     __ inc(src);
165     __ mov_b(Operand(dst, 0), eax);
166     __ inc(dst);
167     __ jmp(&forward_loop_1byte);
168   }
169   {
170     // Simple backward copier.
171     Label backward_loop_1byte, backward_loop_4byte, entry_shortcut;
172     __ bind(&backward);
173     __ add(src, count);
174     __ add(dst, count);
175     __ cmp(count, 3);
176     __ j(below_equal, &entry_shortcut);
177 
178     __ bind(&backward_loop_4byte);
179     __ sub(src, Immediate(4));
180     __ sub(count, Immediate(4));
181     __ mov(eax, Operand(src, 0));
182     __ sub(dst, Immediate(4));
183     __ mov(Operand(dst, 0), eax);
184     __ cmp(count, 3);
185     __ j(above, &backward_loop_4byte);
186     __ bind(&backward_loop_1byte);
187     __ cmp(count, 0);
188     __ j(below_equal, &pop_and_return);
189     __ bind(&entry_shortcut);
190     __ dec(src);
191     __ dec(count);
192     __ mov_b(eax, Operand(src, 0));
193     __ dec(dst);
194     __ mov_b(Operand(dst, 0), eax);
195     __ jmp(&backward_loop_1byte);
196   }
197 
198   __ bind(&pop_and_return);
199   MemMoveEmitPopAndReturn(&masm);
200 
201   CodeDesc desc;
202   masm.GetCode(&desc);
203   DCHECK(!RelocInfo::RequiresRelocation(desc));
204   Assembler::FlushICache(isolate, buffer, actual_size);
205   base::OS::ProtectCode(buffer, actual_size);
206   // TODO(jkummerow): It would be nice to register this code creation event
207   // with the PROFILE / GDBJIT system.
208   return FUNCTION_CAST<MemMoveFunction>(buffer);
209 }

#undef __

// -------------------------------------------------------------------------
// Code generators

#define __ ACCESS_MASM(masm)

GenerateMapChangeElementsTransition(MacroAssembler * masm,Register receiver,Register key,Register value,Register target_map,AllocationSiteMode mode,Label * allocation_memento_found)220 void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
221     MacroAssembler* masm,
222     Register receiver,
223     Register key,
224     Register value,
225     Register target_map,
226     AllocationSiteMode mode,
227     Label* allocation_memento_found) {
228   Register scratch = edi;
229   DCHECK(!AreAliased(receiver, key, value, target_map, scratch));
230 
231   if (mode == TRACK_ALLOCATION_SITE) {
232     DCHECK(allocation_memento_found != NULL);
233     __ JumpIfJSArrayHasAllocationMemento(
234         receiver, scratch, allocation_memento_found);
235   }
236 
237   // Set transitioned map.
238   __ mov(FieldOperand(receiver, HeapObject::kMapOffset), target_map);
239   __ RecordWriteField(receiver, HeapObject::kMapOffset, target_map, scratch,
240                       kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
241 }
GenerateSmiToDouble(MacroAssembler * masm,Register receiver,Register key,Register value,Register target_map,AllocationSiteMode mode,Label * fail)244 void ElementsTransitionGenerator::GenerateSmiToDouble(
245     MacroAssembler* masm,
246     Register receiver,
247     Register key,
248     Register value,
249     Register target_map,
250     AllocationSiteMode mode,
251     Label* fail) {
252   // Return address is on the stack.
253   DCHECK(receiver.is(edx));
254   DCHECK(key.is(ecx));
255   DCHECK(value.is(eax));
256   DCHECK(target_map.is(ebx));
257 
258   Label loop, entry, convert_hole, gc_required, only_change_map;
259 
260   if (mode == TRACK_ALLOCATION_SITE) {
261     __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
262   }
263 
264   // Check for empty arrays, which only require a map transition and no changes
265   // to the backing store.
266   __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
267   __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
268   __ j(equal, &only_change_map);
269 
270   __ push(eax);
271   __ push(ebx);
272 
273   __ mov(edi, FieldOperand(edi, FixedArray::kLengthOffset));
274 
275   // Allocate new FixedDoubleArray.
276   // edx: receiver
277   // edi: length of source FixedArray (smi-tagged)
278   AllocationFlags flags =
279       static_cast<AllocationFlags>(TAG_OBJECT | DOUBLE_ALIGNMENT);
280   __ Allocate(FixedDoubleArray::kHeaderSize, times_8, edi,
281               REGISTER_VALUE_IS_SMI, eax, ebx, no_reg, &gc_required, flags);
282 
283   // eax: destination FixedDoubleArray
284   // edi: number of elements
285   // edx: receiver
286   __ mov(FieldOperand(eax, HeapObject::kMapOffset),
287          Immediate(masm->isolate()->factory()->fixed_double_array_map()));
288   __ mov(FieldOperand(eax, FixedDoubleArray::kLengthOffset), edi);
289   __ mov(esi, FieldOperand(edx, JSObject::kElementsOffset));
290   // Replace receiver's backing store with newly created FixedDoubleArray.
291   __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
292   __ mov(ebx, eax);
293   __ RecordWriteField(edx, JSObject::kElementsOffset, ebx, edi, kDontSaveFPRegs,
294                       EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
295 
296   __ mov(edi, FieldOperand(esi, FixedArray::kLengthOffset));
297 
298   // Prepare for conversion loop.
299   ExternalReference canonical_the_hole_nan_reference =
300       ExternalReference::address_of_the_hole_nan();
301   __ jmp(&entry);
302 
303   // Call into runtime if GC is required.
304   __ bind(&gc_required);
305   // Restore registers before jumping into runtime.
306   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
307   __ pop(ebx);
308   __ pop(eax);
309   __ jmp(fail);
310 
311   // Convert and copy elements
312   // esi: source FixedArray
313   __ bind(&loop);
314   __ mov(ebx, FieldOperand(esi, edi, times_2, FixedArray::kHeaderSize));
315   // ebx: current element from source
316   // edi: index of current element
317   __ JumpIfNotSmi(ebx, &convert_hole);
318 
319   // Normal smi, convert it to double and store.
320   __ SmiUntag(ebx);
321   __ push(ebx);
322   __ fild_s(Operand(esp, 0));
323   __ pop(ebx);
324   __ fstp_d(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize));
325   __ jmp(&entry);
326 
327   // Found hole, store hole_nan_as_double instead.
328   __ bind(&convert_hole);
329 
330   if (FLAG_debug_code) {
331     __ cmp(ebx, masm->isolate()->factory()->the_hole_value());
332     __ Assert(equal, kObjectFoundInSmiOnlyArray);
333   }
334 
335   __ fld_d(Operand::StaticVariable(canonical_the_hole_nan_reference));
336   __ fstp_d(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize));
337 
338   __ bind(&entry);
339   __ sub(edi, Immediate(Smi::FromInt(1)));
340   __ j(not_sign, &loop);
341 
342   __ pop(ebx);
343   __ pop(eax);
344 
345   // Restore esi.
346   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
347 
348   __ bind(&only_change_map);
349   // eax: value
350   // ebx: target map
351   // Set transitioned map.
352   __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
353   __ RecordWriteField(edx, HeapObject::kMapOffset, ebx, edi, kDontSaveFPRegs,
354                       OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
355 }
GenerateDoubleToObject(MacroAssembler * masm,Register receiver,Register key,Register value,Register target_map,AllocationSiteMode mode,Label * fail)358 void ElementsTransitionGenerator::GenerateDoubleToObject(
359     MacroAssembler* masm,
360     Register receiver,
361     Register key,
362     Register value,
363     Register target_map,
364     AllocationSiteMode mode,
365     Label* fail) {
366   // Return address is on the stack.
367   DCHECK(receiver.is(edx));
368   DCHECK(key.is(ecx));
369   DCHECK(value.is(eax));
370   DCHECK(target_map.is(ebx));
371 
372   Label loop, entry, convert_hole, gc_required, only_change_map, success;
373 
374   if (mode == TRACK_ALLOCATION_SITE) {
375     __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
376   }
377 
378   // Check for empty arrays, which only require a map transition and no changes
379   // to the backing store.
380   __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
381   __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
382   __ j(equal, &only_change_map);
383 
384   __ push(eax);
385   __ push(edx);
386   __ push(ebx);
387 
388   __ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));
389 
390   // Allocate new FixedArray.
391   // ebx: length of source FixedDoubleArray (smi-tagged)
392   __ lea(edi, Operand(ebx, times_2, FixedArray::kHeaderSize));
393   __ Allocate(edi, eax, esi, no_reg, &gc_required, TAG_OBJECT);
394 
395   // eax: destination FixedArray
396   // ebx: number of elements
397   __ mov(FieldOperand(eax, HeapObject::kMapOffset),
398          Immediate(masm->isolate()->factory()->fixed_array_map()));
399   __ mov(FieldOperand(eax, FixedArray::kLengthOffset), ebx);
400   __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
401 
402   // Allocating heap numbers in the loop below can fail and cause a jump to
403   // gc_required. We can't leave a partly initialized FixedArray behind,
404   // so pessimistically fill it with holes now.
405   Label initialization_loop, initialization_loop_entry;
406   __ jmp(&initialization_loop_entry, Label::kNear);
407   __ bind(&initialization_loop);
408   __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize),
409          masm->isolate()->factory()->the_hole_value());
410   __ bind(&initialization_loop_entry);
411   __ sub(ebx, Immediate(Smi::FromInt(1)));
412   __ j(not_sign, &initialization_loop);
413 
414   __ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));
415   __ jmp(&entry);
416 
417   // ebx: target map
418   // edx: receiver
419   // Set transitioned map.
420   __ bind(&only_change_map);
421   __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
422   __ RecordWriteField(edx, HeapObject::kMapOffset, ebx, edi, kDontSaveFPRegs,
423                       OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
424   __ jmp(&success);
425 
426   // Call into runtime if GC is required.
427   __ bind(&gc_required);
428   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
429   __ pop(ebx);
430   __ pop(edx);
431   __ pop(eax);
432   __ jmp(fail);
433 
434   // Box doubles into heap numbers.
435   // edi: source FixedDoubleArray
436   // eax: destination FixedArray
437   __ bind(&loop);
438   // ebx: index of current element (smi-tagged)
439   uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
440   __ cmp(FieldOperand(edi, ebx, times_4, offset), Immediate(kHoleNanUpper32));
441   __ j(equal, &convert_hole);
442 
443   // Non-hole double, copy value into a heap number.
444   __ AllocateHeapNumber(edx, esi, no_reg, &gc_required);
445   // edx: new heap number
446   __ mov(esi, FieldOperand(edi, ebx, times_4, FixedDoubleArray::kHeaderSize));
447   __ mov(FieldOperand(edx, HeapNumber::kValueOffset), esi);
448   __ mov(esi, FieldOperand(edi, ebx, times_4, offset));
449   __ mov(FieldOperand(edx, HeapNumber::kValueOffset + kPointerSize), esi);
450   __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize), edx);
451   __ mov(esi, ebx);
452   __ RecordWriteArray(eax, edx, esi, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
453                       OMIT_SMI_CHECK);
454   __ jmp(&entry, Label::kNear);
455 
456   // Replace the-hole NaN with the-hole pointer.
457   __ bind(&convert_hole);
458   __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize),
459          masm->isolate()->factory()->the_hole_value());
460 
461   __ bind(&entry);
462   __ sub(ebx, Immediate(Smi::FromInt(1)));
463   __ j(not_sign, &loop);
464 
465   __ pop(ebx);
466   __ pop(edx);
467   // ebx: target map
468   // edx: receiver
469   // Set transitioned map.
470   __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
471   __ RecordWriteField(edx, HeapObject::kMapOffset, ebx, edi, kDontSaveFPRegs,
472                       OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
473   // Replace receiver's backing store with newly created and filled FixedArray.
474   __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
475   __ RecordWriteField(edx, JSObject::kElementsOffset, eax, edi, kDontSaveFPRegs,
476                       EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
477 
478   // Restore registers.
479   __ pop(eax);
480   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
481 
482   __ bind(&success);
483 }
Generate(MacroAssembler * masm,Factory * factory,Register string,Register index,Register result,Label * call_runtime)486 void StringCharLoadGenerator::Generate(MacroAssembler* masm,
487                                        Factory* factory,
488                                        Register string,
489                                        Register index,
490                                        Register result,
491                                        Label* call_runtime) {
492   // Fetch the instance type of the receiver into result register.
493   __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
494   __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
495 
496   // We need special handling for indirect strings.
497   Label check_sequential;
498   __ test(result, Immediate(kIsIndirectStringMask));
499   __ j(zero, &check_sequential, Label::kNear);
500 
501   // Dispatch on the indirect string shape: slice or cons.
502   Label cons_string;
503   __ test(result, Immediate(kSlicedNotConsMask));
504   __ j(zero, &cons_string, Label::kNear);
505 
506   // Handle slices.
507   Label indirect_string_loaded;
508   __ mov(result, FieldOperand(string, SlicedString::kOffsetOffset));
509   __ SmiUntag(result);
510   __ add(index, result);
511   __ mov(string, FieldOperand(string, SlicedString::kParentOffset));
512   __ jmp(&indirect_string_loaded, Label::kNear);
513 
514   // Handle cons strings.
515   // Check whether the right hand side is the empty string (i.e. if
516   // this is really a flat string in a cons string). If that is not
517   // the case we would rather go to the runtime system now to flatten
518   // the string.
519   __ bind(&cons_string);
520   __ cmp(FieldOperand(string, ConsString::kSecondOffset),
521          Immediate(factory->empty_string()));
522   __ j(not_equal, call_runtime);
523   __ mov(string, FieldOperand(string, ConsString::kFirstOffset));
524 
525   __ bind(&indirect_string_loaded);
526   __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
527   __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
528 
529   // Distinguish sequential and external strings. Only these two string
530   // representations can reach here (slices and flat cons strings have been
531   // reduced to the underlying sequential or external string).
532   Label seq_string;
533   __ bind(&check_sequential);
534   STATIC_ASSERT(kSeqStringTag == 0);
535   __ test(result, Immediate(kStringRepresentationMask));
536   __ j(zero, &seq_string, Label::kNear);
537 
538   // Handle external strings.
539   Label one_byte_external, done;
540   if (FLAG_debug_code) {
541     // Assert that we do not have a cons or slice (indirect strings) here.
542     // Sequential strings have already been ruled out.
543     __ test(result, Immediate(kIsIndirectStringMask));
544     __ Assert(zero, kExternalStringExpectedButNotFound);
545   }
546   // Rule out short external strings.
547   STATIC_ASSERT(kShortExternalStringTag != 0);
548   __ test_b(result, kShortExternalStringMask);
549   __ j(not_zero, call_runtime);
550   // Check encoding.
551   STATIC_ASSERT(kTwoByteStringTag == 0);
552   __ test_b(result, kStringEncodingMask);
553   __ mov(result, FieldOperand(string, ExternalString::kResourceDataOffset));
554   __ j(not_equal, &one_byte_external, Label::kNear);
555   // Two-byte string.
556   __ movzx_w(result, Operand(result, index, times_2, 0));
557   __ jmp(&done, Label::kNear);
558   __ bind(&one_byte_external);
559   // One-byte string.
560   __ movzx_b(result, Operand(result, index, times_1, 0));
561   __ jmp(&done, Label::kNear);
562 
563   // Dispatch on the encoding: one-byte or two-byte.
564   Label one_byte;
565   __ bind(&seq_string);
566   STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
567   STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
568   __ test(result, Immediate(kStringEncodingMask));
569   __ j(not_zero, &one_byte, Label::kNear);
570 
571   // Two-byte string.
572   // Load the two-byte character code into the result register.
573   __ movzx_w(result, FieldOperand(string,
574                                   index,
575                                   times_2,
576                                   SeqTwoByteString::kHeaderSize));
577   __ jmp(&done, Label::kNear);
578 
579   // One-byte string.
580   // Load the byte into the result register.
581   __ bind(&one_byte);
582   __ movzx_b(result, FieldOperand(string,
583                                   index,
584                                   times_1,
585                                   SeqOneByteString::kHeaderSize));
586   __ bind(&done);
587 }

#undef __

CodeAgingHelper(Isolate * isolate)593 CodeAgingHelper::CodeAgingHelper(Isolate* isolate) {
594   USE(isolate);
595   DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
596   CodePatcher patcher(isolate, young_sequence_.start(),
597                       young_sequence_.length());
598   patcher.masm()->push(ebp);
599   patcher.masm()->mov(ebp, esp);
600   patcher.masm()->push(esi);
601   patcher.masm()->push(edi);
602 }

#ifdef DEBUG
IsOld(byte * candidate) const606 bool CodeAgingHelper::IsOld(byte* candidate) const {
607   return *candidate == kCallOpcode;
608 }
#endif

IsYoungSequence(Isolate * isolate,byte * sequence)612 bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
613   bool result = isolate->code_aging_helper()->IsYoung(sequence);
614   DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
615   return result;
616 }
GetCodeAgeAndParity(Isolate * isolate,byte * sequence,Age * age,MarkingParity * parity)619 void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age,
620                                MarkingParity* parity) {
621   if (IsYoungSequence(isolate, sequence)) {
622     *age = kNoAgeCodeAge;
623     *parity = NO_MARKING_PARITY;
624   } else {
625     sequence++;  // Skip the kCallOpcode byte
626     Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
627         Assembler::kCallTargetAddressOffset;
628     Code* stub = GetCodeFromTargetAddress(target_address);
629     GetCodeAgeAndParity(stub, age, parity);
630   }
631 }
PatchPlatformCodeAge(Isolate * isolate,byte * sequence,Code::Age age,MarkingParity parity)634 void Code::PatchPlatformCodeAge(Isolate* isolate,
635                                 byte* sequence,
636                                 Code::Age age,
637                                 MarkingParity parity) {
638   uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
639   if (age == kNoAgeCodeAge) {
640     isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
641     Assembler::FlushICache(isolate, sequence, young_length);
642   } else {
643     Code* stub = GetCodeAgeStub(isolate, age, parity);
644     CodePatcher patcher(isolate, sequence, young_length);
645     patcher.masm()->call(stub->instruction_start(), RelocInfo::NONE32);
646   }
647 }

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X87