// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/x64/codegen-x64.h"

#if V8_TARGET_ARCH_X64

#include "src/codegen.h"
#include "src/macro-assembler.h"
#include "src/x64/assembler-x64-inl.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterFrame(StackFrame::INTERNAL);
  DCHECK(!masm->has_frame());
  masm->set_has_frame(true);
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveFrame(StackFrame::INTERNAL);
  DCHECK(masm->has_frame());
  masm->set_has_frame(false);
}
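
// Illustrative pairing of the two helpers above (a sketch, not an actual
// V8 call site; `masm` and the runtime call in the middle are placeholders):
//
//   StubRuntimeCallHelper helper;
//   helper.BeforeCall(masm);   // Enter an INTERNAL frame, set has_frame().
//   // ... emit the call into the C++ runtime here ...
//   helper.AfterCall(masm);    // Leave the frame, clear has_frame().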


#define __ masm.


UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
  size_t actual_size;
  // Allocate buffer in executable space.
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;

  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);
  // xmm0: raw double input.
  // Compute the square root of the input in place.
  __ Sqrtsd(xmm0, xmm0);
  __ Ret();

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
}
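
// Usage sketch for the generated function above (illustrative only; the
// (double, Isolate*) signature of UnaryMathFunctionWithIsolate is assumed
// from its typedef, and the real call sites live elsewhere in V8):
//
//   UnaryMathFunctionWithIsolate fast_sqrt = CreateSqrtFunction(isolate);
//   if (fast_sqrt != nullptr) {
//     double root = fast_sqrt(2.0, isolate);  // Runs the generated sqrtsd stub.
//   }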

#undef __

// -------------------------------------------------------------------------
// Code generators

#define __ ACCESS_MASM(masm)

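// Loads the character at |index| of |string| into |result|. Indirect strings
// (slices, thin strings, and flat cons strings) are first reduced to their
// underlying sequential or external string; cases that cannot be handled
// inline (non-flat cons strings, short external strings) jump to
// |call_runtime|.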
void StringCharLoadGenerator::Generate(MacroAssembler* masm,
                                       Register string,
                                       Register index,
                                       Register result,
                                       Label* call_runtime) {
  Label indirect_string_loaded;
  __ bind(&indirect_string_loaded);

  // Fetch the instance type of the receiver into result register.
  __ movp(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ testb(result, Immediate(kIsIndirectStringMask));
  __ j(zero, &check_sequential, Label::kNear);

  // Dispatch on the indirect string shape: slice, cons, or thin.
  Label cons_string, thin_string;
  __ andl(result, Immediate(kStringRepresentationMask));
  __ cmpl(result, Immediate(kConsStringTag));
  __ j(equal, &cons_string, Label::kNear);
  __ cmpl(result, Immediate(kThinStringTag));
  __ j(equal, &thin_string, Label::kNear);

  // Handle slices.
  __ SmiToInteger32(result, FieldOperand(string, SlicedString::kOffsetOffset));
  __ addp(index, result);
  __ movp(string, FieldOperand(string, SlicedString::kParentOffset));
  __ jmp(&indirect_string_loaded);

  // Handle thin strings.
  __ bind(&thin_string);
  __ movp(string, FieldOperand(string, ThinString::kActualOffset));
  __ jmp(&indirect_string_loaded);

  // Handle cons strings.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ CompareRoot(FieldOperand(string, ConsString::kSecondOffset),
                 Heap::kempty_stringRootIndex);
  __ j(not_equal, call_runtime);
  __ movp(string, FieldOperand(string, ConsString::kFirstOffset));
  __ jmp(&indirect_string_loaded);

  // Distinguish sequential and external strings. Only these two string
  // representations can reach here (slices, thin strings, and flat cons
  // strings have been reduced to the underlying sequential or external
  // string).
  Label seq_string;
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ testb(result, Immediate(kStringRepresentationMask));
  __ j(zero, &seq_string, Label::kNear);

  // Handle external strings.
  Label one_byte_external, done;
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ testb(result, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ testb(result, Immediate(kShortExternalStringTag));
  __ j(not_zero, call_runtime);
  // Check encoding.
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ testb(result, Immediate(kStringEncodingMask));
  __ movp(result, FieldOperand(string, ExternalString::kResourceDataOffset));
  __ j(not_equal, &one_byte_external, Label::kNear);
  // Two-byte string.
  __ movzxwl(result, Operand(result, index, times_2, 0));
  __ jmp(&done, Label::kNear);
  __ bind(&one_byte_external);
  // One-byte string.
  __ movzxbl(result, Operand(result, index, times_1, 0));
  __ jmp(&done, Label::kNear);

  // Dispatch on the encoding: one-byte or two-byte.
  Label one_byte;
  __ bind(&seq_string);
  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
  STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
  __ testb(result, Immediate(kStringEncodingMask));
  __ j(not_zero, &one_byte, Label::kNear);

  // Two-byte string.
  // Load the two-byte character code into the result register.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ movzxwl(result, FieldOperand(string,
                                  index,
                                  times_2,
                                  SeqTwoByteString::kHeaderSize));
  __ jmp(&done, Label::kNear);

  // One-byte string.
  // Load the byte into the result register.
  __ bind(&one_byte);
  __ movzxbl(result, FieldOperand(string,
                                  index,
                                  times_1,
                                  SeqOneByteString::kHeaderSize));
  __ bind(&done);
}

#undef __


CodeAgingHelper::CodeAgingHelper(Isolate* isolate) {
  USE(isolate);
  DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
  // The sequence of instructions that is patched out for aging code is the
  // following boilerplate stack-building prologue that is found both in
  // FUNCTION and OPTIMIZED_FUNCTION code:
  CodePatcher patcher(isolate, young_sequence_.start(),
                      young_sequence_.length());
  patcher.masm()->pushq(rbp);
  patcher.masm()->movp(rbp, rsp);
  patcher.masm()->Push(rsi);
  patcher.masm()->Push(rdi);
}


#ifdef DEBUG
bool CodeAgingHelper::IsOld(byte* candidate) const {
  return *candidate == kCallOpcode;
}
#endif


bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
  bool result = isolate->code_aging_helper()->IsYoung(sequence);
  DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
  return result;
}

Code::Age Code::GetCodeAge(Isolate* isolate, byte* sequence) {
  if (IsYoungSequence(isolate, sequence)) return kNoAgeCodeAge;

  sequence++;  // Skip the kCallOpcode byte
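  // The call target is encoded as a 32-bit displacement relative to the end
  // of the call instruction; adding kCallTargetAddressOffset (assumed here to
  // account for the width of that displacement field) yields the absolute
  // address of the code-age stub.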
  Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
                           Assembler::kCallTargetAddressOffset;
  Code* stub = GetCodeFromTargetAddress(target_address);
  return GetAgeOfCodeAgeStub(stub);
}

void Code::PatchPlatformCodeAge(Isolate* isolate, byte* sequence,
                                Code::Age age) {
  uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
  if (age == kNoAgeCodeAge) {
    isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
    Assembler::FlushICache(isolate, sequence, young_length);
  } else {
    Code* stub = GetCodeAgeStub(isolate, age);
    CodePatcher patcher(isolate, sequence, young_length);
    patcher.masm()->call(stub->instruction_start());
    patcher.masm()->Nop(
        kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength);
  }
}
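
// Layout sketch of the two states this code toggles between (illustrative,
// derived from the CodeAgingHelper constructor and the patch above):
//
//   young:  pushq rbp; movp rbp, rsp; Push rsi; Push rdi
//   old:    call <code-age stub>, followed by nops padding the sequence out
//           to kNoCodeAgeSequenceLength bytes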


Operand StackArgumentsAccessor::GetArgumentOperand(int index) {
  DCHECK(index >= 0);
  int receiver = (receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER) ? 1 : 0;
  int displacement_to_last_argument = base_reg_.is(rsp) ?
      kPCOnStackSize : kFPOnStackSize + kPCOnStackSize;
  displacement_to_last_argument += extra_displacement_to_last_argument_;
  if (argument_count_reg_.is(no_reg)) {
    // argument[0] is at base_reg_ + displacement_to_last_argument +
    // (argument_count_immediate_ + receiver - 1) * kPointerSize.
    DCHECK(argument_count_immediate_ + receiver > 0);
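    // Worked example (illustrative): with base_reg_ == rsp, no extra
    // displacement, argument_count_immediate_ == 2, a receiver on the stack,
    // and index == 1, this returns
    //   Operand(rsp, kPCOnStackSize + (2 + 1 - 1 - 1) * kPointerSize),
    // i.e. one pointer slot above the last argument.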
    return Operand(base_reg_, displacement_to_last_argument +
        (argument_count_immediate_ + receiver - 1 - index) * kPointerSize);
  } else {
    // argument[0] is at base_reg_ + displacement_to_last_argument +
    // argument_count_reg_ * times_pointer_size + (receiver - 1) * kPointerSize.
    return Operand(base_reg_, argument_count_reg_, times_pointer_size,
        displacement_to_last_argument + (receiver - 1 - index) * kPointerSize);
  }
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64