• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/ppc/codegen-ppc.h"
6 
7 #if V8_TARGET_ARCH_PPC
8 
9 #include <memory>
10 
11 #include "src/codegen.h"
12 #include "src/macro-assembler.h"
13 #include "src/ppc/simulator-ppc.h"
14 
15 namespace v8 {
16 namespace internal {
17 
18 
19 #define __ masm.
20 
// Generates a small native stub that computes the square root of its
// double argument and can be called directly from C code.  Returns
// nullptr when running under the simulator, where natively generated
// machine code cannot be executed.
UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
#if defined(USE_SIMULATOR)
  return nullptr;
#else
  size_t actual_size;
  // Allocate a small chunk of executable memory for the stub.
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;

  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);

// Called from C
  __ function_descriptor();

  // d1 = sqrt(d1): move the C float parameter in, take the square root,
  // move it back out as the result, and return.
  __ MovFromFloatParameter(d1);
  __ fsqrt(d1, d1);
  __ MovToFloatResult(d1);
  __ Ret();

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(ABI_USES_FUNCTION_DESCRIPTORS || !RelocInfo::RequiresRelocation(desc));

  // Make the freshly written instructions visible to the instruction
  // cache, then flip the pages to executable before handing out the
  // function pointer.
  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
#endif
}
50 
51 #undef __
52 
53 
54 // -------------------------------------------------------------------------
55 // Platform-specific RuntimeCallHelper functions.
56 
// Enters an internal frame before a stub makes a runtime call.  The
// has_frame flag must not already be set; it is raised here and cleared
// again by the matching AfterCall().
void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterFrame(StackFrame::INTERNAL);
  DCHECK(!masm->has_frame());
  masm->set_has_frame(true);
}
62 
63 
// Leaves the internal frame set up by BeforeCall() and clears the
// has_frame flag, which must still be set at this point.
void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveFrame(StackFrame::INTERNAL);
  DCHECK(masm->has_frame());
  masm->set_has_frame(false);
}
69 
70 
71 // -------------------------------------------------------------------------
72 // Code generators
73 
74 #define __ ACCESS_MASM(masm)
75 
76 // assume ip can be used as a scratch register below
// Emits code that loads the character at |index| from |string| into
// |result|.  Indirect strings (sliced, cons, thin) are unwrapped in a
// loop until a sequential or external string is reached; cases that
// cannot be handled inline branch to |call_runtime|.
void StringCharLoadGenerator::Generate(MacroAssembler* masm, Register string,
                                       Register index, Register result,
                                       Label* call_runtime) {
  // Loop head: re-entered each time an indirect string is unwrapped so
  // the new underlying string gets classified again.
  Label indirect_string_loaded;
  __ bind(&indirect_string_loaded);

  // Fetch the instance type of the receiver into result register.
  __ LoadP(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ lbz(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ andi(r0, result, Operand(kIsIndirectStringMask));
  __ beq(&check_sequential, cr0);  // andi sets cr0

  // Dispatch on the indirect string shape: slice or cons or thin.
  Label cons_string, thin_string;
  __ andi(ip, result, Operand(kStringRepresentationMask));
  __ cmpi(ip, Operand(kConsStringTag));
  __ beq(&cons_string);
  __ cmpi(ip, Operand(kThinStringTag));
  __ beq(&thin_string);

  // Handle slices: add the slice's (smi) offset to the index and
  // continue with the parent string.
  __ LoadP(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
  __ LoadP(string, FieldMemOperand(string, SlicedString::kParentOffset));
  __ SmiUntag(ip, result);
  __ add(index, index, ip);
  __ b(&indirect_string_loaded);

  // Handle thin strings: continue with the actual (wrapped) string.
  __ bind(&thin_string);
  __ LoadP(string, FieldMemOperand(string, ThinString::kActualOffset));
  __ b(&indirect_string_loaded);

  // Handle cons strings.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ LoadP(result, FieldMemOperand(string, ConsString::kSecondOffset));
  __ CompareRoot(result, Heap::kempty_stringRootIndex);
  __ bne(call_runtime);
  // Get the first of the two strings and load its instance type.
  __ LoadP(string, FieldMemOperand(string, ConsString::kFirstOffset));
  __ b(&indirect_string_loaded);

  // Distinguish sequential and external strings. Only these two string
  // representations can reach here (slices and flat cons strings have been
  // reduced to the underlying sequential or external string).
  Label external_string, check_encoding;
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ andi(r0, result, Operand(kStringRepresentationMask));
  __ bne(&external_string, cr0);

  // Prepare sequential strings: point |string| at the character payload
  // (past the header, with the heap-object tag removed).
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ addi(string, string,
          Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ b(&check_encoding);

  // Handle external strings.
  __ bind(&external_string);
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ andi(r0, result, Operand(kIsIndirectStringMask));
    __ Assert(eq, kExternalStringExpectedButNotFound, cr0);
  }
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ andi(r0, result, Operand(kShortExternalStringMask));
  __ bne(call_runtime, cr0);
  // Load the pointer to the external string's backing store.
  __ LoadP(string,
           FieldMemOperand(string, ExternalString::kResourceDataOffset));

  Label one_byte, done;
  __ bind(&check_encoding);
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ andi(r0, result, Operand(kStringEncodingMask));
  __ bne(&one_byte, cr0);
  // Two-byte string: byte offset is index * 2, load a halfword.
  __ ShiftLeftImm(result, index, Operand(1));
  __ lhzx(result, MemOperand(string, result));
  __ b(&done);
  __ bind(&one_byte);
  // One-byte string: load a single byte at index.
  __ lbzx(result, MemOperand(string, index));
  __ bind(&done);
}
169 
170 #undef __
171 
// Pre-builds the "young" code-age prologue (a standard frame push padded
// with nops) into young_sequence_, so generated code can later be
// compared against it and re-patched to it.
CodeAgingHelper::CodeAgingHelper(Isolate* isolate) {
  USE(isolate);
  DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
  // Since patcher is a large object, allocate it dynamically when needed,
  // to avoid overloading the stack in stress conditions.
  // DONT_FLUSH is used because the CodeAgingHelper is initialized early in
  // the process, before ARM simulator ICache is setup.
  std::unique_ptr<CodePatcher> patcher(
      new CodePatcher(isolate, young_sequence_.start(),
                      young_sequence_.length() / Assembler::kInstrSize,
                      CodePatcher::DONT_FLUSH));
  PredictableCodeSizeScope scope(patcher->masm(), young_sequence_.length());
  patcher->masm()->PushStandardFrame(r4);
  // Pad the sequence to its fixed length with nops.
  for (int i = 0; i < kNoCodeAgeSequenceNops; i++) {
    patcher->masm()->nop();
  }
}
189 
190 
191 #ifdef DEBUG
// An "old" (aged) sequence begins with the marker nop emitted by
// Code::PatchPlatformCodeAge; a young sequence begins with a frame push.
bool CodeAgingHelper::IsOld(byte* candidate) const {
  return Assembler::IsNop(Assembler::instr_at(candidate));
}
195 #endif
196 
197 
IsYoungSequence(Isolate * isolate,byte * sequence)198 bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
199   bool result = isolate->code_aging_helper()->IsYoung(sequence);
200   DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
201   return result;
202 }
203 
GetCodeAge(Isolate * isolate,byte * sequence)204 Code::Age Code::GetCodeAge(Isolate* isolate, byte* sequence) {
205   if (IsYoungSequence(isolate, sequence)) return kNoAgeCodeAge;
206 
207   Code* code = NULL;
208   Address target_address =
209       Assembler::target_address_at(sequence + kCodeAgingTargetDelta, code);
210   Code* stub = GetCodeFromTargetAddress(target_address);
211   return GetAgeOfCodeAgeStub(stub);
212 }
213 
// Rewrites the code-age prologue at |sequence|.  For kNoAgeCodeAge the
// original young sequence is restored; for any other age the prologue is
// replaced with a fixed-length jump to the matching code-age stub.
void Code::PatchPlatformCodeAge(Isolate* isolate, byte* sequence,
                                Code::Age age) {
  uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
  if (age == kNoAgeCodeAge) {
    isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
    Assembler::FlushICache(isolate, sequence, young_length);
  } else {
    // FIXED_SEQUENCE
    Code* stub = GetCodeAgeStub(isolate, age);
    CodePatcher patcher(isolate, sequence,
                        young_length / Assembler::kInstrSize);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(patcher.masm());
    intptr_t target = reinterpret_cast<intptr_t>(stub->instruction_start());
    // Don't use Call -- we need to preserve ip and lr.
    // GenerateMakeCodeYoungAgainCommon for the stub code.
    patcher.masm()->nop();  // marker to detect sequence (see IsOld)
    patcher.masm()->mov(r3, Operand(target));
    patcher.masm()->Jump(r3);
    // Pad the patched sequence to the fixed length with nops.
    for (int i = 0; i < kCodeAgingSequenceNops; i++) {
      patcher.masm()->nop();
    }
  }
}
237 }  // namespace internal
238 }  // namespace v8
239 
240 #endif  // V8_TARGET_ARCH_PPC
241