// NOTE: code-browser navigation chrome ("Home / Line# / Scopes / Navigate /
// Raw / Download") removed from this scraped copy; it was not part of the
// original source file.
1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/s390/codegen-s390.h"
6 
7 #if V8_TARGET_ARCH_S390
8 
9 #include <memory>
10 
11 #include "src/codegen.h"
12 #include "src/macro-assembler.h"
13 #include "src/s390/simulator-s390.h"
14 
15 namespace v8 {
16 namespace internal {
17 
18 #define __ masm.
19 
// Emits a small native stub computing sqrt(double) and returns it as a
// callable function pointer. Returns nullptr under the simulator (no native
// execution) or when executable memory cannot be allocated.
UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
#if defined(USE_SIMULATOR)
  return nullptr;
#else
  size_t actual_size;
  // Allocate 1KB of initially-writable memory for the generated code.
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;

  // kNo: this raw stub is not wrapped in a Code object.
  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);

  // d0 carries both the argument and the result; sqdbr is the
  // z/Architecture SQUARE ROOT (long BFP) instruction.
  __ MovFromFloatParameter(d0);
  __ sqdbr(d0, d0);
  __ MovToFloatResult(d0);
  __ Ret();

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(ABI_USES_FUNCTION_DESCRIPTORS || !RelocInfo::RequiresRelocation(desc));

  // Flush the instruction cache and make the page executable before handing
  // out the function pointer.
  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
#endif
}
46 
47 #undef __
48 
49 // -------------------------------------------------------------------------
50 // Platform-specific RuntimeCallHelper functions.
51 
// Enters an INTERNAL frame ahead of a stub-to-runtime call and records on the
// assembler that a frame is now active (checked by AfterCall below).
void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterFrame(StackFrame::INTERNAL);
  DCHECK(!masm->has_frame());
  masm->set_has_frame(true);
}
57 
// Tears down the INTERNAL frame created by BeforeCall and clears the
// assembler's frame flag.
void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveFrame(StackFrame::INTERNAL);
  DCHECK(masm->has_frame());
  masm->set_has_frame(false);
}
63 
64 // -------------------------------------------------------------------------
65 // Code generators
66 
67 #define __ ACCESS_MASM(masm)
68 
69 // assume ip can be used as a scratch register below
// assume ip can be used as a scratch register below
//
// Emits code that loads the character at |index| of |string| into |result|.
// Indirect strings (sliced/thin/cons) are followed to their underlying
// sequential or external string, adjusting |index| for slices. Jumps to
// |call_runtime| for unflattened cons strings and short external strings.
// Clobbers r0 and ip as scratch; |string| and |index| are mutated.
void StringCharLoadGenerator::Generate(MacroAssembler* masm, Register string,
                                       Register index, Register result,
                                       Label* call_runtime) {
  // Loop head: re-entered each time |string| is redirected to another string.
  Label indirect_string_loaded;
  __ bind(&indirect_string_loaded);

  // Fetch the instance type of the receiver into result register.
  __ LoadP(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ LoadlB(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ mov(r0, Operand(kIsIndirectStringMask));
  __ AndP(r0, result);
  __ beq(&check_sequential, Label::kNear /*, cr0*/);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string, thin_string;
  __ LoadRR(ip, result);
  __ nilf(ip, Operand(kStringRepresentationMask));
  __ CmpP(ip, Operand(kConsStringTag));
  __ beq(&cons_string);
  __ CmpP(ip, Operand(kThinStringTag));
  __ beq(&thin_string);

  // Handle slices: add the slice offset to |index| and continue with the
  // parent string.
  __ LoadP(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
  __ LoadP(string, FieldMemOperand(string, SlicedString::kParentOffset));
  __ SmiUntag(ip, result);
  __ AddP(index, ip);
  __ b(&indirect_string_loaded);

  // Handle thin strings: simply continue with the actual string.
  __ bind(&thin_string);
  __ LoadP(string, FieldMemOperand(string, ThinString::kActualOffset));
  __ b(&indirect_string_loaded);

  // Handle cons strings.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ LoadP(result, FieldMemOperand(string, ConsString::kSecondOffset));
  __ CompareRoot(result, Heap::kempty_stringRootIndex);
  __ bne(call_runtime);
  // Get the first of the two strings and load its instance type.
  __ LoadP(string, FieldMemOperand(string, ConsString::kFirstOffset));
  __ b(&indirect_string_loaded);

  // Distinguish sequential and external strings. Only these two string
  // representations can reach here (slices and flat cons strings have been
  // reduced to the underlying sequential or external string).
  Label external_string, check_encoding;
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ mov(r0, Operand(kStringRepresentationMask));
  __ AndP(r0, result);
  __ bne(&external_string, Label::kNear);

  // Prepare sequential strings: point |string| at the character data.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ AddP(string, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ b(&check_encoding, Label::kNear);

  // Handle external strings.
  __ bind(&external_string);
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ mov(r0, Operand(kIsIndirectStringMask));
    __ AndP(r0, result);
    __ Assert(eq, kExternalStringExpectedButNotFound, cr0);
  }
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ mov(r0, Operand(kShortExternalStringMask));
  __ AndP(r0, result);
  __ bne(call_runtime /*, cr0*/);
  // Point |string| at the out-of-heap resource data.
  __ LoadP(string,
           FieldMemOperand(string, ExternalString::kResourceDataOffset));

  // Dispatch on encoding and load the character (zero-extended) into |result|.
  Label one_byte, done;
  __ bind(&check_encoding);
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ mov(r0, Operand(kStringEncodingMask));
  __ AndP(r0, result);
  __ bne(&one_byte, Label::kNear);
  // Two-byte string.
  __ ShiftLeftP(result, index, Operand(1));
  __ LoadLogicalHalfWordP(result, MemOperand(string, result));
  __ b(&done, Label::kNear);
  __ bind(&one_byte);
  // One-byte string.
  __ LoadlB(result, MemOperand(string, index));
  __ bind(&done);
}
167 
168 #undef __
169 
// Emits the canonical "young" code prologue into young_sequence_ so it can
// later be compared against (IsYoung) and copied over (CopyYoungSequenceTo)
// real code-age sequences.
CodeAgingHelper::CodeAgingHelper(Isolate* isolate) {
  USE(isolate);
  DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
  // Since patcher is a large object, allocate it dynamically when needed,
  // to avoid overloading the stack in stress conditions.
  // DONT_FLUSH is used because the CodeAgingHelper is initialized early in
  // the process, before ARM simulator ICache is setup.
  std::unique_ptr<CodePatcher> patcher(
      new CodePatcher(isolate, young_sequence_.start(),
                      young_sequence_.length(), CodePatcher::DONT_FLUSH));
  PredictableCodeSizeScope scope(patcher->masm(), young_sequence_.length());
  // The young sequence is just the standard frame-construction prologue.
  patcher->masm()->PushStandardFrame(r3);
}
183 
184 #ifdef DEBUG
IsOld(byte * candidate) const185 bool CodeAgingHelper::IsOld(byte* candidate) const {
186   return Assembler::IsNop(Assembler::instr_at(candidate));
187 }
188 #endif
189 
IsYoungSequence(Isolate * isolate,byte * sequence)190 bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
191   bool result = isolate->code_aging_helper()->IsYoung(sequence);
192   DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
193   return result;
194 }
195 
GetCodeAge(Isolate * isolate,byte * sequence)196 Code::Age Code::GetCodeAge(Isolate* isolate, byte* sequence) {
197   if (IsYoungSequence(isolate, sequence)) return kNoAgeCodeAge;
198 
199   Code* code = NULL;
200   Address target_address =
201       Assembler::target_address_at(sequence + kCodeAgingTargetDelta, code);
202   Code* stub = GetCodeFromTargetAddress(target_address);
203   return GetAgeOfCodeAgeStub(stub);
204 }
205 
// Rewrites the code-age sequence at |sequence| in place: restores the young
// prologue when |age| is kNoAgeCodeAge, otherwise patches in a call to the
// code-age stub matching |age| (padded to the full young-sequence length).
void Code::PatchPlatformCodeAge(Isolate* isolate, byte* sequence,
                                Code::Age age) {
  uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
  if (age == kNoAgeCodeAge) {
    // Copy the canonical young prologue back over the sequence and flush the
    // instruction cache so the CPU sees the new code.
    isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
    Assembler::FlushICache(isolate, sequence, young_length);
  } else {
    // FIXED_SEQUENCE
    Code* stub = GetCodeAgeStub(isolate, age);
    CodePatcher patcher(isolate, sequence, young_length);
    intptr_t target = reinterpret_cast<intptr_t>(stub->instruction_start());
    // We need to push lr on stack so that GenerateMakeCodeYoungAgainCommon
    // knows where to pick up the return address
    //
    // Since we can no longer guarantee ip will hold the branch address
    // because of BRASL, use Call so that GenerateMakeCodeYoungAgainCommon
    // can calculate the branch address offset
    patcher.masm()->nop();  // marker to detect sequence (see IsOld)
    patcher.masm()->CleanseP(r14);
    patcher.masm()->Push(r14);
    patcher.masm()->mov(r2, Operand(target));
    patcher.masm()->Call(r2);
    // Pad the rest of the young-sequence area with 2-byte nops so the patched
    // sequence occupies exactly the same number of bytes.
    for (int i = 0; i < kNoCodeAgeSequenceLength - kCodeAgingSequenceLength;
         i += 2) {
      // TODO(joransiu): Create nop function to pad
      //       (kNoCodeAgeSequenceLength - kCodeAgingSequenceLength) bytes.
      patcher.masm()->nop();  // 2-byte nops().
    }
  }
}
236 
237 }  // namespace internal
238 }  // namespace v8
239 
240 #endif  // V8_TARGET_ARCH_S390
241