// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_MIPS_CODE_STUBS_MIPS_H_
#define V8_MIPS_CODE_STUBS_MIPS_H_

#include "src/mips/frames-mips.h"

namespace v8 {
namespace internal {


void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code);


class StringHelper : public AllStatic {
 public:
  // Compares two flat one-byte strings and returns result in v0.
  static void GenerateCompareFlatOneByteStrings(
      MacroAssembler* masm, Register left, Register right, Register scratch1,
      Register scratch2, Register scratch3, Register scratch4);

  // Compares two flat one-byte strings for equality and returns result in v0.
  static void GenerateFlatOneByteStringEquals(MacroAssembler* masm,
                                              Register left, Register right,
                                              Register scratch1,
                                              Register scratch2,
                                              Register scratch3);

 private:
  static void GenerateOneByteCharsCompareLoop(
      MacroAssembler* masm, Register left, Register right, Register length,
      Register scratch1, Register scratch2, Register scratch3,
      Label* chars_not_equal);

  DISALLOW_IMPLICIT_CONSTRUCTORS(StringHelper);
};
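
// Usage sketch for the StringHelper generators above (illustrative only; the
// MIPS registers chosen below are arbitrary, not required by the interface):
//
//   // The registers passed as |left| and |right| must already hold flat
//   // one-byte strings.
//   StringHelper::GenerateCompareFlatOneByteStrings(masm, a0, a1, a2, a3, t0,
//                                                   t1);
//   // v0 now holds the comparison result described in the comment above.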


class StoreRegistersStateStub: public PlatformCodeStub {
 public:
  explicit StoreRegistersStateStub(Isolate* isolate)
      : PlatformCodeStub(isolate) {}

  static void GenerateAheadOfTime(Isolate* isolate);

 private:
  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
  DEFINE_PLATFORM_CODE_STUB(StoreRegistersState, PlatformCodeStub);
};


class RestoreRegistersStateStub: public PlatformCodeStub {
 public:
  explicit RestoreRegistersStateStub(Isolate* isolate)
      : PlatformCodeStub(isolate) {}

  static void GenerateAheadOfTime(Isolate* isolate);

 private:
  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
  DEFINE_PLATFORM_CODE_STUB(RestoreRegistersState, PlatformCodeStub);
};


class RecordWriteStub: public PlatformCodeStub {
 public:
  RecordWriteStub(Isolate* isolate,
                  Register object,
                  Register value,
                  Register address,
                  RememberedSetAction remembered_set_action,
                  SaveFPRegsMode fp_mode)
      : PlatformCodeStub(isolate),
        regs_(object,   // An input reg.
              address,  // An input reg.
              value) {  // One scratch reg.
    minor_key_ = ObjectBits::encode(object.code()) |
                 ValueBits::encode(value.code()) |
                 AddressBits::encode(address.code()) |
                 RememberedSetActionBits::encode(remembered_set_action) |
                 SaveFPRegsModeBits::encode(fp_mode);
  }

  RecordWriteStub(uint32_t key, Isolate* isolate)
      : PlatformCodeStub(key, isolate), regs_(object(), address(), value()) {}

  enum Mode {
    STORE_BUFFER_ONLY,
    INCREMENTAL,
    INCREMENTAL_COMPACTION
  };

  bool SometimesSetsUpAFrame() override { return false; }

  static void PatchBranchIntoNop(MacroAssembler* masm, int pos) {
    const unsigned offset = masm->instr_at(pos) & kImm16Mask;
    masm->instr_at_put(pos, BNE | (zero_reg.code() << kRsShift) |
                                (zero_reg.code() << kRtShift) |
                                (offset & kImm16Mask));
    DCHECK(Assembler::IsBne(masm->instr_at(pos)));
  }

  static void PatchNopIntoBranch(MacroAssembler* masm, int pos) {
    const unsigned offset = masm->instr_at(pos) & kImm16Mask;
    masm->instr_at_put(pos, BEQ | (zero_reg.code() << kRsShift) |
                                (zero_reg.code() << kRtShift) |
                                (offset & kImm16Mask));
    DCHECK(Assembler::IsBeq(masm->instr_at(pos)));
  }

  static Mode GetMode(Code* stub) {
    Instr first_instruction = Assembler::instr_at(stub->instruction_start());
    Instr second_instruction = Assembler::instr_at(stub->instruction_start() +
                                                   2 * Assembler::kInstrSize);

    if (Assembler::IsBeq(first_instruction)) {
      return INCREMENTAL;
    }

    DCHECK(Assembler::IsBne(first_instruction));

    if (Assembler::IsBeq(second_instruction)) {
      return INCREMENTAL_COMPACTION;
    }

    DCHECK(Assembler::IsBne(second_instruction));

    return STORE_BUFFER_ONLY;
  }

  static void Patch(Code* stub, Mode mode) {
    MacroAssembler masm(stub->GetIsolate(), stub->instruction_start(),
                        stub->instruction_size(), CodeObjectRequired::kNo);
    switch (mode) {
      case STORE_BUFFER_ONLY:
        DCHECK(GetMode(stub) == INCREMENTAL ||
               GetMode(stub) == INCREMENTAL_COMPACTION);
        PatchBranchIntoNop(&masm, 0);
        PatchBranchIntoNop(&masm, 2 * Assembler::kInstrSize);
        break;
      case INCREMENTAL:
        DCHECK(GetMode(stub) == STORE_BUFFER_ONLY);
        PatchNopIntoBranch(&masm, 0);
        break;
      case INCREMENTAL_COMPACTION:
        DCHECK(GetMode(stub) == STORE_BUFFER_ONLY);
        PatchNopIntoBranch(&masm, 2 * Assembler::kInstrSize);
        break;
    }
    DCHECK(GetMode(stub) == mode);
    Assembler::FlushICache(stub->GetIsolate(), stub->instruction_start(),
                           4 * Assembler::kInstrSize);
  }
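
  // Note on the patching protocol used by GetMode() and Patch() above
  // (descriptive only): the instructions at byte offsets 0 and
  // 2 * Assembler::kInstrSize are toggled between
  //   bne zero_reg, zero_reg, ...   (never taken, behaves like a nop) and
  //   beq zero_reg, zero_reg, ...   (always taken),
  // and the resulting pair encodes the mode:
  //
  //   offset 0   offset 2 * kInstrSize   mode
  //   --------   ---------------------   ----------------------
  //   beq        (not examined)          INCREMENTAL
  //   bne        beq                     INCREMENTAL_COMPACTION
  //   bne        bne                     STORE_BUFFER_ONLY
  //
  // Patch() rewrites these slots in place and then flushes the instruction
  // cache so the new mode takes effect.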

  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();

 private:
  // This is a helper class for freeing up 3 scratch registers. The input is
  // two registers that must be preserved and one scratch register provided by
  // the caller.
  class RegisterAllocation {
   public:
    RegisterAllocation(Register object,
                       Register address,
                       Register scratch0)
        : object_(object),
          address_(address),
          scratch0_(scratch0) {
      DCHECK(!AreAliased(scratch0, object, address, no_reg));
      scratch1_ = GetRegisterThatIsNotOneOf(object_, address_, scratch0_);
    }

    void Save(MacroAssembler* masm) {
      DCHECK(!AreAliased(object_, address_, scratch1_, scratch0_));
      // We don't have to save scratch0_ because it was given to us as
      // a scratch register.
      masm->push(scratch1_);
    }

    void Restore(MacroAssembler* masm) {
      masm->pop(scratch1_);
    }

    // If we have to call into C then we need to save and restore all caller-
    // saved registers that were not already preserved. The scratch registers
    // will be restored by other means so we don't bother pushing them here.
    void SaveCallerSaveRegisters(MacroAssembler* masm, SaveFPRegsMode mode) {
      masm->MultiPush((kJSCallerSaved | ra.bit()) & ~scratch1_.bit());
      if (mode == kSaveFPRegs) {
        masm->MultiPushFPU(kCallerSavedFPU);
      }
    }

    inline void RestoreCallerSaveRegisters(MacroAssembler* masm,
                                           SaveFPRegsMode mode) {
      if (mode == kSaveFPRegs) {
        masm->MultiPopFPU(kCallerSavedFPU);
      }
      masm->MultiPop((kJSCallerSaved | ra.bit()) & ~scratch1_.bit());
    }

    inline Register object() { return object_; }
    inline Register address() { return address_; }
    inline Register scratch0() { return scratch0_; }
    inline Register scratch1() { return scratch1_; }

   private:
    Register object_;
    Register address_;
    Register scratch0_;
    Register scratch1_;

    friend class RecordWriteStub;
  };
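
  // Sketch of how the register allocation helper above is intended to be
  // paired inside the generated stub body (illustrative only; the actual
  // sequence lives in code-stubs-mips.cc):
  //
  //   regs_.Save(masm);                 // spill the extra scratch register
  //   ...
  //   regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
  //   ...                               // call into C code
  //   regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
  //   ...
  //   regs_.Restore(masm);              // reload the extra scratch register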

  enum OnNoNeedToInformIncrementalMarker {
    kReturnOnNoNeedToInformIncrementalMarker,
    kUpdateRememberedSetOnNoNeedToInformIncrementalMarker
  };

  inline Major MajorKey() const final { return RecordWrite; }

  void Generate(MacroAssembler* masm) override;
  void GenerateIncremental(MacroAssembler* masm, Mode mode);
  void CheckNeedsToInformIncrementalMarker(
      MacroAssembler* masm,
      OnNoNeedToInformIncrementalMarker on_no_need,
      Mode mode);
  void InformIncrementalMarker(MacroAssembler* masm);

  void Activate(Code* code) override {
    code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code);
  }

  Register object() const {
    return Register::from_code(ObjectBits::decode(minor_key_));
  }

  Register value() const {
    return Register::from_code(ValueBits::decode(minor_key_));
  }

  Register address() const {
    return Register::from_code(AddressBits::decode(minor_key_));
  }

  RememberedSetAction remembered_set_action() const {
    return RememberedSetActionBits::decode(minor_key_);
  }

  SaveFPRegsMode save_fp_regs_mode() const {
    return SaveFPRegsModeBits::decode(minor_key_);
  }

  class ObjectBits: public BitField<int, 0, 5> {};
  class ValueBits: public BitField<int, 5, 5> {};
  class AddressBits: public BitField<int, 10, 5> {};
  class RememberedSetActionBits: public
      BitField<RememberedSetAction, 15, 1> {};
  class SaveFPRegsModeBits: public BitField<SaveFPRegsMode, 16, 1> {};

  Label slow_;
  RegisterAllocation regs_;

  DISALLOW_COPY_AND_ASSIGN(RecordWriteStub);
};


// Trampoline stub to call into native code. To call safely into native code
// in the presence of compacting GC (which can move code objects) we need to
// keep the code that called into native code pinned in memory. Currently the
// simplest approach is to generate such a stub early enough that it can never
// be moved by the GC.
class DirectCEntryStub: public PlatformCodeStub {
 public:
  explicit DirectCEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
  void GenerateCall(MacroAssembler* masm, Register target);

 private:
  bool NeedsImmovableCode() override { return true; }

  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
  DEFINE_PLATFORM_CODE_STUB(DirectCEntry, PlatformCodeStub);
};


class NameDictionaryLookupStub: public PlatformCodeStub {
 public:
  enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };

  NameDictionaryLookupStub(Isolate* isolate, LookupMode mode)
      : PlatformCodeStub(isolate) {
    minor_key_ = LookupModeBits::encode(mode);
  }

  static void GenerateNegativeLookup(MacroAssembler* masm,
                                     Label* miss,
                                     Label* done,
                                     Register receiver,
                                     Register properties,
                                     Handle<Name> name,
                                     Register scratch0);

  bool SometimesSetsUpAFrame() override { return false; }

 private:
  static const int kInlinedProbes = 4;
  static const int kTotalProbes = 20;

  static const int kCapacityOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kCapacityIndex * kPointerSize;

  static const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;

  LookupMode mode() const { return LookupModeBits::decode(minor_key_); }

  class LookupModeBits: public BitField<LookupMode, 0, 1> {};

  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
  DEFINE_PLATFORM_CODE_STUB(NameDictionaryLookup, PlatformCodeStub);
};


}  // namespace internal
}  // namespace v8

#endif  // V8_MIPS_CODE_STUBS_MIPS_H_