
Lines Matching refs:__

39 #define __ ACCESS_MASM(masm) macro
74 __ sll(offset_scratch, offset, 1); in ProbeTable()
75 __ Addu(offset_scratch, offset_scratch, offset); in ProbeTable()
78 __ li(base_addr, Operand(key_offset)); in ProbeTable()
79 __ sll(at, offset_scratch, kPointerSizeLog2); in ProbeTable()
80 __ Addu(base_addr, base_addr, at); in ProbeTable()
83 __ lw(at, MemOperand(base_addr, 0)); in ProbeTable()
84 __ Branch(&miss, ne, name, Operand(at)); in ProbeTable()
87 __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr)); in ProbeTable()
88 __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); in ProbeTable()
89 __ Branch(&miss, ne, at, Operand(scratch2)); in ProbeTable()
94 __ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr)); in ProbeTable()
99 __ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset)); in ProbeTable()
100 __ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup)); in ProbeTable()
101 __ Branch(&miss, ne, flags_reg, Operand(flags)); in ProbeTable()
105 __ jmp(&miss); in ProbeTable()
107 __ jmp(&miss); in ProbeTable()
112 __ Addu(at, code, Operand(Code::kHeaderSize - kHeapObjectTag)); in ProbeTable()
113 __ Jump(at); in ProbeTable()
116 __ bind(&miss); in ProbeTable()
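
The ProbeTable() matches above are one probe of the megamorphic stub cache: the offset is scaled by the three-word entry size (lines 74-80), then the entry's key, the receiver's map, and the code object's flags are each checked against the expected values, falling through to &miss on any mismatch. A minimal C++ sketch, assuming a hypothetical three-pointer entry layout (code_flags stands for the word loaded from the code object at lines 94-99):

    #include <cstdint>

    // Hypothetical stand-in; the real table lives in v8::internal::StubCache.
    struct Entry { const void* key; const void* value; const void* map; };

    // One probe. nullptr means "fall through to &miss"; otherwise the stub
    // tail-jumps into the cached code object (lines 112-113).
    const void* Probe(const Entry* table, uint32_t offset, const void* name,
                      const void* receiver_map, uint32_t code_flags,
                      uint32_t wanted_flags, uint32_t flags_not_used_in_lookup) {
      const Entry& e = table[offset];             // offset * 3 words, lines 74-80
      if (e.key != name) return nullptr;          // lines 83-84
      if (e.map != receiver_map) return nullptr;  // lines 87-89
      if ((code_flags & ~flags_not_used_in_lookup) != wanted_flags)
        return nullptr;                           // lines 99-101
      return e.value;
    }
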
133 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1); in GenerateDictionaryNegativeLookup()
134 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); in GenerateDictionaryNegativeLookup()
143 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in GenerateDictionaryNegativeLookup()
144 __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset)); in GenerateDictionaryNegativeLookup()
145 __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask)); in GenerateDictionaryNegativeLookup()
146 __ Branch(miss_label, ne, scratch0, Operand(zero_reg)); in GenerateDictionaryNegativeLookup()
149 __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset)); in GenerateDictionaryNegativeLookup()
150 __ Branch(miss_label, lt, scratch0, Operand(FIRST_SPEC_OBJECT_TYPE)); in GenerateDictionaryNegativeLookup()
154 __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); in GenerateDictionaryNegativeLookup()
156 __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset)); in GenerateDictionaryNegativeLookup()
158 __ LoadRoot(tmp, Heap::kHashTableMapRootIndex); in GenerateDictionaryNegativeLookup()
159 __ Branch(miss_label, ne, map, Operand(tmp)); in GenerateDictionaryNegativeLookup()
162 __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); in GenerateDictionaryNegativeLookup()
172 __ bind(&done); in GenerateDictionaryNegativeLookup()
173 __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); in GenerateDictionaryNegativeLookup()
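
GenerateDictionaryNegativeLookup() bails out to the miss label unless the receiver is a plain spec object whose properties are in dictionary (hash-table) mode; only then is the actual negative dictionary probe emitted. The three guards as one self-contained predicate (stand-in types; the mask and type values are V8's constants):

    #include <cstdint>

    struct MapRep { uint8_t bit_field; uint8_t instance_type; };
    struct ObjRep { const MapRep* map; const ObjRep* properties; };

    // Guards from lines 143-159; any failure means "jump to miss_label".
    bool NegativeLookupGuardsPass(const ObjRep* receiver,
                                  uint8_t interceptor_or_access_check_mask,
                                  uint8_t first_spec_object_type,
                                  const MapRep* hash_table_map) {
      if (receiver->map->bit_field & interceptor_or_access_check_mask)
        return false;                                      // lines 144-146
      if (receiver->map->instance_type < first_spec_object_type)
        return false;                                      // lines 149-150
      return receiver->properties->map == hash_table_map;  // lines 154-159
    }
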
213 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, in GenerateProbe()
217 __ JumpIfSmi(receiver, &miss); in GenerateProbe()
220 __ lw(scratch, FieldMemOperand(name, String::kHashFieldOffset)); in GenerateProbe()
221 __ lw(at, FieldMemOperand(receiver, HeapObject::kMapOffset)); in GenerateProbe()
222 __ Addu(scratch, scratch, at); in GenerateProbe()
226 __ srl(scratch, scratch, kHeapObjectTagSize); in GenerateProbe()
227 __ Xor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask)); in GenerateProbe()
228 __ And(scratch, scratch, Operand(mask)); in GenerateProbe()
243 __ srl(at, name, kHeapObjectTagSize); in GenerateProbe()
244 __ Subu(scratch, scratch, at); in GenerateProbe()
246 __ Addu(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2)); in GenerateProbe()
247 __ And(scratch, scratch, Operand(mask2)); in GenerateProbe()
263 __ bind(&miss); in GenerateProbe()
264 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, in GenerateProbe()
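
GenerateProbe() derives the primary table offset from the name's hash field, the receiver's raw map word, and the code flags (lines 220-228), and the secondary offset from the primary one and the name (lines 243-247). Written out in C++ (tag_size is kHeapObjectTagSize; the masks depend on the two table sizes):

    #include <cstdint>

    uint32_t PrimaryOffset(uint32_t hash_field, uint32_t map_word,
                           uint32_t flags, uint32_t mask, int tag_size) {
      // lines 220-228: add, shift out the tag, fold in the flags, mask.
      return (((hash_field + map_word) >> tag_size) ^ (flags >> tag_size)) & mask;
    }

    uint32_t SecondaryOffset(uint32_t primary, uint32_t name_word,
                             uint32_t flags, uint32_t mask2, int tag_size) {
      // lines 243-247: subtract the name, re-add the flags, mask.
      return (primary - (name_word >> tag_size) + (flags >> tag_size)) & mask2;
    }
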
273 __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); in GenerateLoadGlobalFunctionPrototype()
275 __ lw(prototype, in GenerateLoadGlobalFunctionPrototype()
278 __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index))); in GenerateLoadGlobalFunctionPrototype()
280 __ lw(prototype, in GenerateLoadGlobalFunctionPrototype()
283 __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); in GenerateLoadGlobalFunctionPrototype()
294 __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); in GenerateDirectLoadGlobalFunctionPrototype()
296 __ li(at, isolate->global()); in GenerateDirectLoadGlobalFunctionPrototype()
297 __ Branch(miss, ne, prototype, Operand(at)); in GenerateDirectLoadGlobalFunctionPrototype()
302 __ li(prototype, Handle<Map>(function->initial_map())); in GenerateDirectLoadGlobalFunctionPrototype()
304 __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); in GenerateDirectLoadGlobalFunctionPrototype()
321 __ lw(dst, FieldMemOperand(src, offset)); in GenerateFastPropertyLoad()
325 __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset)); in GenerateFastPropertyLoad()
326 __ lw(dst, FieldMemOperand(dst, offset)); in GenerateFastPropertyLoad()
336 __ JumpIfSmi(receiver, miss_label); in GenerateLoadArrayLength()
339 __ GetObjectType(receiver, scratch, scratch); in GenerateLoadArrayLength()
340 __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE)); in GenerateLoadArrayLength()
343 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); in GenerateLoadArrayLength()
344 __ Ret(); in GenerateLoadArrayLength()
358 __ JumpIfSmi(receiver, smi, t0); in GenerateStringCheck()
361 __ lw(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset)); in GenerateStringCheck()
362 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); in GenerateStringCheck()
363 __ And(scratch2, scratch1, Operand(kIsNotStringMask)); in GenerateStringCheck()
365 __ Branch(non_string_object, in GenerateStringCheck()
390 __ lw(v0, FieldMemOperand(receiver, String::kLengthOffset)); in GenerateLoadStringLength()
391 __ Ret(); in GenerateLoadStringLength()
395 __ bind(&check_wrapper); in GenerateLoadStringLength()
396 __ Branch(miss, ne, scratch1, Operand(JS_VALUE_TYPE)); in GenerateLoadStringLength()
399 __ lw(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset)); in GenerateLoadStringLength()
401 __ lw(v0, FieldMemOperand(scratch1, String::kLengthOffset)); in GenerateLoadStringLength()
402 __ Ret(); in GenerateLoadStringLength()
412 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label); in GenerateLoadFunctionPrototype()
413 __ mov(v0, scratch1); in GenerateLoadFunctionPrototype()
414 __ Ret(); in GenerateLoadFunctionPrototype()
434 __ CheckMap(receiver_reg, scratch, Handle<Map>(object->map()), miss_label, in GenerateStoreField()
439 __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label); in GenerateStoreField()
450 __ push(receiver_reg); in GenerateStoreField()
451 __ li(a2, Operand(transition)); in GenerateStoreField()
452 __ Push(a2, a0); in GenerateStoreField()
453 __ TailCallExternalReference( in GenerateStoreField()
463 __ li(t0, Operand(transition)); in GenerateStoreField()
464 __ sw(t0, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); in GenerateStoreField()
475 __ sw(a0, FieldMemOperand(receiver_reg, offset)); in GenerateStoreField()
478 __ JumpIfSmi(a0, &exit, scratch); in GenerateStoreField()
482 __ mov(name_reg, a0); in GenerateStoreField()
483 __ RecordWriteField(receiver_reg, in GenerateStoreField()
493 __ lw(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); in GenerateStoreField()
494 __ sw(a0, FieldMemOperand(scratch, offset)); in GenerateStoreField()
497 __ JumpIfSmi(a0, &exit); in GenerateStoreField()
501 __ mov(name_reg, a0); in GenerateStoreField()
502 __ RecordWriteField(scratch, in GenerateStoreField()
511 __ bind(&exit); in GenerateStoreField()
512 __ mov(v0, a0); in GenerateStoreField()
513 __ Ret(); in GenerateStoreField()
522 __ Jump(code, RelocInfo::CODE_TARGET); in GenerateLoadMiss()
536 __ JumpIfSmi(a1, miss); in GenerateCallFunction()
537 __ GetObjectType(a1, a3, a3); in GenerateCallFunction()
538 __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE)); in GenerateCallFunction()
543 __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset)); in GenerateCallFunction()
544 __ sw(a3, MemOperand(sp, arguments.immediate() * kPointerSize)); in GenerateCallFunction()
551 __ InvokeFunction(a1, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind); in GenerateCallFunction()
560 __ push(name); in PushInterceptorArguments()
564 __ li(scratch, Operand(interceptor)); in PushInterceptorArguments()
565 __ Push(scratch, receiver, holder); in PushInterceptorArguments()
566 __ lw(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset)); in PushInterceptorArguments()
567 __ push(scratch); in PushInterceptorArguments()
582 __ PrepareCEntryArgs(5); in CompileCallLoadPropertyWithInterceptor()
583 __ PrepareCEntryFunction(ref); in CompileCallLoadPropertyWithInterceptor()
586 __ CallStub(&stub); in CompileCallLoadPropertyWithInterceptor()
601 __ push(zero_reg); in ReserveSpaceForFastApiCall()
608 __ Drop(kFastApiCallArguments); in FreeSpaceForFastApiCall()
626 __ LoadHeapObject(t1, function); in GenerateFastApiDirectCall()
627 __ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset)); in GenerateFastApiDirectCall()
633 __ li(a0, api_call_info); in GenerateFastApiDirectCall()
634 __ lw(t2, FieldMemOperand(a0, CallHandlerInfo::kDataOffset)); in GenerateFastApiDirectCall()
636 __ li(t2, call_data); in GenerateFastApiDirectCall()
640 __ sw(t1, MemOperand(sp, 1 * kPointerSize)); in GenerateFastApiDirectCall()
641 __ sw(t2, MemOperand(sp, 2 * kPointerSize)); in GenerateFastApiDirectCall()
645 __ Addu(a2, sp, Operand(2 * kPointerSize)); in GenerateFastApiDirectCall()
650 __ EnterExitFrame(false, kApiStackSpace); in GenerateFastApiDirectCall()
659 __ Addu(a1, sp, kPointerSize); in GenerateFastApiDirectCall()
662 __ sw(a2, MemOperand(a1, 0 * kPointerSize)); in GenerateFastApiDirectCall()
664 __ Addu(t0, a2, Operand(argc * kPointerSize)); in GenerateFastApiDirectCall()
665 __ sw(t0, MemOperand(a1, 1 * kPointerSize)); in GenerateFastApiDirectCall()
667 __ li(t0, Operand(argc)); in GenerateFastApiDirectCall()
668 __ sw(t0, MemOperand(a1, 2 * kPointerSize)); in GenerateFastApiDirectCall()
670 __ sw(zero_reg, MemOperand(a1, 3 * kPointerSize)); in GenerateFastApiDirectCall()
680 __ CallApiFunctionAndReturn(ref, kStackUnwindSpace); in GenerateFastApiDirectCall()
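
GenerateFastApiDirectCall() stores the function and call data into pre-reserved stack slots (lines 640-641), then builds the argument block for the C++ API callback inside the exit frame (lines 659-670). The four stores correspond roughly to this layout; the field names are descriptive guesses from the stores, not V8's actual v8::Arguments declaration:

    #include <cstdint>

    // A sketch of the block at a1 after lines 659-670 (hypothetical names).
    struct FastApiCallArgs {
      void**  implicit_args;      // line 662: points at holder/callee/data
      void**  last_value;         // line 665: implicit_args + argc words
      int32_t argc;               // line 668
      int32_t is_construct_call;  // line 670: always zero for these stubs
    };
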
708 __ JumpIfSmi(receiver, miss); in Compile()
749 __ IncrementCounter(counters->call_const_interceptor(), 1, in CompileCacheable()
753 __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1, in CompileCacheable()
798 __ InvokeFunction(optimization.constant_function(), arguments_, in CompileCacheable()
804 __ bind(&miss_cleanup); in CompileCacheable()
806 __ Branch(miss_label); in CompileCacheable()
810 __ bind(&regular_invoke); in CompileCacheable()
833 __ push(name_); in CompileRegular()
837 __ CallExternalReference( in CompileRegular()
843 __ pop(name_); in CompileRegular()
856 __ Push(holder, name_); in LoadWithInterceptor()
862 __ pop(name_); // Restore the name. in LoadWithInterceptor()
863 __ pop(receiver); // Restore the holder. in LoadWithInterceptor()
866 __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex); in LoadWithInterceptor()
867 __ Branch(interceptor_succeeded, ne, v0, Operand(scratch)); in LoadWithInterceptor()
889 __ li(scratch, Operand(cell)); in GenerateCheckPropertyCell()
890 __ lw(scratch, in GenerateCheckPropertyCell()
892 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); in GenerateCheckPropertyCell()
893 __ Branch(miss, ne, scratch, Operand(at)); in GenerateCheckPropertyCell()
931 __ mtc1(ival, f0); in StoreIntAsFloat()
932 __ cvt_s_w(f0, f0); in StoreIntAsFloat()
933 __ sll(scratch1, wordoffset, 2); in StoreIntAsFloat()
934 __ addu(scratch1, dst, scratch1); in StoreIntAsFloat()
935 __ swc1(f0, MemOperand(scratch1, 0)); in StoreIntAsFloat()
945 __ And(fval, ival, Operand(kBinary32SignMask)); in StoreIntAsFloat()
947 __ subu(scratch1, zero_reg, ival); in StoreIntAsFloat()
948 __ Movn(ival, scratch1, fval); in StoreIntAsFloat()
953 __ Branch(&not_special, gt, ival, Operand(1)); in StoreIntAsFloat()
959 __ Xor(scratch1, ival, Operand(1)); in StoreIntAsFloat()
960 __ li(scratch2, exponent_word_for_1); in StoreIntAsFloat()
961 __ or_(scratch2, fval, scratch2); in StoreIntAsFloat()
962 __ Movz(fval, scratch2, scratch1); // Only if ival is equal to 1. in StoreIntAsFloat()
963 __ Branch(&done); in StoreIntAsFloat()
965 __ bind(&not_special); in StoreIntAsFloat()
969 __ Clz(zeros, ival); in StoreIntAsFloat()
972 __ li(scratch1, (kBitsPerInt - 1) + kBinary32ExponentBias); in StoreIntAsFloat()
973 __ subu(scratch1, scratch1, zeros); in StoreIntAsFloat()
975 __ sll(scratch1, scratch1, kBinary32ExponentShift); in StoreIntAsFloat()
976 __ or_(fval, fval, scratch1); in StoreIntAsFloat()
979 __ Addu(zeros, zeros, Operand(1)); in StoreIntAsFloat()
981 __ sllv(ival, ival, zeros); in StoreIntAsFloat()
983 __ srl(scratch1, ival, kBitsPerInt - kBinary32MantissaBits); in StoreIntAsFloat()
984 __ or_(fval, fval, scratch1); in StoreIntAsFloat()
986 __ bind(&done); in StoreIntAsFloat()
988 __ sll(scratch1, wordoffset, 2); in StoreIntAsFloat()
989 __ addu(scratch1, dst, scratch1); in StoreIntAsFloat()
990 __ sw(fval, MemOperand(scratch1, 0)); in StoreIntAsFloat()
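
StoreIntAsFloat() has two paths: with an FPU, mtc1/cvt_s_w/swc1 convert directly (lines 931-935); without one, lines 945-989 assemble the IEEE binary32 by hand: extract the sign, negate, special-case 0 and 1, then use Clz to find the exponent and left-align the mantissa. The bit manipulation, transcribed to C++ (same truncating behavior, no rounding):

    #include <cstdint>

    uint32_t IntToFloatBits(int32_t ival) {
      uint32_t bits = uint32_t(ival);
      uint32_t fval = bits & 0x80000000u;         // sign bit (line 945)
      uint32_t mag  = fval ? (0u - bits) : bits;  // |ival| (lines 947-948)
      if (mag <= 1) {                             // lines 953-963
        if (mag == 1) fval |= 127u << 23;         // exponent_word_for_1
        return fval;                              // 0 stays a (signed) zero
      }
      int zeros = __builtin_clz(mag);             // line 969: Clz
      fval |= uint32_t((31 - zeros) + 127) << 23; // exponent, lines 972-976
      mag <<= zeros + 1;                          // drop the hidden 1, 979-981
      fval |= mag >> (32 - 23);                   // top 23 bits, lines 983-984
      return fval;
    }
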
1014 __ li(scratch, biased_exponent << HeapNumber::kExponentShift); in GenerateUInt2Double()
1016 __ sll(loword, hiword, mantissa_shift_for_lo_word); in GenerateUInt2Double()
1017 __ srl(hiword, hiword, mantissa_shift_for_hi_word); in GenerateUInt2Double()
1018 __ or_(hiword, scratch, hiword); in GenerateUInt2Double()
1020 __ mov(loword, zero_reg); in GenerateUInt2Double()
1021 __ sll(hiword, hiword, mantissa_shift_for_hi_word); in GenerateUInt2Double()
1022 __ or_(hiword, scratch, hiword); in GenerateUInt2Double()
1028 __ li(scratch, 1 << HeapNumber::kExponentShift); in GenerateUInt2Double()
1029 __ nor(scratch, scratch, scratch); in GenerateUInt2Double()
1030 __ and_(hiword, hiword, scratch); in GenerateUInt2Double()
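
GenerateUInt2Double() bit-assembles the two words of an IEEE binary64 from a uint32 whose leading-zero count is known at compile time. The implicit leading 1 gets OR'd onto the exponent's lowest bit, which is why lines 1028-1030 clear that bit again when the biased exponent is even. A C++ rendering of that logic (1023 and 20 are HeapNumber's exponent bias and shift; a sketch reconstructed from the listed instructions):

    #include <cstdint>

    void UInt2Double(uint32_t value, int leading_zeroes,
                     uint32_t* hiword, uint32_t* loword) {
      const int kExponentShift = 20;                   // mantissa bits in hiword
      const int meaningful = 32 - leading_zeroes - 1;  // index of the leading 1
      const int biased_exponent = 1023 + meaningful;
      const uint32_t exponent = uint32_t(biased_exponent) << kExponentShift;
      const int hi_shift = meaningful - kExponentShift;
      if (hi_shift > 0) {                              // lines 1016-1018
        *loword = value << (32 - hi_shift);
        *hiword = exponent | (value >> hi_shift);
      } else {                                         // lines 1020-1022
        *loword = 0;
        *hiword = exponent | (value << -hi_shift);
      }
      if (!(biased_exponent & 1))                      // lines 1028-1030: undo the
        *hiword &= ~(1u << kExponentShift);            // leading 1 on the low bit
    }
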
1035 #undef __
1036 #define __ ACCESS_MASM(masm()) macro
1058 __ sw(reg, MemOperand(sp)); in CheckPrototypes()
1084 __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); in CheckPrototypes()
1086 __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); in CheckPrototypes()
1089 __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK, in CheckPrototypes()
1095 __ CheckAccessGlobalProxy(reg, scratch2, miss); in CheckPrototypes()
1102 __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); in CheckPrototypes()
1105 __ li(reg, Operand(prototype)); in CheckPrototypes()
1110 __ sw(reg, MemOperand(sp)); in CheckPrototypes()
1121 __ CheckMap(reg, scratch1, Handle<Map>(current->map()), miss, in CheckPrototypes()
1127 __ CheckAccessGlobalProxy(reg, scratch1, miss); in CheckPrototypes()
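
CheckPrototypes() walks from the receiver to the holder, verifying each object's map (the expected maps are compile-time constants baked into the stub), re-checking access for global proxies, and loading the next prototype either from the map in registers or as an embedded constant (lines 1102 vs 1105). A self-contained model of the walk:

    struct Map;
    struct Obj { Map* map; };
    struct Map { Obj* prototype; };

    // expected_maps holds the per-hop maps the compiled stub embeds.
    bool CheckPrototypes(Obj* object, Obj* holder, Map* const* expected_maps) {
      int hop = 0;
      for (Obj* cur = object; cur != holder; cur = cur->map->prototype) {
        if (cur->map != expected_maps[hop++]) return false;  // -> miss, line 1089
      }
      return true;  // "reg" now holds the holder
    }
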
1150 __ JumpIfSmi(receiver, miss); in GenerateLoadField()
1156 __ Ret(); in GenerateLoadField()
1170 __ JumpIfSmi(receiver, miss, scratch1); in GenerateLoadConstant()
1177 __ LoadHeapObject(v0, value); in GenerateLoadConstant()
1178 __ Ret(); in GenerateLoadConstant()
1193 __ JumpIfSmi(receiver, miss, scratch1); in GenerateLoadCallback()
1201 __ push(receiver); in GenerateLoadCallback()
1202 __ mov(scratch2, sp); // scratch2 = AccessorInfo::args_ in GenerateLoadCallback()
1204 __ li(scratch3, callback); in GenerateLoadCallback()
1205 __ lw(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset)); in GenerateLoadCallback()
1207 __ li(scratch3, Handle<Object>(callback->data())); in GenerateLoadCallback()
1209 __ Push(reg, scratch3, name_reg); in GenerateLoadCallback()
1210 __ mov(a2, scratch2); // Saved in case scratch2 == a1. in GenerateLoadCallback()
1211 __ mov(a1, sp); // a1 (first argument - see note below) = Handle<String> in GenerateLoadCallback()
1220 __ EnterExitFrame(false, kApiStackSpace); in GenerateLoadCallback()
1224 __ sw(a2, MemOperand(sp, kPointerSize)); in GenerateLoadCallback()
1226 __ Addu(a2, sp, kPointerSize); in GenerateLoadCallback()
1235 __ CallApiFunctionAndReturn(ref, kStackUnwindSpace); in GenerateLoadCallback()
1253 __ JumpIfSmi(receiver, miss); in GenerateLoadInterceptor()
1284 __ Push(receiver, holder_reg, name_reg); in GenerateLoadInterceptor()
1286 __ Push(holder_reg, name_reg); in GenerateLoadInterceptor()
1299 __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex); in GenerateLoadInterceptor()
1300 __ Branch(&interceptor_failed, eq, v0, Operand(scratch1)); in GenerateLoadInterceptor()
1302 __ Ret(); in GenerateLoadInterceptor()
1304 __ bind(&interceptor_failed); in GenerateLoadInterceptor()
1305 __ pop(name_reg); in GenerateLoadInterceptor()
1306 __ pop(holder_reg); in GenerateLoadInterceptor()
1308 __ pop(receiver); in GenerateLoadInterceptor()
1331 __ Ret(); in GenerateLoadInterceptor()
1343 __ li(scratch2, callback); in GenerateLoadInterceptor()
1347 __ Push(receiver, holder_reg); in GenerateLoadInterceptor()
1348 __ lw(scratch3, in GenerateLoadInterceptor()
1350 __ Push(scratch3, scratch2, name_reg); in GenerateLoadInterceptor()
1352 __ push(receiver); in GenerateLoadInterceptor()
1353 __ lw(scratch3, in GenerateLoadInterceptor()
1355 __ Push(holder_reg, scratch3, scratch2, name_reg); in GenerateLoadInterceptor()
1361 __ TailCallExternalReference(ref, 5, 1); in GenerateLoadInterceptor()
1374 __ TailCallExternalReference(ref, 5, 1); in GenerateLoadInterceptor()
1381 __ Branch(miss, ne, a2, Operand(name)); in GenerateNameCheck()
1396 __ lw(a0, MemOperand(sp, argc * kPointerSize)); in GenerateGlobalReceiverCheck()
1399 __ JumpIfSmi(a0, miss); in GenerateGlobalReceiverCheck()
1409 __ li(a3, Operand(cell)); in GenerateLoadFunctionFromCell()
1410 __ lw(a1, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset)); in GenerateLoadFunctionFromCell()
1419 __ JumpIfSmi(a1, miss); in GenerateLoadFunctionFromCell()
1420 __ GetObjectType(a1, a3, a3); in GenerateLoadFunctionFromCell()
1421 __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE)); in GenerateLoadFunctionFromCell()
1424 __ li(a3, Handle<SharedFunctionInfo>(function->shared())); in GenerateLoadFunctionFromCell()
1425 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in GenerateLoadFunctionFromCell()
1426 __ Branch(miss, ne, t0, Operand(a3)); in GenerateLoadFunctionFromCell()
1428 __ Branch(miss, ne, a1, Operand(function)); in GenerateLoadFunctionFromCell()
1438 __ Jump(code, RelocInfo::CODE_TARGET); in GenerateMissBranch()
1457 __ lw(a0, MemOperand(sp, argc * kPointerSize)); in CompileCallField()
1459 __ JumpIfSmi(a0, &miss, t0); in CompileCallField()
1468 __ bind(&miss); in CompileCallField()
1501 __ lw(receiver, MemOperand(sp, argc * kPointerSize)); in CompileArrayPushCall()
1504 __ JumpIfSmi(receiver, &miss); in CompileArrayPushCall()
1512 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); in CompileArrayPushCall()
1513 __ Drop(argc + 1); in CompileArrayPushCall()
1514 __ Ret(); in CompileArrayPushCall()
1523 __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset)); in CompileArrayPushCall()
1526 __ CheckMap(elements, in CompileArrayPushCall()
1533 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); in CompileArrayPushCall()
1536 __ Addu(v0, v0, Operand(Smi::FromInt(argc))); in CompileArrayPushCall()
1539 __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset)); in CompileArrayPushCall()
1542 __ Branch(&attempt_to_grow_elements, gt, v0, Operand(t0)); in CompileArrayPushCall()
1546 __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize)); in CompileArrayPushCall()
1547 __ JumpIfNotSmi(t0, &with_write_barrier); in CompileArrayPushCall()
1550 __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); in CompileArrayPushCall()
1555 __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize); in CompileArrayPushCall()
1556 __ Addu(end_elements, elements, end_elements); in CompileArrayPushCall()
1559 __ Addu(end_elements, end_elements, kEndElementsOffset); in CompileArrayPushCall()
1560 __ sw(t0, MemOperand(end_elements)); in CompileArrayPushCall()
1563 __ Drop(argc + 1); in CompileArrayPushCall()
1564 __ Ret(); in CompileArrayPushCall()
1566 __ bind(&with_write_barrier); in CompileArrayPushCall()
1568 __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset)); in CompileArrayPushCall()
1572 __ CheckFastObjectElements(a3, t3, &not_fast_object); in CompileArrayPushCall()
1573 __ jmp(&fast_object); in CompileArrayPushCall()
1575 __ bind(&not_fast_object); in CompileArrayPushCall()
1576 __ CheckFastSmiOnlyElements(a3, t3, &call_builtin); in CompileArrayPushCall()
1579 __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS, in CompileArrayPushCall()
1584 __ mov(a2, receiver); in CompileArrayPushCall()
1586 __ bind(&fast_object); in CompileArrayPushCall()
1588 __ CheckFastObjectElements(a3, a3, &call_builtin); in CompileArrayPushCall()
1592 __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); in CompileArrayPushCall()
1597 __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize); in CompileArrayPushCall()
1598 __ Addu(end_elements, elements, end_elements); in CompileArrayPushCall()
1599 __ Addu(end_elements, end_elements, kEndElementsOffset); in CompileArrayPushCall()
1600 __ sw(t0, MemOperand(end_elements)); in CompileArrayPushCall()
1602 __ RecordWrite(elements, in CompileArrayPushCall()
1609 __ Drop(argc + 1); in CompileArrayPushCall()
1610 __ Ret(); in CompileArrayPushCall()
1612 __ bind(&attempt_to_grow_elements); in CompileArrayPushCall()
1617 __ Branch(&call_builtin); in CompileArrayPushCall()
1620 __ lw(a2, MemOperand(sp, (argc - 1) * kPointerSize)); in CompileArrayPushCall()
1624 __ JumpIfSmi(a2, &no_fast_elements_check); in CompileArrayPushCall()
1625 __ lw(t3, FieldMemOperand(receiver, HeapObject::kMapOffset)); in CompileArrayPushCall()
1626 __ CheckFastObjectElements(t3, t3, &call_builtin); in CompileArrayPushCall()
1627 __ bind(&no_fast_elements_check); in CompileArrayPushCall()
1638 __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize); in CompileArrayPushCall()
1639 __ Addu(end_elements, elements, end_elements); in CompileArrayPushCall()
1640 __ Addu(end_elements, end_elements, Operand(kEndElementsOffset)); in CompileArrayPushCall()
1641 __ li(t3, Operand(new_space_allocation_top)); in CompileArrayPushCall()
1642 __ lw(a3, MemOperand(t3)); in CompileArrayPushCall()
1643 __ Branch(&call_builtin, ne, end_elements, Operand(a3)); in CompileArrayPushCall()
1645 __ li(t5, Operand(new_space_allocation_limit)); in CompileArrayPushCall()
1646 __ lw(t5, MemOperand(t5)); in CompileArrayPushCall()
1647 __ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize)); in CompileArrayPushCall()
1648 __ Branch(&call_builtin, hi, a3, Operand(t5)); in CompileArrayPushCall()
1652 __ sw(a3, MemOperand(t3)); in CompileArrayPushCall()
1654 __ sw(a2, MemOperand(end_elements)); in CompileArrayPushCall()
1656 __ LoadRoot(a3, Heap::kTheHoleValueRootIndex); in CompileArrayPushCall()
1658 __ sw(a3, MemOperand(end_elements, i * kPointerSize)); in CompileArrayPushCall()
1662 __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); in CompileArrayPushCall()
1663 __ Addu(t0, t0, Operand(Smi::FromInt(kAllocationDelta))); in CompileArrayPushCall()
1664 __ sw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset)); in CompileArrayPushCall()
1667 __ Drop(argc + 1); in CompileArrayPushCall()
1668 __ Ret(); in CompileArrayPushCall()
1670 __ bind(&call_builtin); in CompileArrayPushCall()
1671 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush, in CompileArrayPushCall()
1678 __ bind(&miss); in CompileArrayPushCall()
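
The push fast path: bump the Smi length (lines 1533-1536), compare against the backing store's capacity (lines 1539-1542), and when the new value is a Smi just store it at the end and return the new length; heap-object values detour through the write barrier, and a full backing store tries to grow in place at the new-space allocation top (lines 1638-1664). The Smi case as a self-contained model, with plain stand-ins for JSArray/FixedArray:

    #include <cstddef>

    struct Elements { size_t capacity; void* slots[1]; };  // flexible tail
    struct Array    { size_t length; Elements* elements; };

    // Returns the new length, or 0 for "fall back to &attempt_to_grow_elements
    // / the c_ArrayPush builtin". Write barrier omitted (Smi-only values).
    size_t PushFastPath(Array* a, void* value) {
      size_t new_length = a->length + 1;           // lines 1533-1536
      if (new_length > a->elements->capacity)      // lines 1539-1542
        return 0;
      a->length = new_length;                      // line 1550
      a->elements->slots[new_length - 1] = value;  // lines 1555-1560
      return new_length;                           // returned in v0
    }
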
1710 __ lw(receiver, MemOperand(sp, argc * kPointerSize)); in CompileArrayPopCall()
1712 __ JumpIfSmi(receiver, &miss); in CompileArrayPopCall()
1719 __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset)); in CompileArrayPopCall()
1722 __ CheckMap(elements, in CompileArrayPopCall()
1729 __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset)); in CompileArrayPopCall()
1730 __ Subu(t0, t0, Operand(Smi::FromInt(1))); in CompileArrayPopCall()
1731 __ Branch(&return_undefined, lt, t0, Operand(zero_reg)); in CompileArrayPopCall()
1734 __ LoadRoot(t2, Heap::kTheHoleValueRootIndex); in CompileArrayPopCall()
1739 __ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize); in CompileArrayPopCall()
1740 __ Addu(elements, elements, t1); in CompileArrayPopCall()
1741 __ lw(v0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag)); in CompileArrayPopCall()
1742 __ Branch(&call_builtin, eq, v0, Operand(t2)); in CompileArrayPopCall()
1745 __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset)); in CompileArrayPopCall()
1748 __ sw(t2, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag)); in CompileArrayPopCall()
1749 __ Drop(argc + 1); in CompileArrayPopCall()
1750 __ Ret(); in CompileArrayPopCall()
1752 __ bind(&return_undefined); in CompileArrayPopCall()
1753 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); in CompileArrayPopCall()
1754 __ Drop(argc + 1); in CompileArrayPopCall()
1755 __ Ret(); in CompileArrayPopCall()
1757 __ bind(&call_builtin); in CompileArrayPopCall()
1758 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop, in CompileArrayPopCall()
1764 __ bind(&miss); in CompileArrayPopCall()
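
The pop counterpart, repeating the Array/Elements stand-ins from the push sketch: an empty array returns undefined, a hole in the last slot punts to the c_ArrayPop builtin, and otherwise the length shrinks and the vacated slot is re-holed:

    #include <cstddef>

    struct Elements { size_t capacity; void* slots[1]; };
    struct Array    { size_t length; Elements* elements; };

    // Returns the popped value; *call_builtin set means take &call_builtin.
    void* PopFastPath(Array* a, void* the_hole, void* undefined,
                      bool* call_builtin) {
      *call_builtin = false;
      if (a->length == 0) return undefined;         // lines 1729-1731
      void* v = a->elements->slots[a->length - 1];  // lines 1739-1741
      if (v == the_hole) {                          // line 1742
        *call_builtin = true;
        return nullptr;
      }
      a->length -= 1;                               // line 1745
      a->elements->slots[a->length] = the_hole;     // line 1748
      return v;
    }
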
1816 __ lw(receiver, MemOperand(sp, argc * kPointerSize)); in CompileStringCharCodeAtCall()
1818 __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize)); in CompileStringCharCodeAtCall()
1820 __ LoadRoot(index, Heap::kUndefinedValueRootIndex); in CompileStringCharCodeAtCall()
1831 __ Drop(argc + 1); in CompileStringCharCodeAtCall()
1832 __ Ret(); in CompileStringCharCodeAtCall()
1838 __ bind(&index_out_of_range); in CompileStringCharCodeAtCall()
1839 __ LoadRoot(v0, Heap::kNanValueRootIndex); in CompileStringCharCodeAtCall()
1840 __ Drop(argc + 1); in CompileStringCharCodeAtCall()
1841 __ Ret(); in CompileStringCharCodeAtCall()
1844 __ bind(&miss); in CompileStringCharCodeAtCall()
1846 __ li(a2, name); in CompileStringCharCodeAtCall()
1847 __ bind(&name_miss); in CompileStringCharCodeAtCall()
1897 __ lw(receiver, MemOperand(sp, argc * kPointerSize)); in CompileStringCharAtCall()
1899 __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize)); in CompileStringCharAtCall()
1901 __ LoadRoot(index, Heap::kUndefinedValueRootIndex); in CompileStringCharAtCall()
1913 __ Drop(argc + 1); in CompileStringCharAtCall()
1914 __ Ret(); in CompileStringCharAtCall()
1920 __ bind(&index_out_of_range); in CompileStringCharAtCall()
1921 __ LoadRoot(v0, Heap::kEmptyStringRootIndex); in CompileStringCharAtCall()
1922 __ Drop(argc + 1); in CompileStringCharAtCall()
1923 __ Ret(); in CompileStringCharAtCall()
1926 __ bind(&miss); in CompileStringCharAtCall()
1928 __ li(a2, name); in CompileStringCharAtCall()
1929 __ bind(&name_miss); in CompileStringCharAtCall()
1961 __ lw(a1, MemOperand(sp, 1 * kPointerSize)); in CompileStringFromCharCodeCall()
1964 __ JumpIfSmi(a1, &miss); in CompileStringFromCharCodeCall()
1977 __ lw(code, MemOperand(sp, 0 * kPointerSize)); in CompileStringFromCharCodeCall()
1982 __ JumpIfNotSmi(code, &slow); in CompileStringFromCharCodeCall()
1985 __ And(code, code, Operand(Smi::FromInt(0xffff))); in CompileStringFromCharCodeCall()
1989 __ Drop(argc + 1); in CompileStringFromCharCodeCall()
1990 __ Ret(); in CompileStringFromCharCodeCall()
1997 __ bind(&slow); in CompileStringFromCharCodeCall()
1998 __ InvokeFunction( in CompileStringFromCharCodeCall()
2001 __ bind(&miss); in CompileStringFromCharCodeCall()
2038 __ lw(a1, MemOperand(sp, 1 * kPointerSize)); in CompileMathFloorCall()
2040 __ JumpIfSmi(a1, &miss); in CompileMathFloorCall()
2051 __ lw(v0, MemOperand(sp, 0 * kPointerSize)); in CompileMathFloorCall()
2055 __ And(t0, v0, Operand(kSmiTagMask)); in CompileMathFloorCall()
2056 __ Drop(argc + 1, eq, t0, Operand(zero_reg)); in CompileMathFloorCall()
2057 __ Ret(eq, t0, Operand(zero_reg)); in CompileMathFloorCall()
2059 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK); in CompileMathFloorCall()
2066 __ ldc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset)); in CompileMathFloorCall()
2069 __ cfc1(a3, FCSR); in CompileMathFloorCall()
2071 __ ctc1(zero_reg, FCSR); in CompileMathFloorCall()
2073 __ floor_w_d(f0, f0); in CompileMathFloorCall()
2077 __ lw(t1, FieldMemOperand(v0, HeapNumber::kValueOffset + kPointerSize)); in CompileMathFloorCall()
2078 __ And(t2, t1, Operand(~HeapNumber::kSignMask)); in CompileMathFloorCall()
2079 __ srl(t2, t2, HeapNumber::kMantissaBitsInTopWord); in CompileMathFloorCall()
2082 __ cfc1(t5, FCSR); in CompileMathFloorCall()
2083 __ And(t5, t5, Operand(kFCSRExceptionFlagMask)); in CompileMathFloorCall()
2084 __ Branch(&no_fpu_error, eq, t5, Operand(zero_reg)); in CompileMathFloorCall()
2089 __ Subu(t3, t2, Operand(HeapNumber::kExponentMask in CompileMathFloorCall()
2091 __ Branch(&restore_fcsr_and_return, eq, t3, Operand(zero_reg)); in CompileMathFloorCall()
2095 __ Branch(&restore_fcsr_and_return, ge, t3, in CompileMathFloorCall()
2097 __ Branch(&wont_fit_smi); in CompileMathFloorCall()
2099 __ bind(&no_fpu_error); in CompileMathFloorCall()
2101 __ mfc1(v0, f0); in CompileMathFloorCall()
2103 __ Addu(a1, v0, Operand(0x40000000)); in CompileMathFloorCall()
2104 __ Branch(&wont_fit_smi, lt, a1, Operand(zero_reg)); in CompileMathFloorCall()
2107 __ sll(v0, v0, kSmiTagSize); in CompileMathFloorCall()
2110 __ Branch(&restore_fcsr_and_return, ne, v0, Operand(zero_reg)); in CompileMathFloorCall()
2112 __ And(t0, t1, Operand(HeapNumber::kSignMask)); in CompileMathFloorCall()
2115 __ Branch(&restore_fcsr_and_return, eq, t0, Operand(zero_reg)); in CompileMathFloorCall()
2116 __ lw(v0, MemOperand(sp, 0 * kPointerSize)); in CompileMathFloorCall()
2118 __ bind(&restore_fcsr_and_return); in CompileMathFloorCall()
2120 __ ctc1(a3, FCSR); in CompileMathFloorCall()
2122 __ Drop(argc + 1); in CompileMathFloorCall()
2123 __ Ret(); in CompileMathFloorCall()
2125 __ bind(&wont_fit_smi); in CompileMathFloorCall()
2127 __ ctc1(a3, FCSR); in CompileMathFloorCall()
2129 __ bind(&slow); in CompileMathFloorCall()
2132 __ InvokeFunction( in CompileMathFloorCall()
2135 __ bind(&miss); in CompileMathFloorCall()
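
CompileMathFloorCall() clears FCSR, runs floor_w_d, and treats any raised FPU exception flag as a miss unless the input was a representable edge case (lines 2069-2097). The surviving integer then has to fit in a Smi, which on 32-bit V8 means 31 signed bits; the check at lines 2103-2104 is the add-0x40000000 trick:

    #include <cstdint>

    // True iff v survives a 1-bit Smi tag, i.e. v is in [-2^30, 2^30).
    // Adding 0x40000000 carries into the sign bit exactly when it does not fit.
    bool FitsSmi(int32_t v) {
      return uint32_t(v) + 0x40000000u < 0x80000000u;  // lines 2103-2104
    }
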
2167 __ lw(a1, MemOperand(sp, 1 * kPointerSize)); in CompileMathAbsCall()
2169 __ JumpIfSmi(a1, &miss); in CompileMathAbsCall()
2180 __ lw(v0, MemOperand(sp, 0 * kPointerSize)); in CompileMathAbsCall()
2185 __ JumpIfNotSmi(v0, &not_smi); in CompileMathAbsCall()
2189 __ sra(t0, v0, kBitsPerInt - 1); in CompileMathAbsCall()
2190 __ Xor(a1, v0, t0); in CompileMathAbsCall()
2193 __ Subu(v0, a1, t0); in CompileMathAbsCall()
2198 __ Branch(&slow, lt, v0, Operand(zero_reg)); in CompileMathAbsCall()
2201 __ Drop(argc + 1); in CompileMathAbsCall()
2202 __ Ret(); in CompileMathAbsCall()
2206 __ bind(&not_smi); in CompileMathAbsCall()
2207 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK); in CompileMathAbsCall()
2208 __ lw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset)); in CompileMathAbsCall()
2213 __ And(t0, a1, Operand(HeapNumber::kSignMask)); in CompileMathAbsCall()
2214 __ Branch(&negative_sign, ne, t0, Operand(zero_reg)); in CompileMathAbsCall()
2215 __ Drop(argc + 1); in CompileMathAbsCall()
2216 __ Ret(); in CompileMathAbsCall()
2220 __ bind(&negative_sign); in CompileMathAbsCall()
2221 __ Xor(a1, a1, Operand(HeapNumber::kSignMask)); in CompileMathAbsCall()
2222 __ lw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset)); in CompileMathAbsCall()
2223 __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex); in CompileMathAbsCall()
2224 __ AllocateHeapNumber(v0, t0, t1, t2, &slow); in CompileMathAbsCall()
2225 __ sw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset)); in CompileMathAbsCall()
2226 __ sw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset)); in CompileMathAbsCall()
2227 __ Drop(argc + 1); in CompileMathAbsCall()
2228 __ Ret(); in CompileMathAbsCall()
2232 __ bind(&slow); in CompileMathAbsCall()
2233 __ InvokeFunction( in CompileMathAbsCall()
2236 __ bind(&miss); in CompileMathAbsCall()
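
For a Smi argument, CompileMathAbsCall() uses the branch-free absolute-value idiom at lines 2189-2193; the only input it mishandles is the most negative value, which the check at line 2198 routes to the slow path. Heap numbers just get their sign bit cleared (lines 2213-2226). The idiom in C++:

    #include <cstdint>

    // sra gives 0 for x >= 0 and all-ones for x < 0, so (x ^ mask) - mask is
    // either x or ~x + 1 == -x, with no branch.
    int32_t BranchFreeAbs(int32_t x) {
      uint32_t mask = uint32_t(x >> 31);            // line 2189
      return int32_t((uint32_t(x) ^ mask) - mask);  // lines 2190-2193
    }
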
2271 __ lw(a1, MemOperand(sp, argc * kPointerSize)); in CompileFastApiCall()
2274 __ JumpIfSmi(a1, &miss_before_stack_reserved); in CompileFastApiCall()
2276 __ IncrementCounter(counters->call_const(), 1, a0, a3); in CompileFastApiCall()
2277 __ IncrementCounter(counters->call_const_fast_api(), 1, a0, a3); in CompileFastApiCall()
2287 __ bind(&miss); in CompileFastApiCall()
2290 __ bind(&miss_before_stack_reserved); in CompileFastApiCall()
2321 __ lw(a1, MemOperand(sp, argc * kPointerSize)); in CompileCallConstant()
2325 __ JumpIfSmi(a1, &miss); in CompileCallConstant()
2333 __ IncrementCounter(masm()->isolate()->counters()->call_const(), in CompileCallConstant()
2343 __ lw(a3, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset)); in CompileCallConstant()
2344 __ sw(a3, MemOperand(sp, argc * kPointerSize)); in CompileCallConstant()
2351 __ GetObjectType(a1, a3, a3); in CompileCallConstant()
2352 __ Branch(&miss, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE)); in CompileCallConstant()
2362 __ jmp(&miss); in CompileCallConstant()
2370 __ JumpIfSmi(a1, &fast); in CompileCallConstant()
2371 __ GetObjectType(a1, a0, a0); in CompileCallConstant()
2372 __ Branch(&miss, ne, a0, Operand(HEAP_NUMBER_TYPE)); in CompileCallConstant()
2373 __ bind(&fast); in CompileCallConstant()
2383 __ jmp(&miss); in CompileCallConstant()
2391 __ LoadRoot(t0, Heap::kTrueValueRootIndex); in CompileCallConstant()
2392 __ Branch(&fast, eq, a1, Operand(t0)); in CompileCallConstant()
2393 __ LoadRoot(t0, Heap::kFalseValueRootIndex); in CompileCallConstant()
2394 __ Branch(&miss, ne, a1, Operand(t0)); in CompileCallConstant()
2395 __ bind(&fast); in CompileCallConstant()
2405 __ jmp(&miss); in CompileCallConstant()
2413 __ InvokeFunction( in CompileCallConstant()
2417 __ bind(&miss); in CompileCallConstant()
2444 __ lw(a1, MemOperand(sp, argc * kPointerSize)); in CompileCallInterceptor()
2451 __ mov(a1, v0); in CompileCallInterceptor()
2453 __ lw(a0, MemOperand(sp, argc * kPointerSize)); in CompileCallInterceptor()
2458 __ bind(&miss); in CompileCallInterceptor()
2494 __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset)); in CompileCallGlobal()
2495 __ sw(a3, MemOperand(sp, argc * kPointerSize)); in CompileCallGlobal()
2499 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in CompileCallGlobal()
2503 __ IncrementCounter(counters->call_global_inline(), 1, a3, t0); in CompileCallGlobal()
2511 __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); in CompileCallGlobal()
2512 __ InvokeCode(a3, expected, arguments(), JUMP_FUNCTION, in CompileCallGlobal()
2516 __ bind(&miss); in CompileCallGlobal()
2517 __ IncrementCounter(counters->call_global_inline_miss(), 1, a1, a3); in CompileCallGlobal()
2539 __ bind(&miss); in CompileStoreField()
2540 __ li(a2, Operand(Handle<String>(name))); // Restore name. in CompileStoreField()
2542 __ Jump(ic, RelocInfo::CODE_TARGET); in CompileStoreField()
2562 __ CheckMap(a1, a3, Handle<Map>(object->map()), &miss, in CompileStoreCallback()
2567 __ CheckAccessGlobalProxy(a1, a3, &miss); in CompileStoreCallback()
2574 __ push(a1); // Receiver. in CompileStoreCallback()
2575 __ li(a3, Operand(callback)); // Callback info. in CompileStoreCallback()
2576 __ Push(a3, a2, a0); in CompileStoreCallback()
2582 __ TailCallExternalReference(store_callback_property, 4, 1); in CompileStoreCallback()
2585 __ bind(&miss); in CompileStoreCallback()
2587 __ Jump(ic, RelocInfo::CODE_TARGET); in CompileStoreCallback()
2606 __ CheckMap(a1, a3, Handle<Map>(receiver->map()), &miss, in CompileStoreInterceptor()
2611 __ CheckAccessGlobalProxy(a1, a3, &miss); in CompileStoreInterceptor()
2618 __ Push(a1, a2, a0); // Receiver, name, value. in CompileStoreInterceptor()
2620 __ li(a0, Operand(Smi::FromInt(strict_mode_))); in CompileStoreInterceptor()
2621 __ push(a0); // Strict mode. in CompileStoreInterceptor()
2627 __ TailCallExternalReference(store_ic_property, 4, 1); in CompileStoreInterceptor()
2630 __ bind(&miss); in CompileStoreInterceptor()
2632 __ Jump(ic, RelocInfo::CODE_TARGET); in CompileStoreInterceptor()
2652 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset)); in CompileStoreGlobal()
2653 __ Branch(&miss, ne, a3, Operand(Handle<Map>(object->map()))); in CompileStoreGlobal()
2659 __ li(t0, Operand(cell)); in CompileStoreGlobal()
2660 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex); in CompileStoreGlobal()
2661 __ lw(t2, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset)); in CompileStoreGlobal()
2662 __ Branch(&miss, eq, t1, Operand(t2)); in CompileStoreGlobal()
2665 __ sw(a0, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset)); in CompileStoreGlobal()
2666 __ mov(v0, a0); // Stored value must be returned in v0. in CompileStoreGlobal()
2670 __ IncrementCounter(counters->named_store_global_inline(), 1, a1, a3); in CompileStoreGlobal()
2671 __ Ret(); in CompileStoreGlobal()
2674 __ bind(&miss); in CompileStoreGlobal()
2675 __ IncrementCounter(counters->named_store_global_inline_miss(), 1, a1, a3); in CompileStoreGlobal()
2677 __ Jump(ic, RelocInfo::CODE_TARGET); in CompileStoreGlobal()
2694 __ JumpIfSmi(a0, &miss); in CompileLoadNonexistent()
2707 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); in CompileLoadNonexistent()
2708 __ Ret(); in CompileLoadNonexistent()
2710 __ bind(&miss); in CompileLoadNonexistent()
2729 __ mov(v0, a0); in CompileLoadField()
2732 __ bind(&miss); in CompileLoadField()
2753 __ bind(&miss); in CompileLoadCallback()
2773 __ bind(&miss); in CompileLoadConstant()
2796 __ bind(&miss); in CompileLoadInterceptor()
2818 __ JumpIfSmi(a0, &miss); in CompileLoadGlobal()
2822 __ li(a3, Operand(cell)); in CompileLoadGlobal()
2823 __ lw(t0, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset)); in CompileLoadGlobal()
2827 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); in CompileLoadGlobal()
2828 __ Branch(&miss, eq, t0, Operand(at)); in CompileLoadGlobal()
2831 __ mov(v0, t0); in CompileLoadGlobal()
2833 __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3); in CompileLoadGlobal()
2834 __ Ret(); in CompileLoadGlobal()
2836 __ bind(&miss); in CompileLoadGlobal()
2837 __ IncrementCounter(counters->named_load_global_stub_miss(), 1, a1, a3); in CompileLoadGlobal()
2857 __ Branch(&miss, ne, a0, Operand(name)); in CompileLoadField()
2860 __ bind(&miss); in CompileLoadField()
2880 __ Branch(&miss, ne, a0, Operand(name)); in CompileLoadCallback()
2884 __ bind(&miss); in CompileLoadCallback()
2904 __ Branch(&miss, ne, a0, Operand(name)); in CompileLoadConstant()
2907 __ bind(&miss); in CompileLoadConstant()
2927 __ Branch(&miss, ne, a0, Operand(name)); in CompileLoadInterceptor()
2933 __ bind(&miss); in CompileLoadInterceptor()
2950 __ Branch(&miss, ne, a0, Operand(name)); in CompileLoadArrayLength()
2953 __ bind(&miss); in CompileLoadArrayLength()
2970 __ IncrementCounter(counters->keyed_load_string_length(), 1, a2, a3); in CompileLoadStringLength()
2973 __ Branch(&miss, ne, a0, Operand(name)); in CompileLoadStringLength()
2976 __ bind(&miss); in CompileLoadStringLength()
2977 __ DecrementCounter(counters->keyed_load_string_length(), 1, a2, a3); in CompileLoadStringLength()
2995 __ IncrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3); in CompileLoadFunctionPrototype()
2998 __ Branch(&miss, ne, a0, Operand(name)); in CompileLoadFunctionPrototype()
3001 __ bind(&miss); in CompileLoadFunctionPrototype()
3002 __ DecrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3); in CompileLoadFunctionPrototype()
3019 __ DispatchMap(a1, a2, receiver_map, stub, DO_SMI_CHECK); in CompileLoadElement()
3022 __ Jump(ic, RelocInfo::CODE_TARGET); in CompileLoadElement()
3038 __ JumpIfSmi(a1, &miss); in CompileLoadPolymorphic()
3041 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset)); in CompileLoadPolymorphic()
3043 __ Jump(handler_ics->at(current), RelocInfo::CODE_TARGET, in CompileLoadPolymorphic()
3047 __ bind(&miss); in CompileLoadPolymorphic()
3049 __ Jump(miss_ic, RelocInfo::CODE_TARGET); in CompileLoadPolymorphic()
3070 __ IncrementCounter(counters->keyed_store_field(), 1, a3, t0); in CompileStoreField()
3073 __ Branch(&miss, ne, a1, Operand(name)); in CompileStoreField()
3078 __ bind(&miss); in CompileStoreField()
3080 __ DecrementCounter(counters->keyed_store_field(), 1, a3, t0); in CompileStoreField()
3082 __ Jump(ic, RelocInfo::CODE_TARGET); in CompileStoreField()
3103 __ DispatchMap(a2, a3, receiver_map, stub, DO_SMI_CHECK); in CompileStoreElement()
3106 __ Jump(ic, RelocInfo::CODE_TARGET); in CompileStoreElement()
3125 __ JumpIfSmi(a2, &miss); in CompileStorePolymorphic()
3128 __ lw(a3, FieldMemOperand(a2, HeapObject::kMapOffset)); in CompileStorePolymorphic()
3131 __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq, in CompileStorePolymorphic()
3135 __ Branch(&next_map, ne, a3, Operand(receiver_maps->at(i))); in CompileStorePolymorphic()
3136 __ li(a3, Operand(transitioned_maps->at(i))); in CompileStorePolymorphic()
3137 __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET); in CompileStorePolymorphic()
3138 __ bind(&next_map); in CompileStorePolymorphic()
3142 __ bind(&miss); in CompileStorePolymorphic()
3144 __ Jump(miss_ic, RelocInfo::CODE_TARGET); in CompileStorePolymorphic()
3160 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex); in CompileConstructStub()
3166 __ lw(t5, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in CompileConstructStub()
3167 __ lw(a2, FieldMemOperand(t5, SharedFunctionInfo::kDebugInfoOffset)); in CompileConstructStub()
3168 __ Branch(&generic_stub_call, ne, a2, Operand(t7)); in CompileConstructStub()
3174 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in CompileConstructStub()
3175 __ JumpIfSmi(a2, &generic_stub_call); in CompileConstructStub()
3176 __ GetObjectType(a2, a3, t0); in CompileConstructStub()
3177 __ Branch(&generic_stub_call, ne, t0, Operand(MAP_TYPE)); in CompileConstructStub()
3185 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset)); in CompileConstructStub()
3186 __ Check(ne, "Function constructed by construct stub.", in CompileConstructStub()
3195 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset)); in CompileConstructStub()
3196 __ AllocateInNewSpace(a3, t4, t5, t6, &generic_stub_call, SIZE_IN_WORDS); in CompileConstructStub()
3206 __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex); in CompileConstructStub()
3207 __ mov(t5, t4); in CompileConstructStub()
3208 __ sw(a2, MemOperand(t5, JSObject::kMapOffset)); in CompileConstructStub()
3209 __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset)); in CompileConstructStub()
3210 __ sw(t6, MemOperand(t5, JSObject::kElementsOffset)); in CompileConstructStub()
3211 __ Addu(t5, t5, Operand(3 * kPointerSize)); in CompileConstructStub()
3219 __ sll(a1, a0, kPointerSizeLog2); in CompileConstructStub()
3220 __ Addu(a1, a1, sp); in CompileConstructStub()
3237 __ Branch(&not_passed, less_equal, a0, Operand(arg_number)); in CompileConstructStub()
3239 __ lw(a2, MemOperand(a1, (arg_number + 1) * -kPointerSize)); in CompileConstructStub()
3240 __ sw(a2, MemOperand(t5)); in CompileConstructStub()
3241 __ Addu(t5, t5, kPointerSize); in CompileConstructStub()
3242 __ jmp(&next); in CompileConstructStub()
3243 __ bind(&not_passed); in CompileConstructStub()
3245 __ sw(t7, MemOperand(t5)); in CompileConstructStub()
3246 __ Addu(t5, t5, Operand(kPointerSize)); in CompileConstructStub()
3247 __ bind(&next); in CompileConstructStub()
3251 __ li(a2, Operand(constant)); in CompileConstructStub()
3252 __ sw(a2, MemOperand(t5)); in CompileConstructStub()
3253 __ Addu(t5, t5, kPointerSize); in CompileConstructStub()
3262 __ sw(t7, MemOperand(t5)); in CompileConstructStub()
3263 __ Addu(t5, t5, kPointerSize); in CompileConstructStub()
3269 __ mov(a1, a0); in CompileConstructStub()
3270 __ mov(v0, t4); in CompileConstructStub()
3271 __ Or(v0, v0, Operand(kHeapObjectTag)); in CompileConstructStub()
3276 __ sll(t0, a1, kPointerSizeLog2); in CompileConstructStub()
3277 __ Addu(sp, sp, t0); in CompileConstructStub()
3278 __ Addu(sp, sp, Operand(kPointerSize)); in CompileConstructStub()
3280 __ IncrementCounter(counters->constructed_objects(), 1, a1, a2); in CompileConstructStub()
3281 __ IncrementCounter(counters->constructed_objects_stub(), 1, a1, a2); in CompileConstructStub()
3282 __ Ret(); in CompileConstructStub()
3286 __ bind(&generic_stub_call); in CompileConstructStub()
3289 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); in CompileConstructStub()
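
CompileConstructStub() allocates the object from the map's instance size (lines 3195-3196), writes the header words (map, empty properties, empty elements, lines 3206-3211), then fills each in-object property from the matching stack argument, falling back to undefined for arguments that were not passed and to the transition's constant where one exists (line 3251). The argument/undefined loop as a self-contained model (the constant-function case is omitted):

    #include <cstddef>

    // Field i gets argument i if supplied, else undefined (lines 3237-3263).
    void InitInObjectFields(void** fields, size_t field_count,
                            void* const* args, size_t argc, void* undefined) {
      for (size_t i = 0; i < field_count; ++i)
        fields[i] = (i < argc) ? args[i] : undefined;
    }
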
3296 #undef __
3297 #define __ ACCESS_MASM(masm) macro
3312 __ JumpIfNotSmi(key, &miss_force_generic); in GenerateLoadDictionaryElement()
3313 __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset)); in GenerateLoadDictionaryElement()
3314 __ sra(a2, a0, kSmiTagSize); in GenerateLoadDictionaryElement()
3315 __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1); in GenerateLoadDictionaryElement()
3316 __ Ret(); in GenerateLoadDictionaryElement()
3319 __ bind(&slow); in GenerateLoadDictionaryElement()
3320 __ IncrementCounter( in GenerateLoadDictionaryElement()
3331 __ Jump(slow_ic, RelocInfo::CODE_TARGET); in GenerateLoadDictionaryElement()
3334 __ bind(&miss_force_generic); in GenerateLoadDictionaryElement()
3344 __ Jump(miss_ic, RelocInfo::CODE_TARGET); in GenerateLoadDictionaryElement()
3392 __ JumpIfNotSmi(key, &miss_force_generic); in GenerateLoadExternalArray()
3394 __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset)); in GenerateLoadExternalArray()
3398 __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset)); in GenerateLoadExternalArray()
3399 __ sra(t2, key, kSmiTagSize); in GenerateLoadExternalArray()
3401 __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1)); in GenerateLoadExternalArray()
3403 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset)); in GenerateLoadExternalArray()
3413 __ srl(t2, key, 1); in GenerateLoadExternalArray()
3414 __ addu(t3, a3, t2); in GenerateLoadExternalArray()
3415 __ lb(value, MemOperand(t3, 0)); in GenerateLoadExternalArray()
3419 __ srl(t2, key, 1); in GenerateLoadExternalArray()
3420 __ addu(t3, a3, t2); in GenerateLoadExternalArray()
3421 __ lbu(value, MemOperand(t3, 0)); in GenerateLoadExternalArray()
3424 __ addu(t3, a3, key); in GenerateLoadExternalArray()
3425 __ lh(value, MemOperand(t3, 0)); in GenerateLoadExternalArray()
3428 __ addu(t3, a3, key); in GenerateLoadExternalArray()
3429 __ lhu(value, MemOperand(t3, 0)); in GenerateLoadExternalArray()
3433 __ sll(t2, key, 1); in GenerateLoadExternalArray()
3434 __ addu(t3, a3, t2); in GenerateLoadExternalArray()
3435 __ lw(value, MemOperand(t3, 0)); in GenerateLoadExternalArray()
3438 __ sll(t3, t2, 2); in GenerateLoadExternalArray()
3439 __ addu(t3, a3, t3); in GenerateLoadExternalArray()
3442 __ lwc1(f0, MemOperand(t3, 0)); in GenerateLoadExternalArray()
3444 __ lw(value, MemOperand(t3, 0)); in GenerateLoadExternalArray()
3448 __ sll(t2, key, 2); in GenerateLoadExternalArray()
3449 __ addu(t3, a3, t2); in GenerateLoadExternalArray()
3452 __ ldc1(f0, MemOperand(t3, 0)); in GenerateLoadExternalArray()
3455 __ lw(a2, MemOperand(t3, 0)); in GenerateLoadExternalArray()
3456 __ lw(a3, MemOperand(t3, Register::kSizeInBytes)); in GenerateLoadExternalArray()
3482 __ Subu(t3, value, Operand(0xC0000000)); // Non-smi value gives neg result. in GenerateLoadExternalArray()
3483 __ Branch(&box_int, lt, t3, Operand(zero_reg)); in GenerateLoadExternalArray()
3485 __ sll(v0, value, kSmiTagSize); in GenerateLoadExternalArray()
3486 __ Ret(); in GenerateLoadExternalArray()
3488 __ bind(&box_int); in GenerateLoadExternalArray()
3493 __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex); in GenerateLoadExternalArray()
3494 __ AllocateHeapNumber(v0, a3, t0, t1, &slow); in GenerateLoadExternalArray()
3498 __ mtc1(value, f0); in GenerateLoadExternalArray()
3499 __ cvt_d_w(f0, f0); in GenerateLoadExternalArray()
3500 __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag)); in GenerateLoadExternalArray()
3501 __ Ret(); in GenerateLoadExternalArray()
3515 __ sw(dst1, FieldMemOperand(v0, HeapNumber::kMantissaOffset)); in GenerateLoadExternalArray()
3516 __ sw(dst2, FieldMemOperand(v0, HeapNumber::kExponentOffset)); in GenerateLoadExternalArray()
3517 __ Ret(); in GenerateLoadExternalArray()
3526 __ And(t2, value, Operand(0xC0000000)); in GenerateLoadExternalArray()
3527 __ Branch(&pl_box_int, ne, t2, Operand(zero_reg)); in GenerateLoadExternalArray()
3531 __ sll(v0, value, kSmiTagSize); in GenerateLoadExternalArray()
3532 __ Ret(); in GenerateLoadExternalArray()
3534 __ bind(&pl_box_int); in GenerateLoadExternalArray()
3538 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex); in GenerateLoadExternalArray()
3539 __ AllocateHeapNumber(v0, t2, t3, t6, &slow); in GenerateLoadExternalArray()
3546 __ Cvt_d_uw(f0, value, f22); in GenerateLoadExternalArray()
3548 __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag)); in GenerateLoadExternalArray()
3550 __ Ret(); in GenerateLoadExternalArray()
3554 __ And(t2, value, Operand(0x80000000)); in GenerateLoadExternalArray()
3555 __ Branch(&box_int_0, ne, t2, Operand(zero_reg)); in GenerateLoadExternalArray()
3556 __ And(t2, value, Operand(0x40000000)); in GenerateLoadExternalArray()
3557 __ Branch(&box_int_1, ne, t2, Operand(zero_reg)); in GenerateLoadExternalArray()
3560 __ sll(v0, value, kSmiTagSize); in GenerateLoadExternalArray()
3561 __ Ret(); in GenerateLoadExternalArray()
3566 __ bind(&box_int_0); in GenerateLoadExternalArray()
3569 __ Branch(&done); in GenerateLoadExternalArray()
3571 __ bind(&box_int_1); in GenerateLoadExternalArray()
3576 __ bind(&done); in GenerateLoadExternalArray()
3581 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex); in GenerateLoadExternalArray()
3582 __ AllocateHeapNumber(t2, t3, t5, t6, &slow); in GenerateLoadExternalArray()
3584 __ sw(hiword, FieldMemOperand(t2, HeapNumber::kExponentOffset)); in GenerateLoadExternalArray()
3585 __ sw(loword, FieldMemOperand(t2, HeapNumber::kMantissaOffset)); in GenerateLoadExternalArray()
3587 __ mov(v0, t2); in GenerateLoadExternalArray()
3588 __ Ret(); in GenerateLoadExternalArray()
3598 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex); in GenerateLoadExternalArray()
3599 __ AllocateHeapNumber(v0, t3, t5, t6, &slow); in GenerateLoadExternalArray()
3601 __ cvt_d_s(f0, f0); in GenerateLoadExternalArray()
3602 __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag)); in GenerateLoadExternalArray()
3603 __ Ret(); in GenerateLoadExternalArray()
3608 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex); in GenerateLoadExternalArray()
3609 __ AllocateHeapNumber(v0, t3, t5, t6, &slow); in GenerateLoadExternalArray()
3616 __ And(t4, value, Operand(kBinary32MantissaMask)); in GenerateLoadExternalArray()
3619 __ srl(t5, value, kBinary32MantissaBits); in GenerateLoadExternalArray()
3620 __ And(t5, t5, Operand(kBinary32ExponentMask >> kBinary32MantissaBits)); in GenerateLoadExternalArray()
3623 __ Branch(&exponent_rebiased, eq, t5, Operand(zero_reg)); in GenerateLoadExternalArray()
3625 __ li(t0, 0x7ff); in GenerateLoadExternalArray()
3626 __ Xor(t1, t5, Operand(0xFF)); in GenerateLoadExternalArray()
3627 __ Movz(t5, t0, t1); // Set t5 to 0x7ff only if t5 is equal to 0xff. in GenerateLoadExternalArray()
3628 __ Branch(&exponent_rebiased, eq, t0, Operand(0xff)); in GenerateLoadExternalArray()
3631 __ Addu(t5, in GenerateLoadExternalArray()
3635 __ bind(&exponent_rebiased); in GenerateLoadExternalArray()
3636 __ And(a2, value, Operand(kBinary32SignMask)); in GenerateLoadExternalArray()
3638 __ sll(t0, t5, HeapNumber::kMantissaBitsInTopWord); in GenerateLoadExternalArray()
3639 __ or_(a2, a2, t0); in GenerateLoadExternalArray()
3648 __ srl(t0, t4, kMantissaShiftForHiWord); in GenerateLoadExternalArray()
3649 __ or_(a2, a2, t0); in GenerateLoadExternalArray()
3650 __ sll(a0, t4, kMantissaShiftForLoWord); in GenerateLoadExternalArray()
3652 __ sw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset)); in GenerateLoadExternalArray()
3653 __ sw(a0, FieldMemOperand(v0, HeapNumber::kMantissaOffset)); in GenerateLoadExternalArray()
3654 __ Ret(); in GenerateLoadExternalArray()
3663 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex); in GenerateLoadExternalArray()
3664 __ AllocateHeapNumber(v0, t3, t5, t6, &slow); in GenerateLoadExternalArray()
3666 __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset)); in GenerateLoadExternalArray()
3667 __ Ret(); in GenerateLoadExternalArray()
3672 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex); in GenerateLoadExternalArray()
3673 __ AllocateHeapNumber(v0, t3, t5, t6, &slow); in GenerateLoadExternalArray()
3675 __ sw(a2, FieldMemOperand(v0, HeapNumber::kMantissaOffset)); in GenerateLoadExternalArray()
3676 __ sw(a3, FieldMemOperand(v0, HeapNumber::kExponentOffset)); in GenerateLoadExternalArray()
3677 __ Ret(); in GenerateLoadExternalArray()
3682 __ sll(v0, value, kSmiTagSize); in GenerateLoadExternalArray()
3683 __ Ret(); in GenerateLoadExternalArray()
3687 __ bind(&slow); in GenerateLoadExternalArray()
3688 __ IncrementCounter( in GenerateLoadExternalArray()
3698 __ Push(a1, a0); in GenerateLoadExternalArray()
3700 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); in GenerateLoadExternalArray()
3702 __ bind(&miss_force_generic); in GenerateLoadExternalArray()
3705 __ Jump(stub, RelocInfo::CODE_TARGET); in GenerateLoadExternalArray()
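
GenerateLoadExternalArray() returns a Smi when the loaded integer fits in 31 bits and otherwise boxes it in a fresh HeapNumber; for external float arrays without an FPU it widens the binary32 by hand (lines 3616-3653): split off sign, exponent, and mantissa, rebias the exponent from 127 to 1023 (mapping 0xFF to 0x7FF for infinities and NaNs), and redistribute the 23 mantissa bits across the two words. In C++:

    #include <cstdint>

    // binary32 bits -> the two binary64 words (exp == 0 passes through
    // unrebiased, exactly as the stub does; denormals are not handled).
    void Float32BitsToFloat64Words(uint32_t f, uint32_t* hi, uint32_t* lo) {
      uint32_t mantissa = f & 0x007FFFFFu;   // line 3616
      uint32_t exp = (f >> 23) & 0xFFu;      // lines 3619-3620
      if (exp == 0xFFu) exp = 0x7FFu;        // Inf/NaN (lines 3625-3628)
      else if (exp != 0) exp += 1023 - 127;  // rebias (lines 3629-3631)
      *hi = (f & 0x80000000u) | (exp << 20) | (mantissa >> 3);  // 3636-3649
      *lo = mantissa << 29;                  // line 3650
    }
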
3731 __ JumpIfNotSmi(key, &miss_force_generic); in GenerateStoreExternalArray()
3733 __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset)); in GenerateStoreExternalArray()
3736 __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset)); in GenerateStoreExternalArray()
3738 __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1)); in GenerateStoreExternalArray()
3746 __ JumpIfNotSmi(value, &slow); in GenerateStoreExternalArray()
3748 __ JumpIfNotSmi(value, &check_heap_number); in GenerateStoreExternalArray()
3750 __ SmiUntag(t1, value); in GenerateStoreExternalArray()
3751 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset)); in GenerateStoreExternalArray()
3761 __ li(v0, Operand(255)); in GenerateStoreExternalArray()
3763 __ Branch(&done, gt, t1, Operand(v0)); in GenerateStoreExternalArray()
3765 __ Branch(USE_DELAY_SLOT, &done, lt, t1, Operand(zero_reg)); in GenerateStoreExternalArray()
3766 __ mov(v0, zero_reg); // In delay slot. in GenerateStoreExternalArray()
3767 __ mov(v0, t1); // Value is in range 0..255. in GenerateStoreExternalArray()
3768 __ bind(&done); in GenerateStoreExternalArray()
3769 __ mov(t1, v0); in GenerateStoreExternalArray()
3771 __ srl(t8, key, 1); in GenerateStoreExternalArray()
3772 __ addu(t8, a3, t8); in GenerateStoreExternalArray()
3773 __ sb(t1, MemOperand(t8, 0)); in GenerateStoreExternalArray()
3778 __ srl(t8, key, 1); in GenerateStoreExternalArray()
3779 __ addu(t8, a3, t8); in GenerateStoreExternalArray()
3780 __ sb(t1, MemOperand(t8, 0)); in GenerateStoreExternalArray()
3784 __ addu(t8, a3, key); in GenerateStoreExternalArray()
3785 __ sh(t1, MemOperand(t8, 0)); in GenerateStoreExternalArray()
3789 __ sll(t8, key, 1); in GenerateStoreExternalArray()
3790 __ addu(t8, a3, t8); in GenerateStoreExternalArray()
3791 __ sw(t1, MemOperand(t8, 0)); in GenerateStoreExternalArray()
3795 __ SmiUntag(t0, key); in GenerateStoreExternalArray()
3799 __ sll(t8, key, 2); in GenerateStoreExternalArray()
3800 __ addu(a3, a3, t8); in GenerateStoreExternalArray()
3814 __ sdc1(f0, MemOperand(a3, 0)); in GenerateStoreExternalArray()
3816 __ sw(t2, MemOperand(a3, 0)); in GenerateStoreExternalArray()
3817 __ sw(t3, MemOperand(a3, Register::kSizeInBytes)); in GenerateStoreExternalArray()
3830 __ mov(v0, a0); in GenerateStoreExternalArray()
3831 __ Ret(); in GenerateStoreExternalArray()
3835 __ bind(&check_heap_number); in GenerateStoreExternalArray()
3836 __ GetObjectType(value, t1, t2); in GenerateStoreExternalArray()
3837 __ Branch(&slow, ne, t2, Operand(HEAP_NUMBER_TYPE)); in GenerateStoreExternalArray()
3839 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset)); in GenerateStoreExternalArray()
3850 __ ldc1(f0, FieldMemOperand(a0, HeapNumber::kValueOffset)); in GenerateStoreExternalArray()
3853 __ cvt_s_d(f0, f0); in GenerateStoreExternalArray()
3854 __ sll(t8, key, 1); in GenerateStoreExternalArray()
3855 __ addu(t8, a3, t8); in GenerateStoreExternalArray()
3856 __ swc1(f0, MemOperand(t8, 0)); in GenerateStoreExternalArray()
3858 __ sll(t8, key, 2); in GenerateStoreExternalArray()
3859 __ addu(t8, a3, t8); in GenerateStoreExternalArray()
3860 __ sdc1(f0, MemOperand(t8, 0)); in GenerateStoreExternalArray()
3862 __ EmitECMATruncate(t3, f0, f2, t2, t1, t5); in GenerateStoreExternalArray()
3867 __ srl(t8, key, 1); in GenerateStoreExternalArray()
3868 __ addu(t8, a3, t8); in GenerateStoreExternalArray()
3869 __ sb(t3, MemOperand(t8, 0)); in GenerateStoreExternalArray()
3873 __ addu(t8, a3, key); in GenerateStoreExternalArray()
3874 __ sh(t3, MemOperand(t8, 0)); in GenerateStoreExternalArray()
3878 __ sll(t8, key, 1); in GenerateStoreExternalArray()
3879 __ addu(t8, a3, t8); in GenerateStoreExternalArray()
3880 __ sw(t3, MemOperand(t8, 0)); in GenerateStoreExternalArray()
3897 __ mov(v0, a0); in GenerateStoreExternalArray()
3898 __ Ret(); in GenerateStoreExternalArray()
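On the FPU path a heap number is either narrowed with cvt_s_d for float arrays, stored whole for double arrays, or truncated to int32 with EmitECMATruncate for the integer kinds. Roughly, that truncation follows ECMA-262 ToInt32: truncate toward zero, reduce modulo 2^32, reinterpret as signed. A hedged C++ sketch, not V8's exact code:

    #include <cmath>
    #include <cstdint>

    static int32_t EcmaToInt32(double d) {
      if (!std::isfinite(d)) return 0;        // NaN and +/-infinity become 0.
      double t = std::trunc(d);               // Truncate toward zero.
      double m = std::fmod(t, 4294967296.0);  // Reduce modulo 2^32 (exact).
      if (m < 0) m += 4294967296.0;
      return static_cast<int32_t>(static_cast<uint32_t>(m));
    }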
3902 __ lw(t3, FieldMemOperand(value, HeapNumber::kExponentOffset)); in GenerateStoreExternalArray()
3903 __ lw(t4, FieldMemOperand(value, HeapNumber::kMantissaOffset)); in GenerateStoreExternalArray()
3915 __ li(t5, HeapNumber::kExponentMask); in GenerateStoreExternalArray()
3916 __ and_(t6, t3, t5); in GenerateStoreExternalArray()
3917 __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(zero_reg)); in GenerateStoreExternalArray()
3919 __ xor_(t1, t6, t5); in GenerateStoreExternalArray()
3920 __ li(t2, kBinary32ExponentMask); in GenerateStoreExternalArray()
3921 __ Movz(t6, t2, t1); // Only if t6 is equal to t5. in GenerateStoreExternalArray()
3922 __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(t5)); in GenerateStoreExternalArray()
3925 __ srl(t6, t6, HeapNumber::kExponentShift); in GenerateStoreExternalArray()
3926 __ Addu(t6, in GenerateStoreExternalArray()
3930 __ li(t1, Operand(kBinary32MaxExponent)); in GenerateStoreExternalArray()
3931 __ Slt(t1, t1, t6); in GenerateStoreExternalArray()
3932 __ And(t2, t3, Operand(HeapNumber::kSignMask)); in GenerateStoreExternalArray()
3933 __ Or(t2, t2, Operand(kBinary32ExponentMask)); in GenerateStoreExternalArray()
3934 __ Movn(t3, t2, t1); // Only if t6 is gt kBinary32MaxExponent. in GenerateStoreExternalArray()
3935 __ Branch(&done, gt, t6, Operand(kBinary32MaxExponent)); in GenerateStoreExternalArray()
3937 __ Slt(t1, t6, Operand(kBinary32MinExponent)); in GenerateStoreExternalArray()
3938 __ And(t2, t3, Operand(HeapNumber::kSignMask)); in GenerateStoreExternalArray()
3939 __ Movn(t3, t2, t1); // Only if t6 is lt kBinary32MinExponent. in GenerateStoreExternalArray()
3940 __ Branch(&done, lt, t6, Operand(kBinary32MinExponent)); in GenerateStoreExternalArray()
3942 __ And(t7, t3, Operand(HeapNumber::kSignMask)); in GenerateStoreExternalArray()
3943 __ And(t3, t3, Operand(HeapNumber::kMantissaMask)); in GenerateStoreExternalArray()
3944 __ sll(t3, t3, kMantissaInHiWordShift); in GenerateStoreExternalArray()
3945 __ or_(t7, t7, t3); in GenerateStoreExternalArray()
3946 __ srl(t4, t4, kMantissaInLoWordShift); in GenerateStoreExternalArray()
3947 __ or_(t7, t7, t4); in GenerateStoreExternalArray()
3948 __ sll(t6, t6, kBinary32ExponentShift); in GenerateStoreExternalArray()
3949 __ or_(t3, t7, t6); in GenerateStoreExternalArray()
3951 __ bind(&done); in GenerateStoreExternalArray()
3952 __ sll(t9, key, 1); in GenerateStoreExternalArray()
3953 __ addu(t9, a2, t9); in GenerateStoreExternalArray()
3954 __ sw(t3, MemOperand(t9, 0)); in GenerateStoreExternalArray()
3958 __ mov(v0, a0); in GenerateStoreExternalArray()
3959 __ Ret(); in GenerateStoreExternalArray()
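Without an FPU, the stub narrows the double to binary32 by hand: rebias the exponent (1023 to 127), clamp overflow to infinity and underflow to signed zero, and repack the top 20 mantissa bits from the high word plus 3 more from the low word, rounding toward zero. A bit-level sketch under those assumptions (NaN, infinity, and zero take the separate path below, so they are not handled here):

    #include <cstdint>

    static uint32_t DoubleBitsToFloatBits(uint32_t hi, uint32_t lo) {
      uint32_t sign = hi & 0x80000000u;
      int exponent = static_cast<int>((hi >> 20) & 0x7FF) - 1023 + 127;
      if (exponent > 0xFE) return sign | 0x7F800000u;  // Overflow: infinity.
      if (exponent < 1) return sign;                   // Underflow: signed zero.
      uint32_t mantissa = ((hi & 0xFFFFFu) << 3) | (lo >> 29);
      return sign | (static_cast<uint32_t>(exponent) << 23) | mantissa;
    }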
3961 __ bind(&nan_or_infinity_or_zero); in GenerateStoreExternalArray()
3962 __ And(t7, t3, Operand(HeapNumber::kSignMask)); in GenerateStoreExternalArray()
3963 __ And(t3, t3, Operand(HeapNumber::kMantissaMask)); in GenerateStoreExternalArray()
3964 __ or_(t6, t6, t7); in GenerateStoreExternalArray()
3965 __ sll(t3, t3, kMantissaInHiWordShift); in GenerateStoreExternalArray()
3966 __ or_(t6, t6, t3); in GenerateStoreExternalArray()
3967 __ srl(t4, t4, kMantissaInLoWordShift); in GenerateStoreExternalArray()
3968 __ or_(t3, t6, t4); in GenerateStoreExternalArray()
3969 __ Branch(&done); in GenerateStoreExternalArray()
3971 __ sll(t8, t0, 3); in GenerateStoreExternalArray()
3972 __ addu(t8, a3, t8); in GenerateStoreExternalArray()
3974 __ sw(t4, MemOperand(t8, 0)); in GenerateStoreExternalArray()
3975 __ sw(t3, MemOperand(t8, Register::kSizeInBytes)); in GenerateStoreExternalArray()
3976 __ mov(v0, a0); in GenerateStoreExternalArray()
3977 __ Ret(); in GenerateStoreExternalArray()
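The &nan_or_infinity_or_zero branch keeps the sign, reuses the exponent field that was already all-ones (NaN/infinity) or all-zero (zero), and splices in the top 23 mantissa bits, so a NaN narrows to a NaN and +/-0 keeps its sign. A sketch, with the exponent field passed in explicitly:

    #include <cstdint>

    static uint32_t RepackSpecial(uint32_t hi, uint32_t lo,
                                  uint32_t binary32_exponent_field) {
      // binary32_exponent_field: 0x7F800000u for NaN/infinity, 0u for zero.
      uint32_t sign = hi & 0x80000000u;
      uint32_t mantissa = ((hi & 0xFFFFFu) << 3) | (lo >> 29);
      return sign | binary32_exponent_field | mantissa;
    }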
3987 __ li(t5, HeapNumber::kExponentMask); in GenerateStoreExternalArray()
3988 __ and_(t6, t3, t5); in GenerateStoreExternalArray()
3989 __ Movz(t3, zero_reg, t6); // Only if t6 is equal to zero. in GenerateStoreExternalArray()
3990 __ Branch(&done, eq, t6, Operand(zero_reg)); in GenerateStoreExternalArray()
3992 __ xor_(t2, t6, t5); in GenerateStoreExternalArray()
3993 __ Movz(t3, zero_reg, t2); // Only if t6 is equal to t5. in GenerateStoreExternalArray()
3994 __ Branch(&done, eq, t6, Operand(t5)); in GenerateStoreExternalArray()
3997 __ srl(t6, t6, HeapNumber::kExponentShift); in GenerateStoreExternalArray()
3998 __ Subu(t6, t6, Operand(HeapNumber::kExponentBias)); in GenerateStoreExternalArray()
4000 __ slt(t2, t6, zero_reg); in GenerateStoreExternalArray()
4001 __ Movn(t3, zero_reg, t2); // Only if exponent is negative. in GenerateStoreExternalArray()
4002 __ Branch(&done, lt, t6, Operand(zero_reg)); in GenerateStoreExternalArray()
4005 __ slti(t1, t6, meaningfull_bits - 1); in GenerateStoreExternalArray()
4006 __ li(t2, min_value); in GenerateStoreExternalArray()
4007 __ Movz(t3, t2, t1); // Only if t6 is ge meaningfull_bits - 1. in GenerateStoreExternalArray()
4008 __ Branch(&done, ge, t6, Operand(meaningfull_bits - 1)); in GenerateStoreExternalArray()
4010 __ And(t5, t3, Operand(HeapNumber::kSignMask)); in GenerateStoreExternalArray()
4011 __ And(t3, t3, Operand(HeapNumber::kMantissaMask)); in GenerateStoreExternalArray()
4012 __ Or(t3, t3, Operand(1u << HeapNumber::kMantissaBitsInTopWord)); in GenerateStoreExternalArray()
4014 __ li(t9, HeapNumber::kMantissaBitsInTopWord); in GenerateStoreExternalArray()
4015 __ subu(t6, t9, t6); in GenerateStoreExternalArray()
4016 __ slt(t1, t6, zero_reg); in GenerateStoreExternalArray()
4017 __ srlv(t2, t3, t6); in GenerateStoreExternalArray()
4018 __ Movz(t3, t2, t1); // Only if t6 is non-negative. in GenerateStoreExternalArray()
4019 __ Branch(&sign, ge, t6, Operand(zero_reg)); in GenerateStoreExternalArray()
4021 __ subu(t6, zero_reg, t6); in GenerateStoreExternalArray()
4022 __ sllv(t3, t3, t6); in GenerateStoreExternalArray()
4023 __ li(t9, meaningfull_bits); in GenerateStoreExternalArray()
4024 __ subu(t6, t9, t6); in GenerateStoreExternalArray()
4025 __ srlv(t4, t4, t6); in GenerateStoreExternalArray()
4026 __ or_(t3, t3, t4); in GenerateStoreExternalArray()
4028 __ bind(&sign); in GenerateStoreExternalArray()
4029 __ subu(t2, zero_reg, t3); in GenerateStoreExternalArray()
4030 __ Movn(t3, t2, t5); // Negate only if the sign bit (t5) is set. in GenerateStoreExternalArray()
4032 __ bind(&done); in GenerateStoreExternalArray()
4039 __ srl(t8, key, 1); in GenerateStoreExternalArray()
4040 __ addu(t8, a3, t8); in GenerateStoreExternalArray()
4041 __ sb(t3, MemOperand(t8, 0)); in GenerateStoreExternalArray()
4045 __ addu(t8, a3, key); in GenerateStoreExternalArray()
4046 __ sh(t3, MemOperand(t8, 0)); in GenerateStoreExternalArray()
4050 __ sll(t8, key, 1); in GenerateStoreExternalArray()
4051 __ addu(t8, a3, t8); in GenerateStoreExternalArray()
4052 __ sw(t3, MemOperand(t8, 0)); in GenerateStoreExternalArray()
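The FPU-less truncation above follows the same shape: zero for zero/NaN/infinity and for negative exponents (|value| < 1), a min_value sentinel once the exponent exceeds the meaningful bits, and otherwise a shift of the implicit-1 mantissa followed by a conditional negate. A C++ sketch under those assumptions (INT32_MIN stands in for the stub's min_value):

    #include <cstdint>

    static int32_t TruncateDoubleBits(uint32_t hi, uint32_t lo) {
      uint32_t biased = (hi >> 20) & 0x7FF;
      if (biased == 0 || biased == 0x7FF) return 0;  // Zero, NaN, infinity.
      int exponent = static_cast<int>(biased) - 1023;
      if (exponent < 0) return 0;                    // |value| < 1.
      if (exponent >= 31) return INT32_MIN;          // Out-of-range sentinel.
      uint32_t mantissa = (hi & 0xFFFFFu) | 0x100000u;  // Implicit leading 1.
      int shift = 20 - exponent;                     // Mantissa bits in hi word.
      uint32_t magnitude = shift >= 0
          ? mantissa >> shift
          : (mantissa << -shift) | (lo >> (32 + shift));
      return (hi & 0x80000000u) ? -static_cast<int32_t>(magnitude)
                                : static_cast<int32_t>(magnitude);
    }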
4070 __ bind(&slow); in GenerateStoreExternalArray()
4071 __ IncrementCounter( in GenerateStoreExternalArray()
4082 __ Jump(slow_ic, RelocInfo::CODE_TARGET); in GenerateStoreExternalArray()
4085 __ bind(&miss_force_generic); in GenerateStoreExternalArray()
4095 __ Jump(miss_ic, RelocInfo::CODE_TARGET); in GenerateStoreExternalArray()
4111 __ JumpIfNotSmi(a0, &miss_force_generic, at, USE_DELAY_SLOT); in GenerateLoadFastElement()
4115 __ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset)); in GenerateLoadFastElement()
4116 __ AssertFastElements(a2); in GenerateLoadFastElement()
4119 __ lw(a3, FieldMemOperand(a2, FixedArray::kLengthOffset)); in GenerateLoadFastElement()
4120 __ Branch(USE_DELAY_SLOT, &miss_force_generic, hs, a0, Operand(a3)); in GenerateLoadFastElement()
4123 __ Addu(a3, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in GenerateLoadFastElement()
4125 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize); in GenerateLoadFastElement()
4126 __ Addu(t0, t0, a3); in GenerateLoadFastElement()
4127 __ lw(t0, MemOperand(t0)); in GenerateLoadFastElement()
4128 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex); in GenerateLoadFastElement()
4129 __ Branch(&miss_force_generic, eq, t0, Operand(t1)); in GenerateLoadFastElement()
4130 __ Ret(USE_DELAY_SLOT); in GenerateLoadFastElement()
4131 __ mov(v0, t0); in GenerateLoadFastElement()
4133 __ bind(&miss_force_generic); in GenerateLoadFastElement()
4136 __ Jump(stub, RelocInfo::CODE_TARGET); in GenerateLoadFastElement()
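GenerateLoadFastElement reduces to three checks: the key is a smi, it is below the backing store's length, and the slot does not hold the-hole (a hole means the element may live up the prototype chain, which only the generic path can resolve). A minimal sketch with illustrative types:

    #include <cstddef>

    static void* const kTheHole = reinterpret_cast<void*>(-1);  // Stand-in.

    // Returns nullptr when the generic stub must take over.
    static void* LoadFastElement(void** elements, size_t length, size_t index) {
      if (index >= length) return nullptr;    // Unsigned bounds check (hs).
      void* value = elements[index];
      if (value == kTheHole) return nullptr;  // Hole: miss_force_generic.
      return value;
    }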
4163 __ JumpIfNotSmi(key_reg, &miss_force_generic); in GenerateLoadFastDoubleElement()
4166 __ lw(elements_reg, in GenerateLoadFastDoubleElement()
4170 __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset)); in GenerateLoadFastDoubleElement()
4171 __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch)); in GenerateLoadFastDoubleElement()
4174 __ sll(scratch2, key_reg, kDoubleSizeLog2 - kSmiTagSize); in GenerateLoadFastDoubleElement()
4175 __ Addu(indexed_double_offset, elements_reg, Operand(scratch2)); in GenerateLoadFastDoubleElement()
4177 __ lw(scratch, FieldMemOperand(indexed_double_offset, upper_32_offset)); in GenerateLoadFastDoubleElement()
4178 __ Branch(&miss_force_generic, eq, scratch, Operand(kHoleNanUpper32)); in GenerateLoadFastDoubleElement()
4181 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); in GenerateLoadFastDoubleElement()
4182 __ AllocateHeapNumber(heap_number_reg, scratch2, scratch3, in GenerateLoadFastDoubleElement()
4187 __ sw(scratch, FieldMemOperand(heap_number_reg, in GenerateLoadFastDoubleElement()
4189 __ lw(scratch, FieldMemOperand(indexed_double_offset, in GenerateLoadFastDoubleElement()
4191 __ sw(scratch, FieldMemOperand(heap_number_reg, in GenerateLoadFastDoubleElement()
4194 __ mov(v0, heap_number_reg); in GenerateLoadFastDoubleElement()
4195 __ Ret(); in GenerateLoadFastDoubleElement()
4197 __ bind(&slow_allocate_heapnumber); in GenerateLoadFastDoubleElement()
4200 __ Jump(slow_ic, RelocInfo::CODE_TARGET); in GenerateLoadFastDoubleElement()
4202 __ bind(&miss_force_generic); in GenerateLoadFastDoubleElement()
4205 __ Jump(miss_ic, RelocInfo::CODE_TARGET); in GenerateLoadFastDoubleElement()
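Fast double arrays mark absent entries with a NaN whose upper 32 bits are a reserved pattern (kHoleNanUpper32), so the hole check above is a single word compare; a hit then boxes the raw 64 bits into a freshly allocated HeapNumber. A sketch of the check:

    #include <cstdint>
    #include <cstring>

    static bool IsHoleNan(double d, uint32_t hole_nan_upper32) {
      uint64_t bits;
      std::memcpy(&bits, &d, sizeof bits);  // Type-pun without UB.
      return static_cast<uint32_t>(bits >> 32) == hole_nan_upper32;
    }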
4237 __ JumpIfNotSmi(key_reg, &miss_force_generic); in GenerateStoreFastElement()
4240 __ JumpIfNotSmi(value_reg, &transition_elements_kind); in GenerateStoreFastElement()
4244 __ lw(elements_reg, in GenerateStoreFastElement()
4247 __ lw(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); in GenerateStoreFastElement()
4249 __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset)); in GenerateStoreFastElement()
4253 __ Branch(&grow, hs, key_reg, Operand(scratch)); in GenerateStoreFastElement()
4255 __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch)); in GenerateStoreFastElement()
4259 __ CheckMap(elements_reg, in GenerateStoreFastElement()
4265 __ bind(&finish_store); in GenerateStoreFastElement()
4268 __ Addu(scratch, in GenerateStoreFastElement()
4272 __ sll(scratch2, key_reg, kPointerSizeLog2 - kSmiTagSize); in GenerateStoreFastElement()
4273 __ Addu(scratch, scratch, scratch2); in GenerateStoreFastElement()
4274 __ sw(value_reg, MemOperand(scratch)); in GenerateStoreFastElement()
4277 __ Addu(scratch, in GenerateStoreFastElement()
4281 __ sll(scratch2, key_reg, kPointerSizeLog2 - kSmiTagSize); in GenerateStoreFastElement()
4282 __ Addu(scratch, scratch, scratch2); in GenerateStoreFastElement()
4283 __ sw(value_reg, MemOperand(scratch)); in GenerateStoreFastElement()
4284 __ mov(receiver_reg, value_reg); in GenerateStoreFastElement()
4286 __ RecordWrite(elements_reg, // Object. in GenerateStoreFastElement()
4294 __ Ret(); in GenerateStoreFastElement()
4296 __ bind(&miss_force_generic); in GenerateStoreFastElement()
4299 __ Jump(ic, RelocInfo::CODE_TARGET); in GenerateStoreFastElement()
4301 __ bind(&transition_elements_kind); in GenerateStoreFastElement()
4303 __ Jump(ic_miss, RelocInfo::CODE_TARGET); in GenerateStoreFastElement()
4307 __ bind(&grow); in GenerateStoreFastElement()
4311 __ Branch(&miss_force_generic, ne, key_reg, Operand(scratch)); in GenerateStoreFastElement()
4315 __ lw(length_reg, in GenerateStoreFastElement()
4317 __ lw(elements_reg, in GenerateStoreFastElement()
4319 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); in GenerateStoreFastElement()
4320 __ Branch(&check_capacity, ne, elements_reg, Operand(at)); in GenerateStoreFastElement()
4323 __ AllocateInNewSpace(size, elements_reg, scratch, scratch2, &slow, in GenerateStoreFastElement()
4326 __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex); in GenerateStoreFastElement()
4327 __ sw(scratch, FieldMemOperand(elements_reg, JSObject::kMapOffset)); in GenerateStoreFastElement()
4328 __ li(scratch, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements))); in GenerateStoreFastElement()
4329 __ sw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset)); in GenerateStoreFastElement()
4330 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex); in GenerateStoreFastElement()
4332 __ sw(scratch, FieldMemOperand(elements_reg, FixedArray::SizeFor(i))); in GenerateStoreFastElement()
4336 __ sw(value_reg, FieldMemOperand(elements_reg, FixedArray::SizeFor(0))); in GenerateStoreFastElement()
4339 __ sw(elements_reg, in GenerateStoreFastElement()
4341 __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg, in GenerateStoreFastElement()
4346 __ li(length_reg, Operand(Smi::FromInt(1))); in GenerateStoreFastElement()
4347 __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); in GenerateStoreFastElement()
4348 __ Ret(); in GenerateStoreFastElement()
4350 __ bind(&check_capacity); in GenerateStoreFastElement()
4352 __ CheckMap(elements_reg, in GenerateStoreFastElement()
4358 __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset)); in GenerateStoreFastElement()
4359 __ Branch(&slow, hs, length_reg, Operand(scratch)); in GenerateStoreFastElement()
4362 __ Addu(length_reg, length_reg, Operand(Smi::FromInt(1))); in GenerateStoreFastElement()
4363 __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); in GenerateStoreFastElement()
4364 __ jmp(&finish_store); in GenerateStoreFastElement()
4366 __ bind(&slow); in GenerateStoreFastElement()
4368 __ Jump(ic_slow, RelocInfo::CODE_TARGET); in GenerateStoreFastElement()
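The &grow tail only fires for a store at key == length (a pure append): an empty backing store gets a small preallocated FixedArray (JSArray::kPreallocatedArrayElements) hole-filled up front, an existing one just bumps the length if capacity allows, and anything else falls back to the slow stub. A simplified sketch, ignoring maps and write barriers:

    #include <cstddef>

    static void* const kHole = reinterpret_cast<void*>(-1);  // Stand-in.
    struct FastArray { void** elements; size_t capacity; size_t length; };

    // Returns false when the slow/generic stub must handle the store.
    static bool StoreWithGrow(FastArray* a, size_t key, void* value) {
      if (key != a->length) return false;  // Only appends may grow.
      if (a->capacity == 0) {              // Empty backing store: allocate.
        const size_t kPreallocated = 4;    // Mirrors the stub's constant.
        a->elements = new void*[kPreallocated];
        for (size_t i = 0; i < kPreallocated; ++i) a->elements[i] = kHole;
        a->capacity = kPreallocated;
      } else if (a->length >= a->capacity) {
        return false;                      // Full: let the runtime grow it.
      }
      a->elements[a->length++] = value;
      return true;
    }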
4403 __ JumpIfNotSmi(key_reg, &miss_force_generic); in GenerateStoreFastDoubleElement()
4405 __ lw(elements_reg, in GenerateStoreFastDoubleElement()
4410 __ lw(scratch1, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); in GenerateStoreFastDoubleElement()
4412 __ lw(scratch1, in GenerateStoreFastDoubleElement()
4418 __ Branch(&grow, hs, key_reg, Operand(scratch1)); in GenerateStoreFastDoubleElement()
4420 __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch1)); in GenerateStoreFastDoubleElement()
4423 __ bind(&finish_store); in GenerateStoreFastDoubleElement()
4425 __ StoreNumberToDoubleElements(value_reg, in GenerateStoreFastDoubleElement()
4435 __ Ret(USE_DELAY_SLOT); in GenerateStoreFastDoubleElement()
4436 __ mov(v0, value_reg); // In delay slot. in GenerateStoreFastDoubleElement()
4439 __ bind(&miss_force_generic); in GenerateStoreFastDoubleElement()
4442 __ Jump(ic, RelocInfo::CODE_TARGET); in GenerateStoreFastDoubleElement()
4444 __ bind(&transition_elements_kind); in GenerateStoreFastDoubleElement()
4446 __ Jump(ic_miss, RelocInfo::CODE_TARGET); in GenerateStoreFastDoubleElement()
4450 __ bind(&grow); in GenerateStoreFastDoubleElement()
4454 __ Branch(&miss_force_generic, ne, key_reg, Operand(scratch1)); in GenerateStoreFastDoubleElement()
4458 __ JumpIfSmi(value_reg, &value_is_smi); in GenerateStoreFastDoubleElement()
4459 __ lw(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); in GenerateStoreFastDoubleElement()
4460 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); in GenerateStoreFastDoubleElement()
4461 __ Branch(&transition_elements_kind, ne, scratch1, Operand(at)); in GenerateStoreFastDoubleElement()
4462 __ bind(&value_is_smi); in GenerateStoreFastDoubleElement()
4466 __ lw(length_reg, in GenerateStoreFastDoubleElement()
4468 __ lw(elements_reg, in GenerateStoreFastDoubleElement()
4470 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); in GenerateStoreFastDoubleElement()
4471 __ Branch(&check_capacity, ne, elements_reg, Operand(at)); in GenerateStoreFastDoubleElement()
4474 __ AllocateInNewSpace(size, elements_reg, scratch1, scratch2, &slow, in GenerateStoreFastDoubleElement()
4479 __ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex); in GenerateStoreFastDoubleElement()
4480 __ sw(scratch1, FieldMemOperand(elements_reg, JSObject::kMapOffset)); in GenerateStoreFastDoubleElement()
4481 __ li(scratch1, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements))); in GenerateStoreFastDoubleElement()
4482 __ sw(scratch1, in GenerateStoreFastDoubleElement()
4486 __ sw(elements_reg, in GenerateStoreFastDoubleElement()
4488 __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg, in GenerateStoreFastDoubleElement()
4493 __ li(length_reg, Operand(Smi::FromInt(1))); in GenerateStoreFastDoubleElement()
4494 __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); in GenerateStoreFastDoubleElement()
4495 __ lw(elements_reg, in GenerateStoreFastDoubleElement()
4497 __ jmp(&finish_store); in GenerateStoreFastDoubleElement()
4499 __ bind(&check_capacity); in GenerateStoreFastDoubleElement()
4501 __ lw(scratch1, in GenerateStoreFastDoubleElement()
4503 __ Branch(&slow, hs, length_reg, Operand(scratch1)); in GenerateStoreFastDoubleElement()
4506 __ Addu(length_reg, length_reg, Operand(Smi::FromInt(1))); in GenerateStoreFastDoubleElement()
4507 __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); in GenerateStoreFastDoubleElement()
4508 __ jmp(&finish_store); in GenerateStoreFastDoubleElement()
4510 __ bind(&slow); in GenerateStoreFastDoubleElement()
4512 __ Jump(ic_slow, RelocInfo::CODE_TARGET); in GenerateStoreFastDoubleElement()
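The double-array store funnels everything through StoreNumberToDoubleElements: a smi is untagged and converted to a double, a heap number is copied as raw 64-bit data, and anything else bails out to &transition_elements_kind. A hedged sketch with illustrative types:

    #include <cstdint>

    // Returns false for non-numbers, i.e. the elements-kind transition case.
    static bool StoreNumberToDouble(double* slot, bool is_smi,
                                    int32_t smi_value, const double* boxed) {
      if (is_smi) { *slot = static_cast<double>(smi_value); return true; }
      if (boxed == nullptr) return false;  // Not a heap number: transition.
      *slot = *boxed;                      // Copy the raw IEEE-754 value.
      return true;
    }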
4517 #undef __