Lines Matching refs:a3
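
Each entry below pairs a line number from the cross-referenced source file (evidently one of V8's MIPS code-stub files) with the matching statement and its enclosing function. Every match uses the `__` shorthand that V8's stub sources conventionally define over the active MacroAssembler. As a minimal, non-compilable sketch of that convention (it assumes V8's internal MIPS headers, and the helper function name is hypothetical, not part of the listing):

#define __ ACCESS_MASM(masm)

// Hypothetical helper illustrating the pattern seen throughout the listing:
// each `__ op(...)` line expands to a MacroAssembler call that emits MIPS
// code, with a3 serving as a general-purpose scratch/argument register.
static void ExampleUseOfA3(MacroAssembler* masm) {
  __ lw(a3, MemOperand(sp, 0));            // load the word at *sp into a3
  __ Addu(a3, a3, Operand(kPointerSize));  // a3 += kPointerSize
  __ sw(a3, MemOperand(sp, 0));            // store a3 back to *sp
}

#undef __
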

93  __ pop(a3); in Generate()
122 __ sw(a3, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset)); in Generate()
129 __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset)); in Generate()
130 __ Addu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
133 __ sw(a3, FieldMemOperand(v0, JSFunction::kCodeEntryOffset)); in Generate()
139 __ Push(cp, a3, t0); in Generate()
158 __ lw(a3, MemOperand(sp, 0)); in Generate()
169 __ sw(a3, MemOperand(v0, Context::SlotOffset(Context::CLOSURE_INDEX))); in Generate()
203 __ lw(a3, MemOperand(sp, 0)); in Generate()
219 __ JumpIfNotSmi(a3, &after_sentinel); in Generate()
222 __ Assert(eq, message, a3, Operand(zero_reg)); in Generate()
224 __ lw(a3, GlobalObjectOperand()); in Generate()
225 __ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalContextOffset)); in Generate()
226 __ lw(a3, ContextOperand(a3, Context::CLOSURE_INDEX)); in Generate()
231 __ sw(a3, ContextOperand(v0, Context::CLOSURE_INDEX)); in Generate()
282 __ lw(a1, FieldMemOperand(a3, i)); in GenerateFastCloneShallowArrayCommon()
290 __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset)); in GenerateFastCloneShallowArrayCommon()
296 __ CopyFields(a2, a3, a1.bit(), elements_size / kPointerSize); in GenerateFastCloneShallowArrayCommon()
310 __ lw(a3, MemOperand(sp, 2 * kPointerSize)); in Generate()
312 __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate()
314 __ Addu(t0, a3, t0); in Generate()
315 __ lw(a3, MemOperand(t0)); in Generate()
317 __ Branch(&slow_case, eq, a3, Operand(t1)); in Generate()
322 __ lw(v0, FieldMemOperand(a3, JSArray::kElementsOffset)); in Generate()
358 __ push(a3); in Generate()
359 __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset)); in Generate()
360 __ lw(a3, FieldMemOperand(a3, HeapObject::kMapOffset)); in Generate()
362 __ Assert(eq, message, a3, Operand(at)); in Generate()
363 __ pop(a3); in Generate()
387 __ lw(a3, MemOperand(sp, 3 * kPointerSize)); in Generate()
389 __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate()
391 __ Addu(a3, t0, a3); in Generate()
392 __ lw(a3, MemOperand(a3)); in Generate()
394 __ Branch(&slow_case, eq, a3, Operand(t0)); in Generate()
399 __ lw(a0, FieldMemOperand(a3, HeapObject::kMapOffset)); in Generate()
407 __ lw(a1, FieldMemOperand(a3, i)); in Generate()
531 __ Move(a2, a3, f14); in LoadSmis()
538 ConvertToDoubleStub stub1(a3, a2, scratch1, scratch2); in LoadSmis()
560 a0, f14, a2, a3, heap_number_map, scratch1, scratch2, slow); in LoadOperands()
1005 __ Move(f14, a2, a3); in CallCCodeForDoubleOperation()
1034 sign_.is(a3)) { in IsPregenerated()
1039 scratch_.is(a3) && in IsPregenerated()
1051 WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3); in GenerateFixedRegStubsAheadOfTime()
1052 WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0); in GenerateFixedRegStubsAheadOfTime()
1251 __ lw(a3, FieldMemOperand(lhs, HeapNumber::kValueOffset + 4)); in EmitSmiNonsmiComparison()
1291 ConvertToDoubleStub stub2(a3, a2, t6, t5); in EmitSmiNonsmiComparison()
1320 __ mov(t3, a3); // a3 has MS 32 bits of lhs. in EmitNanCheck()
1387 __ mov(t3, a3); // a3 has MS 32 bits of lhs. in EmitTwoNonNanDoubleComparison()
1425 __ Move(f14, a2, a3); in EmitTwoNonNanDoubleComparison()
1478 __ GetObjectType(rhs, a3, a3); in EmitStrictTwoHeapObjectCompare()
1479 __ Branch(&return_not_equal, greater, a3, Operand(FIRST_SPEC_OBJECT_TYPE)); in EmitStrictTwoHeapObjectCompare()
1482 __ Branch(&return_not_equal, eq, a3, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
1488 __ And(t2, a2, Operand(a3)); in EmitStrictTwoHeapObjectCompare()
1500 __ GetObjectType(lhs, a3, a2); in EmitCheckForTwoHeapNumbers()
1504 __ Branch(slow, ne, a3, Operand(a2)); in EmitCheckForTwoHeapNumbers()
1514 __ lw(a3, FieldMemOperand(lhs, HeapNumber::kValueOffset + 4)); in EmitCheckForTwoHeapNumbers()
1544 __ GetObjectType(rhs, a3, a3); in EmitCheckForSymbolsOrObjects()
1545 __ Branch(not_both_strings, ge, a3, Operand(FIRST_NONSTRING_TYPE)); in EmitCheckForSymbolsOrObjects()
1546 __ And(at, a3, Operand(kIsSymbolMask)); in EmitCheckForSymbolsOrObjects()
1556 __ GetObjectType(rhs, a2, a3); in EmitCheckForSymbolsOrObjects()
1557 __ Branch(not_both_strings, lt, a3, Operand(FIRST_SPEC_OBJECT_TYPE)); in EmitCheckForSymbolsOrObjects()
1562 __ lw(a3, FieldMemOperand(lhs, HeapObject::kMapOffset)); in EmitCheckForSymbolsOrObjects()
1564 __ lbu(a3, FieldMemOperand(a3, Map::kBitFieldOffset)); in EmitCheckForSymbolsOrObjects()
1565 __ and_(a0, a2, a3); in EmitCheckForSymbolsOrObjects()
1674 GenerateLookupNumberStringCache(masm, a1, v0, a2, a3, t0, false, &runtime); in Generate()
1816 __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs_, rhs_, a2, a3, &slow); in Generate()
1818 __ IncrementCounter(isolate->counters()->string_compare_native(), 1, a2, a3); in Generate()
1824 a3, in Generate()
1831 a3, in Generate()
1964 __ Move(a3, tos_); in GenerateTypeTransition()
1967 __ Push(a3, a2, a1); in GenerateTypeTransition()
2156 __ AllocateHeapNumber(a1, a2, a3, t2, &slow_allocate_heapnumber); in GenerateHeapNumberCodeSub()
2169 __ lw(a3, FieldMemOperand(a0, HeapNumber::kMantissaOffset)); in GenerateHeapNumberCodeSub()
2171 __ sw(a3, FieldMemOperand(a1, HeapNumber::kMantissaOffset)); in GenerateHeapNumberCodeSub()
2187 __ ConvertToInt32(a0, a1, a2, a3, f0, slow); in GenerateHeapNumberCodeBitNot()
2204 __ AllocateHeapNumber(a2, a3, t0, t2, &slow_allocate_heapnumber); in GenerateHeapNumberCodeBitNot()
2220 __ ConvertToInt32(v0, a1, a3, t0, f0, &impossible); in GenerateHeapNumberCodeBitNot()
2238 WriteInt32ToHeapNumberStub stub(a1, v0, a2, a3); in GenerateHeapNumberCodeBitNot()
2629 __ SmiUntag(a3, left); in GenerateFPOperation()
2635 a3, in GenerateFPOperation()
2655 __ Or(a2, a3, Operand(a2)); in GenerateFPOperation()
2658 __ Xor(a2, a3, Operand(a2)); in GenerateFPOperation()
2661 __ And(a2, a3, Operand(a2)); in GenerateFPOperation()
2666 __ srav(a2, a3, a2); in GenerateFPOperation()
2671 __ srlv(a2, a3, a2); in GenerateFPOperation()
2685 __ sllv(a2, a3, a2); in GenerateFPOperation()
2691 __ Addu(a3, a2, Operand(0x40000000)); in GenerateFPOperation()
2692 __ Branch(&result_not_a_smi, lt, a3, Operand(zero_reg)); in GenerateFPOperation()
2732 WriteInt32ToHeapNumberStub stub(a2, v0, a3, a0); in GenerateFPOperation()
2886 a3, in GenerateInt32Stub()
3030 a3, in GenerateInt32Stub()
3051 __ Or(a2, a3, Operand(a2)); in GenerateInt32Stub()
3054 __ Xor(a2, a3, Operand(a2)); in GenerateInt32Stub()
3057 __ And(a2, a3, Operand(a2)); in GenerateInt32Stub()
3061 __ srav(a2, a3, a2); in GenerateInt32Stub()
3065 __ srlv(a2, a3, a2); in GenerateInt32Stub()
3090 __ sllv(a2, a3, a2); in GenerateInt32Stub()
3134 WriteInt32ToHeapNumberStub stub(a2, v0, a3, a0); in GenerateInt32Stub()
3359 __ Move(a2, a3, f4); in Generate()
3372 __ lw(a3, FieldMemOperand(a0, HeapNumber::kValueOffset + 4)); in Generate()
3375 __ Move(a2, a3, f4); in Generate()
3382 __ Xor(a1, a2, a3); in Generate()
3429 __ Branch(&calculate, ne, a3, Operand(t1)); in Generate()
3466 __ Push(cache_entry, a2, a3); in Generate()
3472 __ Pop(cache_entry, a2, a3); in Generate()
3478 __ sw(a3, MemOperand(cache_entry, 1 * kPointerSize)); in Generate()
3913 __ lw(a3, MemOperand(a2)); in GenerateCore()
3914 __ Subu(a3, a3, Operand(1)); in GenerateCore()
3915 __ sw(a3, MemOperand(a2)); in GenerateCore()
3952 __ LoadRoot(a3, Heap::kTheHoleValueRootIndex); in GenerateCore()
3956 __ sw(a3, MemOperand(t0)); in GenerateCore()
4251 Register map = a3; // Map of the object. in Generate()
4432 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); in GenerateReadElement()
4435 a3, in GenerateReadElement()
4444 __ subu(a3, a0, a1); in GenerateReadElement()
4445 __ sll(t3, a3, kPointerSizeLog2 - kSmiTagSize); in GenerateReadElement()
4446 __ Addu(a3, fp, Operand(t3)); in GenerateReadElement()
4447 __ lw(v0, MemOperand(a3, kDisplacement)); in GenerateReadElement()
4458 __ subu(a3, a0, a1); in GenerateReadElement()
4459 __ sll(t3, a3, kPointerSizeLog2 - kSmiTagSize); in GenerateReadElement()
4460 __ Addu(a3, a2, Operand(t3)); in GenerateReadElement()
4461 __ lw(v0, MemOperand(a3, kDisplacement)); in GenerateReadElement()
4478 __ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in GenerateNewNonStrictSlow()
4479 __ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset)); in GenerateNewNonStrictSlow()
4486 __ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset)); in GenerateNewNonStrictSlow()
4489 __ Addu(a3, a3, Operand(t3)); in GenerateNewNonStrictSlow()
4490 __ addiu(a3, a3, StandardFrameConstants::kCallerSPOffset); in GenerateNewNonStrictSlow()
4491 __ sw(a3, MemOperand(sp, 1 * kPointerSize)); in GenerateNewNonStrictSlow()
4513 __ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in GenerateNewNonStrictFast()
4514 __ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset)); in GenerateNewNonStrictFast()
4527 __ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset)); in GenerateNewNonStrictFast()
4529 __ Addu(a3, a3, Operand(t6)); in GenerateNewNonStrictFast()
4530 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset)); in GenerateNewNonStrictFast()
4531 __ sw(a3, MemOperand(sp, 1 * kPointerSize)); in GenerateNewNonStrictFast()
4565 __ AllocateInNewSpace(t5, v0, a3, t0, &runtime, TAG_OBJECT); in GenerateNewNonStrictFast()
4592 __ lw(a3, FieldMemOperand(t0, i)); in GenerateNewNonStrictFast()
4593 __ sw(a3, FieldMemOperand(v0, i)); in GenerateNewNonStrictFast()
4598 __ lw(a3, MemOperand(sp, 2 * kPointerSize)); in GenerateNewNonStrictFast()
4601 __ sw(a3, FieldMemOperand(v0, kCalleeOffset)); in GenerateNewNonStrictFast()
4625 __ mov(a3, t0); in GenerateNewNonStrictFast()
4655 __ Addu(a3, t0, Operand(t6)); in GenerateNewNonStrictFast()
4656 __ Addu(a3, a3, Operand(kParameterMapHeaderSize)); in GenerateNewNonStrictFast()
4673 __ Addu(t6, a3, t1); in GenerateNewNonStrictFast()
4685 __ sw(t1, FieldMemOperand(a3, FixedArray::kMapOffset)); in GenerateNewNonStrictFast()
4686 __ sw(a2, FieldMemOperand(a3, FixedArray::kLengthOffset)); in GenerateNewNonStrictFast()
4699 __ Addu(t1, a3, Operand(t6)); in GenerateNewNonStrictFast()
4724 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); in GenerateNewStrict()
4727 a3, in GenerateNewStrict()
4739 __ Addu(a3, a2, Operand(at)); in GenerateNewStrict()
4741 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset)); in GenerateNewStrict()
4742 __ sw(a3, MemOperand(sp, 1 * kPointerSize)); in GenerateNewStrict()
4759 a3, in GenerateNewStrict()
4771 __ CopyFields(v0, t0, a3.bit(), JSObject::kHeaderSize / kPointerSize); in GenerateNewStrict()
4789 __ LoadRoot(a3, Heap::kFixedArrayMapRootIndex); in GenerateNewStrict()
4790 __ sw(a3, FieldMemOperand(t0, FixedArray::kMapOffset)); in GenerateNewStrict()
4803 __ lw(a3, MemOperand(a2)); in GenerateNewStrict()
4805 __ sw(a3, MemOperand(t0)); in GenerateNewStrict()
4913 __ lw(a3, FieldMemOperand(subject, String::kLengthOffset)); in Generate()
4923 __ Branch(&runtime, ls, a3, Operand(a0)); in Generate()
5018 __ sra(a3, a0, 2); // a3 is 1 for ASCII, 0 for UC16 (used below). in Generate()
5088 __ Xor(a3, a3, Operand(1)); // 1 for 2-byte str, 0 for 1-byte. in Generate()
5098 __ sllv(t1, t0, a3); in Generate()
5100 __ sllv(t1, a1, a3); in Generate()
5105 __ sllv(t1, t2, a3); in Generate()
5106 __ addu(a3, t0, t1); in Generate()
5213 __ lw(a3, MemOperand(a2, 0)); in Generate()
5216 __ sll(a3, a3, kSmiTagSize); // Convert to Smi. in Generate()
5217 __ sw(a3, MemOperand(a0, 0)); in Generate()
5279 a3, // Scratch register. in Generate()
5292 __ Addu(a3, v0, Operand(JSRegExpResult::kSize)); in Generate()
5295 __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset)); in Generate()
5315 __ sw(a2, FieldMemOperand(a3, HeapObject::kMapOffset)); in Generate()
5318 __ sw(t2, FieldMemOperand(a3, FixedArray::kLengthOffset)); in Generate()
5321 __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate()
5329 __ addu(t1, t1, a3); // Point past last element to store. in Generate()
5331 __ Branch(&done, ge, a3, Operand(t1)); // Break when a3 past end of elem. in Generate()
5332 __ sw(a2, MemOperand(a3)); in Generate()
5334 __ addiu(a3, a3, kPointerSize); // In branch delay slot. in Generate()
5358 __ lw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset)); in GenerateRecordCallTarget()
5362 __ Branch(&done, eq, a3, Operand(a1)); in GenerateRecordCallTarget()
5364 __ Branch(&done, eq, a3, Operand(at)); in GenerateRecordCallTarget()
5370 __ Branch(USE_DELAY_SLOT, &done, eq, a3, Operand(at)); in GenerateRecordCallTarget()
5444 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY); in Generate()
5458 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION); in Generate()
5474 __ GetObjectType(a1, a3, a3); in Generate()
5475 __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE)); in Generate()
5492 __ Branch(&non_function_call, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE)); in Generate()
5493 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); in Generate()
5497 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); in Generate()
6065 __ lw(a3, MemOperand(sp, kFromOffset)); in Generate()
6073 __ UntagAndJumpIfNotSmi(a3, a3, &runtime); in Generate()
6076 __ Branch(&runtime, lt, a3, Operand(zero_reg)); // From < 0. in Generate()
6078 __ Branch(&runtime, gt, a3, Operand(a2)); // Fail if from > to. in Generate()
6079 __ Subu(a2, a2, a3); in Generate()
6109 __ Addu(v0, v0, Operand(a3)); in Generate()
6110 __ lbu(a3, FieldMemOperand(v0, SeqAsciiString::kHeaderSize)); in Generate()
6116 masm, a3, t0, a1, t1, t2, t3, t4, &make_two_character_string); in Generate()
6123 __ sh(a3, FieldMemOperand(v0, SeqAsciiString::kHeaderSize)); in Generate()
6158 __ Addu(a3, a3, t0); in Generate()
6193 __ sll(a3, a3, 1); in Generate()
6195 __ sw(a3, FieldMemOperand(v0, SlicedString::kOffsetOffset)); in Generate()
6235 __ Addu(t1, t1, a3); in Generate()
6246 masm, a1, t1, a2, a3, t0, t2, t3, t4, COPY_ASCII | DEST_ALWAYS_ALIGNED); in Generate()
6255 __ sll(t0, a3, 1); in Generate()
6266 masm, a1, t1, a2, a3, t0, t2, t3, t4, DEST_ALWAYS_ALIGNED); in Generate()
6270 __ IncrementCounter(counters->sub_string_native(), 1, a3, t0); in Generate()
6418 __ JumpIfNotBothSequentialAsciiStrings(a1, a0, a2, a3, &runtime); in Generate()
6421 __ IncrementCounter(counters->string_compare_native(), 1, a2, a3); in Generate()
6423 GenerateCompareFlatAsciiStrings(masm, a1, a0, a2, a3, t0, t1); in Generate()
6463 masm, 1 * kPointerSize, a0, a2, a3, t0, t1, &call_builtin); in Generate()
6468 masm, 0 * kPointerSize, a1, a2, a3, t0, t1, &call_builtin); in Generate()
6486 __ lw(a3, FieldMemOperand(a1, String::kLengthOffset)); in Generate()
6490 __ slt(t5, zero_reg, a3); // if (a3 > 0) t5 = 1. in Generate()
6494 __ IncrementCounter(counters->string_add_native(), 1, a2, a3); in Generate()
6502 __ sra(a3, a3, kSmiTagSize); in Generate()
6515 __ Addu(t2, a2, Operand(a3)); in Generate()
6532 __ lbu(a3, FieldMemOperand(a1, SeqAsciiString::kHeaderSize)); in Generate()
6538 masm, a2, a3, t2, t3, t0, t1, t5, &make_two_character_string); in Generate()
6539 __ IncrementCounter(counters->string_add_native(), 1, a2, a3); in Generate()
6551 __ IncrementCounter(counters->string_add_native(), 1, a2, a3); in Generate()
6585 __ IncrementCounter(counters->string_add_native(), 1, a2, a3); in Generate()
6684 StringHelper::GenerateCopyCharacters(masm, t2, a1, a3, t0, true); in Generate()
6685 __ IncrementCounter(counters->string_add_native(), 1, a2, a3); in Generate()
6699 StringHelper::GenerateCopyCharacters(masm, t2, a1, a3, t0, false); in Generate()
6701 __ IncrementCounter(counters->string_add_native(), 1, a2, a3); in Generate()
6863 Register tmp2 = a3; in GenerateSymbols()
6903 Register tmp2 = a3; in GenerateStrings()
7003 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset)); in GenerateKnownObjects()
7005 __ Branch(&miss, ne, a3, Operand(known_map_)); in GenerateKnownObjects()
7159 (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() | in GenerateNegativeLookup()
7233 a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) & in GeneratePositiveLookup()
7272 Register mask = a3; in Generate()
7366 { REG(a3), REG(t0), REG(t1), EMIT_REMEMBERED_SET },
7370 { REG(a1), REG(a2), REG(a3), EMIT_REMEMBERED_SET },
7371 { REG(a3), REG(a2), REG(a1), EMIT_REMEMBERED_SET },
7373 { REG(a2), REG(a1), REG(a3), EMIT_REMEMBERED_SET },
7374 { REG(a3), REG(a1), REG(a2), EMIT_REMEMBERED_SET },
7376 { REG(a3), REG(a2), REG(t0), EMIT_REMEMBERED_SET },
7377 { REG(a2), REG(a3), REG(t0), EMIT_REMEMBERED_SET },
7381 { REG(a2), REG(a3), REG(t5), EMIT_REMEMBERED_SET },
7382 { REG(a2), REG(a3), REG(t5), OMIT_REMEMBERED_SET },
7653 __ Push(a1, a3, a0); in Generate()
7662 __ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize); in Generate()
7676 __ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize); in Generate()
7685 __ StoreNumberToDoubleElements(a0, a3, a1, t1, t2, t3, t5, a2, in Generate()