Lines Matching refs:__
56 #ifdef __
169 #define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()-> // NOLINT macro
214 __ Bind(GetEntryLabel()); in EmitNativeCode()
250 __ Bind(GetEntryLabel()); in EmitNativeCode()
279 __ Bind(GetEntryLabel()); in EmitNativeCode()
286 __ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_); in EmitNativeCode()
314 __ B(GetExitLabel()); in EmitNativeCode()
336 __ Bind(GetEntryLabel()); in EmitNativeCode()
341 __ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_); in EmitNativeCode()
349 __ B(GetExitLabel()); in EmitNativeCode()
364 __ Bind(GetEntryLabel()); in EmitNativeCode()
392 __ Bind(GetEntryLabel()); in EmitNativeCode()
398 __ B(GetReturnLabel()); in EmitNativeCode()
400 __ B(arm64_codegen->GetLabelOf(successor_)); in EmitNativeCode()
438 __ Bind(GetEntryLabel()); in EmitNativeCode()
467 __ B(GetExitLabel()); in EmitNativeCode()
487 __ Bind(GetEntryLabel()); in EmitNativeCode()
491 __ Mov(calling_convention.GetRegisterAt(0), in EmitNativeCode()
509 __ Bind(GetEntryLabel()); in EmitNativeCode()
535 __ B(GetExitLabel()); in EmitNativeCode()
554 __ Bind(&table_start_); in EmitTable()
563 __ place(&literal); in EmitTable()
617 __ Bind(GetEntryLabel()); in EmitNativeCode()
655 __ Mov(free_reg.W(), index_reg); in EmitNativeCode()
668 __ Lsl(index_reg, index_reg, DataType::SizeShift(type)); in EmitNativeCode()
672 __ Add(index_reg, index_reg, Operand(offset_)); in EmitNativeCode()
730 __ B(GetExitLabel()); in EmitNativeCode()
784 __ Bind(GetEntryLabel()); in EmitNativeCode()
800 __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_)); in EmitNativeCode()
809 __ B(GetExitLabel()); in EmitNativeCode()
821 #undef __
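
The matches above come from the slow-path section of the ARM64 code generator, where `__` is defined as a shorthand that forwards to the VIXL macro assembler and is undefined again once the slow paths are emitted (the #define at 169 and the #undef at 821). A minimal sketch of that pattern, using a hypothetical Assembler class instead of the real CodeGeneratorARM64/VIXL types:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical stand-in for vixl::aarch64::MacroAssembler; only the call shape matters.
    class Assembler {
     public:
      void Bind(const char* label)    { std::printf("%s:\n", label); }
      void B(const char* label)       { std::printf("  b %s\n", label); }
      void Mov(int reg, uint32_t imm) { std::printf("  mov w%d, #%u\n", reg, imm); }
    };

    // The shorthand the listing is indexing: every "__ Foo(...)" line expands to
    // "GetAssembler()->Foo(...)" for the duration of one section of the file.
    #define __ GetAssembler()->

    class SlowPathSketch {
     public:
      void EmitNativeCode() {
        __ Bind("slow_path_entry");  // expands to GetAssembler()->Bind("slow_path_entry")
        __ Mov(0, 42u);
        __ B("slow_path_exit");
      }

     private:
      Assembler* GetAssembler() { return &assembler_; }
      Assembler assembler_;
    };

    #undef __  // scoped to this section, matching the #define/#undef pairs in the listing
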
958 #define __ GetVIXLAssembler()-> macro
974 __ Bind(slow_path_entry); in Finalize()
979 __ FinalizeCode(); in Finalize()
1119 __ Ldr(method, MemOperand(sp, 0)); in MaybeIncrementHotness()
1121 __ Ldrh(counter, MemOperand(method, ArtMethod::HotnessCountOffset().Int32Value())); in MaybeIncrementHotness()
1122 __ Add(counter, counter, 1); in MaybeIncrementHotness()
1124 __ Sub(counter, counter, Operand(counter, LSR, 16)); in MaybeIncrementHotness()
1125 __ Strh(counter, MemOperand(method, ArtMethod::HotnessCountOffset().Int32Value())); in MaybeIncrementHotness()
1138 __ Mov(temp, address); in MaybeIncrementHotness()
1139 __ Ldrh(counter, MemOperand(temp, ProfilingInfo::BaselineHotnessCountOffset().Int32Value())); in MaybeIncrementHotness()
1140 __ Add(counter, counter, 1); in MaybeIncrementHotness()
1141 __ And(counter, counter, interpreter::kTieredHotnessMask); in MaybeIncrementHotness()
1142 __ Strh(counter, MemOperand(temp, ProfilingInfo::BaselineHotnessCountOffset().Int32Value())); in MaybeIncrementHotness()
1143 __ Cbnz(counter, &done); in MaybeIncrementHotness()
1149 __ Stp(kArtMethodRegister, lr, MemOperand(sp, 0)); in MaybeIncrementHotness()
1151 __ Str(kArtMethodRegister, MemOperand(sp, 0)); in MaybeIncrementHotness()
1158 __ Ldr(lr, MemOperand(tr, entrypoint_offset)); in MaybeIncrementHotness()
1161 __ Blr(lr); in MaybeIncrementHotness()
1164 __ Ldr(lr, MemOperand(sp, 8)); in MaybeIncrementHotness()
1167 __ Bind(&done); in MaybeIncrementHotness()
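
The MaybeIncrementHotness() matches above load a 16-bit hotness counter with Ldrh, add one, subtract the value shifted right by 16, and store it back with Strh. On a 32-bit scratch register that sequence clamps the counter at 0xFFFF instead of wrapping to zero. A sketch of just that arithmetic, assuming nothing beyond what the listed instructions show:

    #include <cassert>
    #include <cstdint>

    // Mirrors: Ldrh counter; Add counter, counter, 1; Sub counter, counter, counter LSR 16; Strh counter.
    uint16_t BumpHotness(uint16_t counter) {
      uint32_t c = counter;             // Ldrh zero-extends into a 32-bit register
      c += 1;                           // may become 0x10000 if the counter was 0xFFFF
      c -= (c >> 16);                   // subtract the carry, clamping at 0xFFFF
      return static_cast<uint16_t>(c);  // Strh stores the low 16 bits
    }

    int main() {
      assert(BumpHotness(0) == 1);
      assert(BumpHotness(0xFFFE) == 0xFFFF);
      assert(BumpHotness(0xFFFF) == 0xFFFF);  // saturates rather than wrapping
      return 0;
    }
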
1174 __ Bind(&frame_entry_label_); in GenerateFrameEntry()
1182 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kArm64))); in GenerateFrameEntry()
1188 __ ldr(wzr, MemOperand(temp, 0)); in GenerateFrameEntry()
1217 __ Stp(kArtMethodRegister, lowest_spill, MemOperand(sp, -frame_size, PreIndex)); in GenerateFrameEntry()
1219 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex)); in GenerateFrameEntry()
1221 __ Claim(frame_size); in GenerateFrameEntry()
1234 __ Str(wzr, MemOperand(sp, GetStackOffsetOfShouldDeoptimizeFlag())); in GenerateFrameEntry()
1263 __ Ldp(xzr, lowest_spill, MemOperand(sp, frame_size, PostIndex)); in GenerateFrameExit()
1266 __ Drop(frame_size); in GenerateFrameExit()
1270 __ Ret(); in GenerateFrameExit()
1289 __ Bind(GetLabelOf(block)); in Bind()
1294 __ Mov(RegisterFrom(location, DataType::Type::kInt32), value); in MoveConstant()
1311 __ Cbz(value, &done); in MarkGCCard()
1314 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value())); in MarkGCCard()
1317 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift); in MarkGCCard()
1331 __ Strb(card, MemOperand(card, temp.X())); in MarkGCCard()
1333 __ Bind(&done); in MarkGCCard()
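
MarkGCCard() above loads the card-table base from the thread register, shifts the object address right by the card shift, and Strb-stores the card register at that index. In C terms this is the usual card-marking write barrier; per the Strb at 1331, the byte stored is simply the low byte of the card-table base register itself, which evidently doubles as the dirty value so no extra constant is needed. A rough sketch, with kCardShift as an assumed placeholder:

    #include <cstdint>

    // Assumed placeholder; the real value is gc::accounting::CardTable::kCardShift.
    constexpr unsigned kCardShift = 10;

    // Mirrors: Ldr card, [tr, CardTableOffset]; Lsr temp, object, kCardShift; Strb card, [card, temp].
    void MarkCard(uint8_t* card_table_base, uintptr_t object_addr) {
      // Strb of the card register stores its low byte, i.e. the low byte of the base address.
      uint8_t dirty = static_cast<uint8_t>(reinterpret_cast<uintptr_t>(card_table_base));
      card_table_base[object_addr >> kCardShift] = dirty;
    }
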
1373 __ Str(reg, MemOperand(sp, stack_index)); in SaveCoreRegister()
1379 __ Ldr(reg, MemOperand(sp, stack_index)); in RestoreCoreRegister()
1411 __ Mov(Register(destination), constant->AsIntConstant()->GetValue()); in MoveConstant()
1413 __ Mov(Register(destination), constant->AsLongConstant()->GetValue()); in MoveConstant()
1415 __ Mov(Register(destination), 0); in MoveConstant()
1417 __ Fmov(VRegister(destination), constant->AsFloatConstant()->GetValue()); in MoveConstant()
1420 __ Fmov(VRegister(destination), constant->AsDoubleConstant()->GetValue()); in MoveConstant()
1484 __ Ldr(dst, StackOperandFrom(source)); in MoveLocation()
1492 __ Mov(Register(dst), RegisterFrom(source, dst_type)); in MoveLocation()
1498 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type)); in MoveLocation()
1506 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type)); in MoveLocation()
1512 __ Fmov(VRegister(dst), FPRegisterFrom(source, dst_type)); in MoveLocation()
1531 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination)); in MoveLocation()
1555 __ Str(temp, StackOperandFrom(destination)); in MoveLocation()
1579 __ Ldr(temp, StackOperandFrom(source)); in MoveLocation()
1580 __ Str(temp, StackOperandFrom(destination)); in MoveLocation()
1591 __ Ldrb(Register(dst), src); in Load()
1594 __ Ldrsb(Register(dst), src); in Load()
1597 __ Ldrh(Register(dst), src); in Load()
1600 __ Ldrsh(Register(dst), src); in Load()
1608 __ Ldr(dst, src); in Load()
1630 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src)); in LoadAcquire()
1640 __ ldarb(Register(dst), base); in LoadAcquire()
1646 __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte); in LoadAcquire()
1653 __ ldarh(Register(dst), base); in LoadAcquire()
1659 __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte); in LoadAcquire()
1668 __ ldar(Register(dst), base); in LoadAcquire()
1682 __ ldar(temp, base); in LoadAcquire()
1687 __ Fmov(VRegister(dst), temp); in LoadAcquire()
1705 __ Strb(Register(src), dst); in Store()
1709 __ Strh(Register(src), dst); in Store()
1717 __ Str(src, dst); in Store()
1740 __ Add(temp_base, dst.GetBaseRegister(), op); in StoreRelease()
1749 __ stlrb(Register(src), base); in StoreRelease()
1759 __ stlrh(Register(src), base); in StoreRelease()
1771 __ stlr(Register(src), base); in StoreRelease()
1787 __ Fmov(temp_src, VRegister(src)); in StoreRelease()
1791 __ stlr(temp_src, base); in StoreRelease()
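
LoadAcquire() and StoreRelease() above emit ldarb/ldarh/ldar and stlrb/stlrh/stlr, the AArch64 acquire/release forms of the plain loads and stores used in Load() and Store(). At the C++ level the equivalent is an atomic access with acquire or release ordering; a minimal sketch:

    #include <atomic>
    #include <cstdint>

    // What the ldar/stlr pairs in the listing provide at the language level:
    // acquire ordering on the load side, release ordering on the store side.
    int32_t LoadFieldAcquire(const std::atomic<int32_t>& field) {
      return field.load(std::memory_order_acquire);   // typically compiles to ldar on AArch64
    }

    void StoreFieldRelease(std::atomic<int32_t>& field, int32_t value) {
      field.store(value, std::memory_order_release);  // typically compiles to stlr on AArch64
    }
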
1816 __ Ldr(lr, MemOperand(tr, entrypoint_offset.Int32Value())); in InvokeRuntime()
1819 __ blr(lr); in InvokeRuntime()
1837 __ Ldr(lr, MemOperand(tr, entry_point_offset)); in InvokeRuntimeWithoutRecordingPcInfo()
1838 __ Blr(lr); in InvokeRuntimeWithoutRecordingPcInfo()
1856 __ Ldrb(temp, HeapOperand(class_reg, status_byte_offset)); in GenerateClassInitializationCheck()
1857 __ Cmp(temp, shifted_visibly_initialized_value); in GenerateClassInitializationCheck()
1858 __ B(lo, slow_path->GetEntryLabel()); in GenerateClassInitializationCheck()
1859 __ Bind(slow_path->GetExitLabel()); in GenerateClassInitializationCheck()
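
GenerateClassInitializationCheck() above loads a single status byte from the class, compares it against the shifted "visibly initialized" value, and branches to the slow path on lo (unsigned lower). A sketch of the check, with the threshold as an assumed placeholder:

    #include <cstdint>

    // Hypothetical value; the real constant is derived from ClassStatus::kVisiblyInitialized
    // shifted into the status byte.
    constexpr uint8_t kShiftedVisiblyInitializedValue = 0xF0;

    // Mirrors: Ldrb temp, [class, status_byte_offset]; Cmp temp, threshold; B.lo slow_path.
    bool NeedsInitializationSlowPath(uint8_t status_byte) {
      return status_byte < kShiftedVisiblyInitializedValue;  // "lo" is an unsigned less-than
    }
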
1871 __ Ldrh(temp, HeapOperand(temp, mirror::Class::StatusOffset())); in GenerateBitstringTypeCheckCompare()
1874 __ Ldr(temp, HeapOperand(temp, mirror::Class::StatusOffset())); in GenerateBitstringTypeCheckCompare()
1876 __ Ubfx(temp, temp, 0, mask_bits); in GenerateBitstringTypeCheckCompare()
1879 __ Cmp(temp, path_to_root); in GenerateBitstringTypeCheckCompare()
1902 __ Dmb(InnerShareable, type); in GenerateMemoryBarrier()
1924 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue())); in GenerateSuspendCheck()
1926 __ Cbnz(temp, slow_path->GetEntryLabel()); in GenerateSuspendCheck()
1927 __ Bind(slow_path->GetReturnLabel()); in GenerateSuspendCheck()
1929 __ Cbz(temp, codegen_->GetLabelOf(successor)); in GenerateSuspendCheck()
1930 __ B(slow_path->GetEntryLabel()); in GenerateSuspendCheck()
2101 __ Cbz(obj, &*pred_is_null); in HandleFieldSet()
2112 __ Mov(temp, value.W()); in HandleFieldSet()
2133 __ Bind(&*pred_is_null); in HandleFieldSet()
2147 __ Add(dst, lhs, rhs); in HandleBinaryOp()
2149 __ And(dst, lhs, rhs); in HandleBinaryOp()
2151 __ Orr(dst, lhs, rhs); in HandleBinaryOp()
2153 __ Sub(dst, lhs, rhs); in HandleBinaryOp()
2157 __ Ror(dst, lhs, shift); in HandleBinaryOp()
2163 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type)); in HandleBinaryOp()
2166 __ Cmp(lhs, rhs); in HandleBinaryOp()
2167 __ Csel(dst, lhs, rhs, instr->IsMin() ? lt : gt); in HandleBinaryOp()
2170 __ Eor(dst, lhs, rhs); in HandleBinaryOp()
2180 __ Fadd(dst, lhs, rhs); in HandleBinaryOp()
2182 __ Fsub(dst, lhs, rhs); in HandleBinaryOp()
2184 __ Fmin(dst, lhs, rhs); in HandleBinaryOp()
2186 __ Fmax(dst, lhs, rhs); in HandleBinaryOp()
2229 __ Lsl(dst, lhs, shift_value); in HandleShift()
2231 __ Asr(dst, lhs, shift_value); in HandleShift()
2233 __ Lsr(dst, lhs, shift_value); in HandleShift()
2239 __ Lsl(dst, lhs, rhs_reg); in HandleShift()
2241 __ Asr(dst, lhs, rhs_reg); in HandleShift()
2243 __ Lsr(dst, lhs, rhs_reg); in HandleShift()
2285 __ Bic(dst, lhs, rhs); in VisitBitwiseNegatedRight()
2288 __ Orn(dst, lhs, rhs); in VisitBitwiseNegatedRight()
2291 __ Eon(dst, lhs, rhs); in VisitBitwiseNegatedRight()
2348 __ Add(out, left, right_operand); in VisitDataProcWithShifterOp()
2351 __ And(out, left, right_operand); in VisitDataProcWithShifterOp()
2355 __ Neg(out, right_operand); in VisitDataProcWithShifterOp()
2358 __ Orr(out, left, right_operand); in VisitDataProcWithShifterOp()
2361 __ Sub(out, left, right_operand); in VisitDataProcWithShifterOp()
2364 __ Eor(out, left, right_operand); in VisitDataProcWithShifterOp()
2381 __ Add(OutputRegister(instruction), in VisitIntermediateAddress()
2411 __ Add(OutputRegister(instruction), index_reg, offset); in VisitIntermediateAddressIndex()
2414 __ Add(OutputRegister(instruction), offset_reg, Operand(index_reg, LSL, shift)); in VisitIntermediateAddressIndex()
2451 __ nop(); in VisitMultiplyAccumulate()
2457 __ Madd(res, mul_left, mul_right, accumulator); in VisitMultiplyAccumulate()
2462 __ Mneg(res, mul_left, mul_right); in VisitMultiplyAccumulate()
2465 __ Msub(res, mul_left, mul_right, accumulator); in VisitMultiplyAccumulate()
2569 __ Ldr(length, MemOperand(obj.X(), adjusted_offset)); in VisitArrayGet()
2571 __ Ldr(length, HeapOperand(obj, count_offset)); in VisitArrayGet()
2581 __ Tbnz(length.W(), 0, &uncompressed_load); in VisitArrayGet()
2582 __ Ldrb(Register(OutputCPURegister(instruction)), in VisitArrayGet()
2584 __ B(&done); in VisitArrayGet()
2585 __ Bind(&uncompressed_load); in VisitArrayGet()
2586 __ Ldrh(Register(OutputCPURegister(instruction)), in VisitArrayGet()
2588 __ Bind(&done); in VisitArrayGet()
2605 __ Add(temp, obj, offset); in VisitArrayGet()
2611 __ Tbnz(length.W(), 0, &uncompressed_load); in VisitArrayGet()
2612 __ Ldrb(Register(OutputCPURegister(instruction)), in VisitArrayGet()
2614 __ B(&done); in VisitArrayGet()
2615 __ Bind(&uncompressed_load); in VisitArrayGet()
2616 __ Ldrh(Register(OutputCPURegister(instruction)), in VisitArrayGet()
2618 __ Bind(&done); in VisitArrayGet()
2656 __ Ldr(out, HeapOperand(InputRegisterAt(instruction, 0), offset)); in VisitArrayLength()
2661 __ Lsr(out.W(), out.W(), 1u); in VisitArrayLength()
2716 __ Add(temp, array, offset); in VisitArraySet()
2735 __ Cbz(Register(value), &do_store); in VisitArraySet()
2763 __ Ldr(temp, HeapOperand(array, class_offset)); in VisitArraySet()
2769 __ Ldr(temp, HeapOperand(temp, component_offset)); in VisitArraySet()
2771 __ Ldr(temp2, HeapOperand(Register(value), class_offset)); in VisitArraySet()
2774 __ Cmp(temp, temp2); in VisitArraySet()
2778 __ B(eq, &do_put); in VisitArraySet()
2784 __ Ldr(temp, HeapOperand(temp, super_offset)); in VisitArraySet()
2787 __ Cbnz(temp, slow_path->GetEntryLabel()); in VisitArraySet()
2788 __ Bind(&do_put); in VisitArraySet()
2790 __ B(ne, slow_path->GetEntryLabel()); in VisitArraySet()
2798 __ Bind(&do_store); in VisitArraySet()
2805 __ Mov(temp_source, value.W()); in VisitArraySet()
2815 __ Add(temp_base, array, offset); in VisitArraySet()
2825 __ Str(source, destination); in VisitArraySet()
2833 __ Bind(slow_path->GetExitLabel()); in VisitArraySet()
2876 __ B(slow_path->GetEntryLabel()); in VisitBoundsCheck()
2893 __ Cmp(InputRegisterAt(instruction, cmp_first_input), in VisitBoundsCheck()
2896 __ B(slow_path->GetEntryLabel(), cond); in VisitBoundsCheck()
2943 __ Fcmp(lhs_reg, 0.0); in GenerateFcmp()
2945 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1)); in GenerateFcmp()
2998 __ Cmp(left, right); in VisitCompare()
2999 __ Cset(result, ne); // result == +1 if NE or 0 otherwise in VisitCompare()
3000 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise in VisitCompare()
3007 __ Cset(result, ne); in VisitCompare()
3008 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias())); in VisitCompare()
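
VisitCompare() above lowers the three-way compare to Cmp, Cset(ne), Cneg(lt): the result starts as 0 or 1 depending on inequality and is negated when the left operand is smaller. A small sketch of the same mapping:

    #include <cassert>
    #include <cstdint>

    // Mirrors: Cmp left, right; Cset result, ne; Cneg result, result, lt.
    int32_t ThreeWayCompare(int64_t left, int64_t right) {
      int32_t result = (left != right) ? 1 : 0;  // Cset ne
      return (left < right) ? -result : result;  // Cneg lt
    }

    int main() {
      assert(ThreeWayCompare(1, 2) == -1);
      assert(ThreeWayCompare(2, 2) == 0);
      assert(ThreeWayCompare(3, 2) == 1);
      return 0;
    }
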
3047 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias())); in HandleCondition()
3052 __ Cmp(lhs, rhs); in HandleCondition()
3053 __ Cset(res, ARM64Condition(if_cond)); in HandleCondition()
3111 __ Add(out, dividend, Operand(dividend, LSR, bits - 1)); in FOR_EACH_CONDITION_INSTRUCTION()
3115 __ Add(temp, dividend, abs_imm - 1); in FOR_EACH_CONDITION_INSTRUCTION()
3116 __ Cmp(dividend, 0); in FOR_EACH_CONDITION_INSTRUCTION()
3117 __ Csel(out, temp, dividend, lt); in FOR_EACH_CONDITION_INSTRUCTION()
3124 __ Asr(out, final_dividend, ctz_imm); in FOR_EACH_CONDITION_INSTRUCTION()
3126 __ Neg(out, Operand(final_dividend, ASR, ctz_imm)); in FOR_EACH_CONDITION_INSTRUCTION()
3151 __ Cinc(out, in, mi); in GenerateIncrementNegativeByOne()
3153 __ Add(out, in, Operand(in, LSR, in.GetSizeInBits() - 1)); in GenerateIncrementNegativeByOne()
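
The division-by-power-of-two matches above (and GenerateIncrementNegativeByOne) add a bias to negative dividends before the arithmetic shift, so the shift rounds toward zero as Java division requires rather than toward negative infinity. A sketch of the bias for a positive divisor, with k assumed to be the divisor's trailing-zero count:

    #include <cassert>
    #include <cstdint>

    // Mirrors: Add temp, dividend, abs_imm - 1; Cmp dividend, 0; Csel out, temp, dividend, lt; Asr out, out, k.
    int64_t DivByPowerOfTwo(int64_t dividend, unsigned k) {
      int64_t bias = (dividend < 0) ? ((int64_t{1} << k) - 1) : 0;  // only negative values get the bias
      return (dividend + bias) >> k;                                // arithmetic shift now rounds toward zero
    }

    int main() {
      assert(DivByPowerOfTwo(7, 2) == 1);
      assert(DivByPowerOfTwo(-7, 2) == -1);  // a plain ">> 2" would give -2
      return 0;
    }
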
3165 __ Mov(temp_imm, divisor); in GenerateResultRemWithAnyConstant()
3166 __ Msub(out, quotient, temp_imm, dividend); in GenerateResultRemWithAnyConstant()
3196 __ Mov(temp, magic); in GenerateInt64UnsignedDivRemWithAnyPositiveConstant()
3198 __ Smulh(out, dividend, temp); in GenerateInt64UnsignedDivRemWithAnyPositiveConstant()
3200 __ Smulh(temp, dividend, temp); in GenerateInt64UnsignedDivRemWithAnyPositiveConstant()
3205 __ Add(temp, temp, dividend); in GenerateInt64UnsignedDivRemWithAnyPositiveConstant()
3208 __ Lsr(out, temp, shift); in GenerateInt64UnsignedDivRemWithAnyPositiveConstant()
3243 __ Mov(temp, magic); in GenerateInt64DivRemWithAnyConstant()
3244 __ Smulh(temp, dividend, temp); in GenerateInt64DivRemWithAnyConstant()
3258 __ Adds(temp, temp, dividend); in GenerateInt64DivRemWithAnyConstant()
3261 __ Subs(temp, temp, dividend); in GenerateInt64DivRemWithAnyConstant()
3266 __ Asr(temp, temp, shift); in GenerateInt64DivRemWithAnyConstant()
3297 __ Mov(temp, magic); in GenerateInt32DivRemWithAnyConstant()
3298 __ Smull(temp.X(), dividend, temp); in GenerateInt32DivRemWithAnyConstant()
3312 __ Adds(temp.X(), temp.X(), Operand(dividend.X(), LSL, 32)); in GenerateInt32DivRemWithAnyConstant()
3315 __ Subs(temp.X(), temp.X(), Operand(dividend.X(), LSL, 32)); in GenerateInt32DivRemWithAnyConstant()
3324 __ Lsr(out.X(), temp.X(), 32 + shift); in GenerateInt32DivRemWithAnyConstant()
3326 __ Lsr(temp.X(), temp.X(), 32 + shift); in GenerateInt32DivRemWithAnyConstant()
3330 __ Asr(temp.X(), temp.X(), 32 + shift); in GenerateInt32DivRemWithAnyConstant()
3382 __ Sdiv(out, dividend, divisor); in GenerateIntDiv()
3419 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1)); in VisitDiv()
3448 __ B(slow_path->GetEntryLabel()); in VisitDivZeroCheck()
3454 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel()); in VisitDivZeroCheck()
3506 __ B(codegen_->GetLabelOf(successor)); in HandleGoto()
3542 __ B(true_target); in GenerateTestAndBranch()
3547 __ B(false_target); in GenerateTestAndBranch()
3566 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target); in GenerateTestAndBranch()
3568 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target); in GenerateTestAndBranch()
3580 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target); in GenerateTestAndBranch()
3582 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target); in GenerateTestAndBranch()
3603 __ Cbz(lhs, non_fallthrough_target); in GenerateTestAndBranch()
3606 __ Cbnz(lhs, non_fallthrough_target); in GenerateTestAndBranch()
3610 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target); in GenerateTestAndBranch()
3614 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target); in GenerateTestAndBranch()
3622 __ Cmp(lhs, rhs); in GenerateTestAndBranch()
3623 __ B(arm64_cond, non_fallthrough_target); in GenerateTestAndBranch()
3631 __ B(false_target); in GenerateTestAndBranch()
3684 __ Ldr(OutputRegister(flag), in VisitShouldDeoptimizeFlag()
3744 __ Cmp(InputRegisterAt(select, 2), 0); in VisitSelect()
3751 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1)); in VisitSelect()
3756 __ Fcsel(OutputFPRegister(select), in VisitSelect()
3761 __ Csel(OutputRegister(select), in VisitSelect()
3777 __ Claim(adjustment); in IncreaseFrame()
3782 __ Drop(adjustment); in DecreaseFrame()
3787 __ Nop(); in GenerateNop()
3802 __ Cbz(InputRegisterAt(instruction, 1), &finish); in VisitPredicatedInstanceFieldGet()
3804 __ Bind(&finish); in VisitPredicatedInstanceFieldGet()
3908 __ Cbz(obj, &zero); in VisitInstanceOf()
3922 __ Cmp(out, cls); in VisitInstanceOf()
3923 __ Cset(out, eq); in VisitInstanceOf()
3925 __ B(&done); in VisitInstanceOf()
3943 __ Bind(&loop); in VisitInstanceOf()
3951 __ Cbz(out, &done); in VisitInstanceOf()
3952 __ Cmp(out, cls); in VisitInstanceOf()
3953 __ B(ne, &loop); in VisitInstanceOf()
3954 __ Mov(out, 1); in VisitInstanceOf()
3956 __ B(&done); in VisitInstanceOf()
3973 __ Bind(&loop); in VisitInstanceOf()
3974 __ Cmp(out, cls); in VisitInstanceOf()
3975 __ B(eq, &success); in VisitInstanceOf()
3982 __ Cbnz(out, &loop); in VisitInstanceOf()
3984 __ B(&done); in VisitInstanceOf()
3985 __ Bind(&success); in VisitInstanceOf()
3986 __ Mov(out, 1); in VisitInstanceOf()
3988 __ B(&done); in VisitInstanceOf()
4005 __ Cmp(out, cls); in VisitInstanceOf()
4006 __ B(eq, &exact_check); in VisitInstanceOf()
4015 __ Cbz(out, &done); in VisitInstanceOf()
4016 __ Ldrh(out, HeapOperand(out, primitive_offset)); in VisitInstanceOf()
4018 __ Cbnz(out, &zero); in VisitInstanceOf()
4019 __ Bind(&exact_check); in VisitInstanceOf()
4020 __ Mov(out, 1); in VisitInstanceOf()
4021 __ B(&done); in VisitInstanceOf()
4034 __ Cmp(out, cls); in VisitInstanceOf()
4039 __ B(ne, slow_path->GetEntryLabel()); in VisitInstanceOf()
4040 __ Mov(out, 1); in VisitInstanceOf()
4042 __ B(&done); in VisitInstanceOf()
4071 __ B(slow_path->GetEntryLabel()); in VisitInstanceOf()
4073 __ B(&done); in VisitInstanceOf()
4088 __ Cset(out, eq); in VisitInstanceOf()
4090 __ B(&done); in VisitInstanceOf()
4097 __ Bind(&zero); in VisitInstanceOf()
4098 __ Mov(out, 0); in VisitInstanceOf()
4102 __ Bind(&done); in VisitInstanceOf()
4106 __ Bind(slow_path->GetExitLabel()); in VisitInstanceOf()
4160 __ Cbz(obj, &done); in VisitCheckCast()
4174 __ Cmp(temp, cls); in VisitCheckCast()
4177 __ B(ne, type_check_slow_path->GetEntryLabel()); in VisitCheckCast()
4193 __ Bind(&loop); in VisitCheckCast()
4203 __ Cbz(temp, type_check_slow_path->GetEntryLabel()); in VisitCheckCast()
4205 __ Cmp(temp, cls); in VisitCheckCast()
4206 __ B(ne, &loop); in VisitCheckCast()
4221 __ Bind(&loop); in VisitCheckCast()
4222 __ Cmp(temp, cls); in VisitCheckCast()
4223 __ B(eq, &done); in VisitCheckCast()
4234 __ Cbnz(temp, &loop); in VisitCheckCast()
4236 __ B(type_check_slow_path->GetEntryLabel()); in VisitCheckCast()
4250 __ Cmp(temp, cls); in VisitCheckCast()
4251 __ B(eq, &done); in VisitCheckCast()
4262 __ Cbz(temp, type_check_slow_path->GetEntryLabel()); in VisitCheckCast()
4265 __ Ldrh(temp, HeapOperand(temp, primitive_offset)); in VisitCheckCast()
4267 __ Cbnz(temp, type_check_slow_path->GetEntryLabel()); in VisitCheckCast()
4281 __ B(type_check_slow_path->GetEntryLabel()); in VisitCheckCast()
4300 __ Ldr(WRegisterFrom(maybe_temp2_loc), HeapOperand(temp.W(), array_length_offset)); in VisitCheckCast()
4303 __ Bind(&start_loop); in VisitCheckCast()
4304 __ Cbz(WRegisterFrom(maybe_temp2_loc), type_check_slow_path->GetEntryLabel()); in VisitCheckCast()
4305 __ Ldr(WRegisterFrom(maybe_temp3_loc), HeapOperand(temp.W(), object_array_data_offset)); in VisitCheckCast()
4308 __ Add(temp, temp, 2 * kHeapReferenceSize); in VisitCheckCast()
4309 __ Sub(WRegisterFrom(maybe_temp2_loc), WRegisterFrom(maybe_temp2_loc), 2); in VisitCheckCast()
4311 __ Cmp(cls, WRegisterFrom(maybe_temp3_loc)); in VisitCheckCast()
4312 __ B(ne, &start_loop); in VisitCheckCast()
4326 __ B(ne, type_check_slow_path->GetEntryLabel()); in VisitCheckCast()
4330 __ Bind(&done); in VisitCheckCast()
4332 __ Bind(type_check_slow_path->GetExitLabel()); in VisitCheckCast()
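
Both VisitInstanceOf() and VisitCheckCast() above contain the same loop shape: load the object's class, then repeatedly Cmp against the target class and reload the superclass until a match or a null super is found. A sketch of that walk over a hypothetical class layout:

    // Hypothetical stand-in for mirror::Class; only the superclass link matters here.
    struct Class {
      const Class* super_class;
    };

    // Mirrors the loop: Bind(&loop); load super; Cbz -> fail; Cmp cls; B.ne loop.
    bool IsSubclassOf(const Class* klass, const Class* target) {
      for (const Class* k = klass; k != nullptr; k = k->super_class) {
        if (k == target) {
          return true;   // CheckCast falls through, InstanceOf sets the result to 1
        }
      }
      return false;      // CheckCast branches to the slow path, InstanceOf yields 0
    }
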
4394 __ Mov(x8, address); in MaybeGenerateInlineCacheCheck()
4395 __ Ldr(x9, MemOperand(x8, InlineCache::ClassesOffset().Int32Value())); in MaybeGenerateInlineCacheCheck()
4397 __ Cmp(klass, x9); in MaybeGenerateInlineCacheCheck()
4398 __ B(eq, &done); in MaybeGenerateInlineCacheCheck()
4400 __ Bind(&done); in MaybeGenerateInlineCacheCheck()
4415 __ Ldr(temp.W(), StackOperandFrom(receiver)); in VisitInvokeInterface()
4419 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset)); in VisitInvokeInterface()
4425 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset)); in VisitInvokeInterface()
4449 __ Ldr(ip1, StackOperandFrom(interface_method)); in VisitInvokeInterface()
4451 __ Mov(ip1, XRegisterFrom(interface_method)); in VisitInvokeInterface()
4462 __ Ldr(temp, in VisitInvokeInterface()
4467 __ Ldr(temp, MemOperand(temp, method_offset)); in VisitInvokeInterface()
4471 __ Mov(ip1, temp); in VisitInvokeInterface()
4474 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value())); in VisitInvokeInterface()
4481 __ blr(lr); in VisitInvokeInterface()
4571 __ Ldr(XRegisterFrom(temp), in LoadMethod()
4595 __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset)); in GenerateStaticOrDirectCall()
4633 __ blr(lr); in GenerateStaticOrDirectCall()
4643 __ bl(&frame_entry_label_); in GenerateStaticOrDirectCall()
4657 __ Ldr(lr, MemOperand(XRegisterFrom(callee_method), offset.Int32Value())); in GenerateStaticOrDirectCall()
4664 __ Ubfx(w0, w0, 0, 8); in GenerateStaticOrDirectCall()
4667 __ Sbfx(w0, w0, 0, 8); in GenerateStaticOrDirectCall()
4670 __ Ubfx(w0, w0, 0, 16); in GenerateStaticOrDirectCall()
4673 __ Sbfx(w0, w0, 0, 16); in GenerateStaticOrDirectCall()
4693 __ Ldr(lr, MemOperand(XRegisterFrom(callee_method), offset.Int32Value())); in GenerateStaticOrDirectCall()
4723 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset)); in GenerateVirtualCall()
4738 __ Ldr(temp, MemOperand(temp, method_offset)); in GenerateVirtualCall()
4740 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue())); in GenerateVirtualCall()
4745 __ blr(lr); in GenerateVirtualCall()
4761 __ Mov(trg_reg, res_reg, kDiscardForSameWReg); in MoveFromReturnRegister()
4765 __ Fmov(trg_reg, res_reg); in MoveFromReturnRegister()
4876 DCHECK(!__ AllowMacroInstructions()); // In ExactAssemblyScope. in EmitEntrypointThunkCall()
4880 __ bind(bl_label); in EmitEntrypointThunkCall()
4881 __ bl(static_cast<int64_t>(0)); // Placeholder, patched at link-time. in EmitEntrypointThunkCall()
4885 DCHECK(!__ AllowMacroInstructions()); // In ExactAssemblyScope. in EmitBakerReadBarrierCbnz()
4889 __ cbnz(mr, slow_path_entry); in EmitBakerReadBarrierCbnz()
4893 __ bind(cbnz_label); in EmitBakerReadBarrierCbnz()
4894 __ cbnz(mr, static_cast<int64_t>(0)); // Placeholder, patched at link-time. in EmitBakerReadBarrierCbnz()
4922 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u); }); in DeduplicateJitStringLiteral()
4930 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u); }); in DeduplicateJitClassLiteral()
4937 __ Bind(fixup_label); in EmitAdrpPlaceholder()
4938 __ adrp(reg, /* offset placeholder */ static_cast<int64_t>(0)); in EmitAdrpPlaceholder()
4947 __ Bind(fixup_label); in EmitAddPlaceholder()
4948 __ add(out, base, Operand(/* offset placeholder */ 0)); in EmitAddPlaceholder()
4956 __ Bind(fixup_label); in EmitLdrOffsetPlaceholder()
4957 __ ldr(out, MemOperand(base, /* offset placeholder */ 0)); in EmitLdrOffsetPlaceholder()
4981 __ Ldr(reg.W(), DeduplicateBootImageAddressLiteral(reinterpret_cast<uintptr_t>(address))); in LoadBootImageAddress()
5154 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); }); in DeduplicateUint32Literal()
5160 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); }); in DeduplicateUint64Literal()
5345 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address)); in VisitLoadClass()
5349 __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(), in VisitLoadClass()
5373 __ Cbz(out, slow_path->GetEntryLabel()); in VisitLoadClass()
5378 __ Bind(slow_path->GetExitLabel()); in VisitLoadClass()
5415 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress()); in VisitLoadException()
5423 __ Str(wzr, GetExceptionTlsAddress()); in VisitClearException()
5517 __ Cbz(out.X(), slow_path->GetEntryLabel()); in VisitLoadString()
5518 __ Bind(slow_path->GetExitLabel()); in VisitLoadString()
5525 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address)); in VisitLoadString()
5529 __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(), in VisitLoadString()
5547 __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex().index_); in VisitLoadString()
5608 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1)); in VisitMul()
5613 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1)); in VisitMul()
5646 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0)); in VisitNeg()
5651 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0)); in VisitNeg()
5700 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0)); in VisitNot()
5715 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1)); in VisitBooleanNot()
5731 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0))); in GenerateImplicitNullCheck()
5743 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel()); in GenerateExplicitNullCheck()
5856 __ And(out, dividend, abs_imm - 1); in GenerateIntRemForPower2Denom()
5859 __ Cmp(dividend, 0); in GenerateIntRemForPower2Denom()
5860 __ And(out, dividend, 1); in GenerateIntRemForPower2Denom()
5861 __ Csneg(out, out, out, ge); in GenerateIntRemForPower2Denom()
5866 __ Negs(temp, dividend); in GenerateIntRemForPower2Denom()
5867 __ And(out, dividend, abs_imm - 1); in GenerateIntRemForPower2Denom()
5868 __ And(temp, temp, abs_imm - 1); in GenerateIntRemForPower2Denom()
5869 __ Csneg(out, out, temp, mi); in GenerateIntRemForPower2Denom()
5908 __ Sdiv(temp, dividend, divisor); in GenerateIntRem()
5909 __ Msub(out, temp, divisor, dividend); in GenerateIntRem()
5982 __ Cmp(in_reg, Operand(0)); in VisitAbs()
5983 __ Cneg(out_reg, in_reg, lt); in VisitAbs()
5990 __ Fabs(out_reg, in_reg); in VisitAbs()
6027 __ Fmov(w0, s0); in VisitReturn()
6030 __ Fmov(x0, d0); in VisitReturn()
6100 __ Mov(w0, instruction->GetFormat()->GetValue()); in VisitStringBuilderAppend()
6255 __ Mov(output.W(), source.W()); in VisitTypeConversion()
6258 __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, result_size * kBitsPerByte); in VisitTypeConversion()
6260 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte); in VisitTypeConversion()
6263 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0)); in VisitTypeConversion()
6266 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0)); in VisitTypeConversion()
6269 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0)); in VisitTypeConversion()
6329 __ Subs(temp, value_reg, Operand(lower_bound)); in VisitPackedSwitch()
6333 __ B(eq, codegen_->GetLabelOf(successors[0])); in VisitPackedSwitch()
6336 __ Subs(temp, temp, Operand(2)); in VisitPackedSwitch()
6338 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1])); in VisitPackedSwitch()
6340 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2])); in VisitPackedSwitch()
6344 __ Cmp(temp, Operand(1)); in VisitPackedSwitch()
6345 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1])); in VisitPackedSwitch()
6350 __ B(codegen_->GetLabelOf(default_block)); in VisitPackedSwitch()
6364 __ Sub(index, value_reg, Operand(lower_bound)); in VisitPackedSwitch()
6370 __ Cmp(index, Operand(num_entries)); in VisitPackedSwitch()
6371 __ B(hs, codegen_->GetLabelOf(default_block)); in VisitPackedSwitch()
6378 __ Adr(table_base, jump_table->GetTableStartLabel()); in VisitPackedSwitch()
6380 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2)); in VisitPackedSwitch()
6384 __ Add(target_address, table_base, Operand(jump_offset, SXTW)); in VisitPackedSwitch()
6385 __ Br(target_address); in VisitPackedSwitch()
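
The second VisitPackedSwitch() block above is the jump-table path: subtract the lower bound, bounds-check against the number of entries, Adr the table start (emitted by the Bind/place matches at 554/563), load a 32-bit entry indexed by the case value, add it to the table base, and Br to the result. A rough analogue that keeps the bounds-check-then-index shape but dispatches through plain function pointers, since the real table of start-relative offsets has no portable C++ form:

    #include <cstdint>
    #include <cstdio>

    using Handler = void (*)();

    void CaseA()   { std::puts("case A"); }
    void CaseB()   { std::puts("case B"); }
    void Default() { std::puts("default"); }

    void Dispatch(int32_t value, int32_t lower_bound) {
      static const Handler kTable[] = {CaseA, CaseB};
      constexpr uint32_t kNumEntries = sizeof(kTable) / sizeof(kTable[0]);

      uint32_t index = static_cast<uint32_t>(value - lower_bound);  // Sub index, value, lower_bound
      if (index >= kNumEntries) {                                   // Cmp index, num_entries; B.hs default
        Default();
        return;
      }
      kTable[index]();                                              // Ldr entry; Add target; Br target
    }
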
6415 __ Mov(temp_reg, out_reg); in GenerateReferenceLoadOneRegister()
6417 __ Ldr(out_reg, HeapOperand(out_reg, offset)); in GenerateReferenceLoadOneRegister()
6423 __ Ldr(out_reg, HeapOperand(out_reg, offset)); in GenerateReferenceLoadOneRegister()
6453 __ Ldr(out_reg, HeapOperand(obj_reg, offset)); in GenerateReferenceLoadTwoRegisters()
6459 __ Ldr(out_reg, HeapOperand(obj_reg, offset)); in GenerateReferenceLoadTwoRegisters()
6502 __ adr(lr, &return_address); in GenerateGcRootFieldLoad()
6504 __ bind(fixup_label); in GenerateGcRootFieldLoad()
6508 __ ldr(root_reg, MemOperand(obj.X(), offset)); in GenerateGcRootFieldLoad()
6510 __ bind(&return_address); in GenerateGcRootFieldLoad()
6516 __ Add(root_reg.X(), obj.X(), offset); in GenerateGcRootFieldLoad()
6527 __ Ldr(root_reg, MemOperand(obj, offset)); in GenerateGcRootFieldLoad()
6548 __ adr(lr, &return_address); in GenerateIntrinsicCasMoveWithBakerReadBarrier()
6551 __ mov(marked_old_value, old_value); in GenerateIntrinsicCasMoveWithBakerReadBarrier()
6553 __ bind(&return_address); in GenerateIntrinsicCasMoveWithBakerReadBarrier()
6601 __ adr(lr, &return_address); in GenerateFieldLoadWithBakerReadBarrier()
6609 __ ldar(ref_reg, src); in GenerateFieldLoadWithBakerReadBarrier()
6611 __ ldr(ref_reg, src); in GenerateFieldLoadWithBakerReadBarrier()
6619 __ neg(ref_reg, Operand(ref_reg)); in GenerateFieldLoadWithBakerReadBarrier()
6621 __ bind(&return_address); in GenerateFieldLoadWithBakerReadBarrier()
6638 __ Add(base, obj, offset); in GenerateFieldLoadWithBakerReadBarrier()
6644 __ Add(base, obj, Operand(offset & ~(kReferenceLoadMinFarOffset - 1u))); in GenerateFieldLoadWithBakerReadBarrier()
6708 __ Add(temp.X(), obj.X(), Operand(data_offset)); in GenerateArrayLoadWithBakerReadBarrier()
6717 __ adr(lr, &return_address); in GenerateArrayLoadWithBakerReadBarrier()
6722 __ ldr(ref_reg, MemOperand(temp.X(), index_reg.X(), LSL, scale_factor)); in GenerateArrayLoadWithBakerReadBarrier()
6727 __ neg(ref_reg, Operand(ref_reg)); in GenerateArrayLoadWithBakerReadBarrier()
6729 __ bind(&return_address); in GenerateArrayLoadWithBakerReadBarrier()
6780 __ B(slow_path->GetEntryLabel()); in GenerateReadBarrierSlow()
6781 __ Bind(slow_path->GetExitLabel()); in GenerateReadBarrierSlow()
6815 __ B(slow_path->GetEntryLabel()); in GenerateReadBarrierForRootSlow()
6816 __ Bind(slow_path->GetExitLabel()); in GenerateReadBarrierForRootSlow()
6831 __ Ldr(XRegisterFrom(locations->Out()), in VisitClassTableGet()
6836 __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)), in VisitClassTableGet()
6838 __ Ldr(XRegisterFrom(locations->Out()), in VisitClassTableGet()
6897 __ Add(*scratch, base, Operand(WRegisterFrom(index), LSL, shift)); in VecNEONAddress()
6923 __ Add(*scratch, base, offset); in VecSVEAddress()
6927 #undef __
6930 #define __ assembler.GetVIXLAssembler()-> macro
6938 __ Ldr(ip0.W(), lock_word); in EmitGrayCheckAndFastPath()
6942 __ Tbnz(ip0.W(), LockWord::kReadBarrierStateShift, slow_path); in EmitGrayCheckAndFastPath()
6948 __ Bind(throw_npe); in EmitGrayCheckAndFastPath()
6954 __ Add(lr, lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET); in EmitGrayCheckAndFastPath()
6958 __ Add(base_reg, base_reg, Operand(ip0, LSR, 32)); in EmitGrayCheckAndFastPath()
6959 __ Br(lr); // And return back to the function. in EmitGrayCheckAndFastPath()
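
EmitGrayCheckAndFastPath() above loads the 32-bit lock word and uses Tbnz on the read-barrier-state bit to decide whether the Baker slow path is needed; objects whose bit is clear return through the fast path. A sketch of just that bit test, with the bit position as an assumed placeholder:

    #include <cstdint>

    // Assumed bit position for illustration; the real one is LockWord::kReadBarrierStateShift.
    constexpr unsigned kReadBarrierStateShift = 28;

    // Mirrors: Ldr ip0.W(), lock_word; Tbnz ip0.W(), kReadBarrierStateShift, slow_path.
    bool NeedsMarkSlowPath(uint32_t lock_word) {
      return ((lock_word >> kReadBarrierStateShift) & 1u) != 0;  // "gray" objects take the slow path
    }
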
6970 __ Ldr(entrypoint, MemOperand(tr, entry_point_offset)); in LoadReadBarrierMarkIntrospectionEntrypoint()
7003 __ Cbz(holder_reg.W(), throw_npe); in CompileBakerReadBarrierThunk()
7012 __ Bind(&slow_path); in CompileBakerReadBarrierThunk()
7015 __ Ldr(ip0.W(), ldr_address); // Load the LDR (immediate) unsigned offset. in CompileBakerReadBarrierThunk()
7017 __ Ubfx(ip0.W(), ip0.W(), 10, 12); // Extract the offset. in CompileBakerReadBarrierThunk()
7018 __ Ldr(ip0.W(), MemOperand(base_reg, ip0, LSL, 2)); // Load the reference. in CompileBakerReadBarrierThunk()
7023 __ Ldar(ip0.W(), MemOperand(base_reg)); in CompileBakerReadBarrierThunk()
7026 __ Br(ip1); // Jump to the entrypoint. in CompileBakerReadBarrierThunk()
7043 __ Bind(&slow_path); in CompileBakerReadBarrierThunk()
7045 __ Ldr(ip0.W(), ldr_address); // Load the LDR (register) unsigned offset. in CompileBakerReadBarrierThunk()
7047 __ Ubfx(ip0, ip0, 16, 6); // Extract the index register, plus 32 (bit 21 is set). in CompileBakerReadBarrierThunk()
7048 __ Bfi(ip1, ip0, 3, 6); // Insert ip0 to the entrypoint address to create in CompileBakerReadBarrierThunk()
7050 __ Mov(ip0, base_reg); // Move the base register to ip0. in CompileBakerReadBarrierThunk()
7051 __ Br(ip1); // Jump to the entrypoint's array switch case. in CompileBakerReadBarrierThunk()
7067 __ Cbz(root_reg, &return_label); in CompileBakerReadBarrierThunk()
7069 __ Ldr(ip0.W(), lock_word); in CompileBakerReadBarrierThunk()
7070 __ Tbz(ip0.W(), LockWord::kMarkBitStateShift, &not_marked); in CompileBakerReadBarrierThunk()
7071 __ Bind(&return_label); in CompileBakerReadBarrierThunk()
7072 __ Br(lr); in CompileBakerReadBarrierThunk()
7073 __ Bind(&not_marked); in CompileBakerReadBarrierThunk()
7074 __ Tst(ip0.W(), Operand(ip0.W(), LSL, 1)); in CompileBakerReadBarrierThunk()
7075 __ B(&forwarding_address, mi); in CompileBakerReadBarrierThunk()
7079 __ Add(ip1, ip1, Operand(BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRYPOINT_OFFSET)); in CompileBakerReadBarrierThunk()
7080 __ Mov(ip0.W(), root_reg); in CompileBakerReadBarrierThunk()
7081 __ Br(ip1); in CompileBakerReadBarrierThunk()
7082 __ Bind(&forwarding_address); in CompileBakerReadBarrierThunk()
7083 __ Lsl(root_reg, ip0.W(), LockWord::kForwardingAddressShift); in CompileBakerReadBarrierThunk()
7084 __ Br(lr); in CompileBakerReadBarrierThunk()
7122 #undef __