/art/compiler/linker/arm/ |
D | relative_patcher_thumb2.cc |
      84  arm::Thumb2Assembler assembler(&arena);  in CompileThunkCode() local
      85  assembler.LoadFromOffset(  in CompileThunkCode()
      88  assembler.bkpt(0);  in CompileThunkCode()
      89  assembler.FinalizeCode();  in CompileThunkCode()
      90  std::vector<uint8_t> thunk_code(assembler.CodeSize());  in CompileThunkCode()
      92  assembler.FinalizeInstructions(code);  in CompileThunkCode()
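The CompileThunkCode() hits above show ART's usual flow for emitting a freestanding thunk: construct an assembler on an arena, emit the body, finalize, then copy the encoded bytes out through a memory region. A minimal sketch of that flow, reconstructed from the calls listed here (the load arguments, the placeholder offset, and the MemoryRegion wrapper are assumptions, not the file's exact code):

    arm::Thumb2Assembler assembler(&arena);              // arena: an ArenaAllocator
    int32_t offset = 0;                                   // placeholder entrypoint offset (assumed)
    assembler.LoadFromOffset(kLoadWord, PC, R0, offset);  // load the branch target into PC
    assembler.bkpt(0);                                    // trap if control ever falls through
    assembler.FinalizeCode();                             // resolve fixups, settle instruction sizes
    std::vector<uint8_t> thunk_code(assembler.CodeSize());
    MemoryRegion code(thunk_code.data(), thunk_code.size());  // assumed buffer wrapper
    assembler.FinalizeInstructions(code);                 // write the encoded bytes into the buffer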
|
/art/compiler/optimizing/ |
D | intrinsics_utils.h |
      51  Assembler* assembler = codegen->GetAssembler();  in EmitNativeCode() local
      52  assembler->Bind(GetEntryLabel());  in EmitNativeCode()
      74  assembler->Jump(GetExitLabel());  in EmitNativeCode()
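The intrinsics_utils.h hits outline the shared slow-path skeleton used by the intrinsics: EmitNativeCode() binds the entry label, does the slow work, then jumps back to the exit label. A rough outline of that shape (the register save/restore and the runtime call in the middle are elided, and the enclosing slow-path class is assumed):

    void EmitNativeCode(CodeGenerator* codegen) {
      Assembler* assembler = codegen->GetAssembler();
      assembler->Bind(GetEntryLabel());   // the fast path branches here for the slow case
      // ... save live registers, invoke the runtime implementation, restore ... (elided)
      assembler->Jump(GetExitLabel());    // resume the fast path
    }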
|
D | intrinsics_mips64.cc |
     139  #define __ assembler->
     149  static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {  in MoveFPToInt() argument
     186  static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {  in MoveIntToFP() argument
     225  Mips64Assembler* assembler) {  in GenReverseBytes() argument
     277  Mips64Assembler* assembler) {  in GenNumberOfLeadingZeroes() argument
     308  Mips64Assembler* assembler) {  in GenNumberOfTrailingZeroes() argument
     345  Mips64Assembler* assembler) {  in GenReverse() argument
     388  static void MathAbsFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {  in MathAbsFP() argument
     425  static void GenAbsInteger(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {  in GenAbsInteger() argument
     461  Mips64Assembler* assembler) {  in GenMinMaxFP() argument
     [all …]
|
D | intrinsics_arm.h |
      37  ArmAssembler* assembler,  in IntrinsicLocationsBuilderARM() argument
      39  : arena_(arena), assembler_(assembler), features_(features) {}  in IntrinsicLocationsBuilderARM()
|
D | intrinsics_arm.cc |
      66  #define __ assembler->
      84  static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {  in MoveFPToInt() argument
      96  static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {  in MoveIntToFP() argument
     154  ArmAssembler* assembler) {  in GenNumberOfLeadingZeros() argument
     196  ArmAssembler* assembler) {  in GenNumberOfTrailingZeros() argument
     243  static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {  in MathAbsFP() argument
     283  ArmAssembler* assembler) {  in GenAbsInteger() argument
     331  ArmAssembler* assembler) {  in GenMinMax() argument
     374  ArmAssembler* assembler = GetAssembler();  in VisitMathSqrt() local
     384  ArmAssembler* assembler = GetAssembler();  in VisitMemoryPeekByte() local
     [all …]
|
D | intrinsics_x86_64.cc |
      80  #define __ assembler->
      98  static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) {  in MoveFPToInt() argument
     104  static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) {  in MoveIntToFP() argument
     148  X86_64Assembler* assembler) {  in GenReverseBytes() argument
     209  X86_64Assembler* assembler,  in MathAbsFP() argument
     253  static void GenAbsInteger(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) {  in GenAbsInteger() argument
     294  X86_64Assembler* assembler,  in GenMinMaxFP() argument
     423  X86_64Assembler* assembler) {  in GenMinMax() argument
     552  X86_64Assembler* assembler,  in GenSSE41FPToFPIntrinsic() argument
     631  X86_64Assembler* assembler = GetAssembler();  in VisitMathRoundFloat() local
     [all …]
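Each per-architecture intrinsics file in this listing opens with "#define __ assembler->" so the Move*/Gen* helpers read like assembly, and every helper receives the LocationSummary plus that architecture's assembler. An illustrative helper in this style for x86-64 (the movd overload taking an is64bit flag is an assumption about the assembler's API; the real file may use a different instruction sequence):

    #define __ assembler->

    static void MoveFPToInt(LocationSummary* locations, bool is64bit,
                            X86_64Assembler* assembler) {
      Location input = locations->InAt(0);
      Location output = locations->Out();
      // Bitwise copy of the FP value into a general-purpose register (32 or 64 bits wide).
      __ movd(output.AsRegister<CpuRegister>(), input.AsFpuRegister<XmmRegister>(), is64bit);
    }

    #undef __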
|
D | intrinsics_x86.cc |
      86  #define __ assembler->
     111  static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {  in MoveFPToInt() argument
     126  static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {  in MoveIntToFP() argument
     196  X86Assembler* assembler) {  in GenReverseBytes() argument
     235  X86Assembler* assembler = GetAssembler();  in VisitLongReverseBytes() local
     275  X86Assembler* assembler,  in MathAbsFP() argument
     333  static void GenAbsInteger(LocationSummary* locations, X86Assembler* assembler) {  in GenAbsInteger() argument
     361  static void GenAbsLong(LocationSummary* locations, X86Assembler* assembler) {  in GenAbsLong() argument
     406  X86Assembler* assembler,  in GenMinMaxFP() argument
     569  X86Assembler* assembler) {  in GenMinMax() argument
     [all …]
|
D | intrinsics_mips.cc |
     150  #define __ assembler->
     160  static void MoveFPToInt(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {  in MoveFPToInt() argument
     202  static void MoveIntToFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {  in MoveIntToFP() argument
     251  MipsAssembler* assembler) {  in GenReverse() argument
     445  MipsAssembler* assembler) {  in GenNumberOfLeadingZeroes() argument
     493  MipsAssembler* assembler) {  in GenNumberOfTrailingZeroes() argument
     616  MipsAssembler* assembler) {  in GenBitCount() argument
     748  static void MathAbsFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {  in MathAbsFP() argument
     777  static void GenAbsInteger(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {  in GenAbsInteger() argument
     827  MipsAssembler* assembler) {  in GenMinMaxFP() argument
     [all …]
|
D | code_generator_x86_64.cc |
    6682  X86_64Assembler* assembler = codegen_->GetAssembler();  in CreateJumpTable() local
    6685  const int32_t offset_in_constant_table = assembler->ConstantAreaSize();  in CreateJumpTable()
    6689  const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;  in CreateJumpTable()
    6701  assembler->AppendInt32(offset_to_block);  in CreateJumpTable()
    6711  X86_64Assembler* assembler = GetAssembler();  in Finalize() local
    6712  if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {  in Finalize()
    6714  assembler->Align(4, 0);  in Finalize()
    6715  constant_area_start_ = assembler->CodeSize();  in Finalize()
    6723  assembler->AddConstantArea();  in Finalize()
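The code_generator_x86_64.cc hits cover two related pieces: CreateJumpTable() computes block offsets relative to the constant area and appends them as 32-bit entries, and Finalize() aligns the code, records where the constant area starts, and appends the accumulated constants. A condensed sketch of the Finalize() side, using only the calls visible above (the jump-table loop and fixup handling are elided):

    X86_64Assembler* assembler = GetAssembler();
    if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
      assembler->Align(4, 0);                        // 4-byte align the constant area
      constant_area_start_ = assembler->CodeSize();  // remember where the literals begin
      // For each jump table, append one int32 offset per target block (loop elided):
      //   assembler->AppendInt32(offset_to_block);
      assembler->AddConstantArea();                  // emit the accumulated constants last
    }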
|
D | code_generator_x86.cc |
    7258  X86Assembler* assembler = codegen_->GetAssembler();  in CreateJumpTable() local
    7261  const int32_t offset_in_constant_table = assembler->ConstantAreaSize();  in CreateJumpTable()
    7278  assembler->AppendInt32(offset_to_block);  in CreateJumpTable()
    7288  X86Assembler* assembler = GetAssembler();  in Finalize() local
    7289  if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {  in Finalize()
    7292  assembler->Align(4, 0);  in Finalize()
    7293  constant_area_start_ = assembler->CodeSize();  in Finalize()
    7301  assembler->AddConstantArea();  in Finalize()
|
D | code_generator_arm.cc |
    3991  ArmAssembler* assembler = codegen_->GetAssembler();  in CanEncodeConstantAsImmediate() local
    3992  if (assembler->ShifterOperandCanHold(kNoRegister, kNoRegister, opcode, value, &so)) {  in CanEncodeConstantAsImmediate()
    4006  return assembler->ShifterOperandCanHold(kNoRegister, kNoRegister, neg_opcode, ~value, &so);  in CanEncodeConstantAsImmediate()
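Here the code generator asks the assembler itself whether a constant can be encoded as a Thumb-2 modified immediate: first with the original opcode, then with a complementary opcode and a transformed value. A condensed sketch built from the calls listed above (the opcode-to-neg_opcode mapping and the choice between ~value and -value are elided):

    ShifterOperand so;
    ArmAssembler* assembler = codegen_->GetAssembler();
    if (assembler->ShifterOperandCanHold(kNoRegister, kNoRegister, opcode, value, &so)) {
      return true;
    }
    // Otherwise retry with the complementary opcode (for example AND vs. BIC) and the
    // complemented value; which transform applies depends on the original opcode.
    return assembler->ShifterOperandCanHold(kNoRegister, kNoRegister, neg_opcode, ~value, &so);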
|
/art/compiler/utils/ |
D | assembler_thumb_test.cc |
     175  #define __ assembler->
     177  void EmitAndCheck(arm::Thumb2Assembler* assembler, const char* testname,  in EmitAndCheck() argument
     188  void EmitAndCheck(arm::Thumb2Assembler* assembler, const char* testname) {  in EmitAndCheck() argument
     193  EmitAndCheck(assembler, testname, results->second);  in EmitAndCheck()
     200  Thumb2AssemblerTest() : pool(), arena(&pool), assembler(&arena) { }  in Thumb2AssemblerTest()
     204  arm::Thumb2Assembler assembler;  member in art::arm::Thumb2AssemblerTest
     207  #define __ assembler.
     217  EmitAndCheck(&assembler, "SimpleMov");  in TEST_F()
     226  EmitAndCheck(&assembler, "SimpleMov32");  in TEST_F()
     234  EmitAndCheck(&assembler, "SimpleMovAdd");  in TEST_F()
     [all …]
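The Thumb2 assembler tests use a fixture that owns the arena-backed assembler (the member shown above), redefine "__" to "assembler." for the test bodies, and end each test with EmitAndCheck(&assembler, "TestName") to compare the emitted encoding against stored expected output. A sketch of what one such test might look like (the instructions shown are illustrative, not the actual SimpleMov body):

    #define __ assembler.

    TEST_F(Thumb2AssemblerTest, SimpleMov) {
      __ movs(R0, ShifterOperand(R1));   // illustrative instructions only
      __ mov(R1, ShifterOperand(100));
      EmitAndCheck(&assembler, "SimpleMov");
    }

    #undef __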
|
D | assembler_test.h | 52 typedef std::string (*TestFn)(AssemblerTest* assembler_test, Ass* assembler);
|
/art/compiler/trampolines/ |
D | trampoline_compiler.cc |
      46  #define __ assembler.
      54  Thumb2Assembler assembler(arena);  in CreateTrampoline() local
      84  Arm64Assembler assembler(arena);  in CreateTrampoline() local
     123  MipsAssembler assembler(arena);  in CreateTrampoline() local
     155  Mips64Assembler assembler(arena);  in CreateTrampoline() local
     187  X86Assembler assembler(arena);  in CreateTrampoline() local
     208  x86_64::X86_64Assembler assembler(arena);  in CreateTrampoline() local
|
/art/compiler/utils/x86_64/ |
D | assembler_x86_64_test.cc |
     363  std::string shll_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in shll_fn() argument
     370  assembler->shll(*reg, shifter);  in shll_fn()
     386  std::string shlq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in shlq_fn() argument
     393  assembler->shlq(*reg, shifter);  in shlq_fn()
     409  std::string shrl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in shrl_fn() argument
     416  assembler->shrl(*reg, shifter);  in shrl_fn()
     432  std::string shrq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in shrq_fn() argument
     439  assembler->shrq(*reg, shifter);  in shrq_fn()
     455  std::string sarl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in sarl_fn() argument
     462  assembler->sarl(*reg, shifter);  in sarl_fn()
     [all …]
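These shift tests plug into the TestFn hook declared in assembler_test.h (the typedef shown above): a free function receives the test fixture and the assembler, emits one shift per general-purpose register with the count in CL, and returns the assembly text the checker expects. A rough sketch of that shape (GetRegisters() and GetSecondaryRegisterName() are assumed fixture helpers for iterating registers and printing their 32-bit names):

    std::string shll_fn(AssemblerX86_64Test::Base* assembler_test,
                        x86_64::X86_64Assembler* assembler) {
      std::ostringstream str;
      x86_64::CpuRegister shifter(x86_64::RCX);            // the shift count lives in %cl
      for (auto* reg : assembler_test->GetRegisters()) {   // assumed fixture helper
        assembler->shll(*reg, shifter);
        str << "shll %cl, %" << assembler_test->GetSecondaryRegisterName(*reg) << "\n";
      }
      return str.str();                                    // expected disassembly text
    }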
|
/art/test/538-checker-embed-constants/ |
D | info.txt | 1 Test embedding of constants in assembler instructions.
|
/art/compiler/linker/arm64/ |
D | relative_patcher_arm64.cc |
     252  arm64::Arm64Assembler assembler(&arena);  in CompileThunkCode() local
     255  assembler.JumpTo(ManagedRegister(arm64::X0), offset, ManagedRegister(arm64::IP0));  in CompileThunkCode()
     257  assembler.FinalizeCode();  in CompileThunkCode()
     258  std::vector<uint8_t> thunk_code(assembler.CodeSize());  in CompileThunkCode()
     260  assembler.FinalizeInstructions(code);  in CompileThunkCode()
|
/art/compiler/utils/x86/ |
D | assembler_x86_test.cc |
     280  std::string rorl_fn(AssemblerX86Test::Base* assembler_test, x86::X86Assembler* assembler) {  in rorl_fn() argument
     287  assembler->rorl(*reg, shifter);  in rorl_fn()
     303  std::string roll_fn(AssemblerX86Test::Base* assembler_test, x86::X86Assembler* assembler) {  in roll_fn() argument
     310  assembler->roll(*reg, shifter);  in roll_fn()
|
/art/compiler/utils/arm/ |
D | assembler_arm.cc |
     578  static void EmitLoad(ArmAssembler* assembler, ManagedRegister m_dst,  in EmitLoad() argument
     585  assembler->LoadFromOffset(kLoadWord, dst.AsCoreRegister(), src_register, src_offset);  in EmitLoad()
     588  assembler->LoadFromOffset(kLoadWord, dst.AsRegisterPairLow(), src_register, src_offset);  in EmitLoad()
     589  assembler->LoadFromOffset(kLoadWord, dst.AsRegisterPairHigh(), src_register, src_offset + 4);  in EmitLoad()
     591  assembler->LoadSFromOffset(dst.AsSRegister(), src_register, src_offset);  in EmitLoad()
     594  assembler->LoadDFromOffset(dst.AsDRegister(), src_register, src_offset);  in EmitLoad()
|
D | assembler_thumb2.h |
     576  static void PrepareDependents(Thumb2Assembler* assembler);
     578  ArrayRef<const FixupId> Dependents(const Thumb2Assembler& assembler) const {  in Dependents() argument
     579  return ArrayRef<const FixupId>(assembler.fixup_dependents_).SubArray(dependents_start_,  in Dependents()
     642  static void ForExpandableDependencies(Thumb2Assembler* assembler, Function fn);
|
D | assembler_thumb2.cc |
      31  void Thumb2Assembler::Fixup::ForExpandableDependencies(Thumb2Assembler* assembler, Function fn) {  in ForExpandableDependencies() argument
      35  Fixup* fixups = assembler->fixups_.data();  in ForExpandableDependencies()
      36  for (FixupId fixup_id = 0u, end_id = assembler->fixups_.size(); fixup_id != end_id; ++fixup_id) {  in ForExpandableDependencies()
      54  void Thumb2Assembler::Fixup::PrepareDependents(Thumb2Assembler* assembler) {  in PrepareDependents() argument
      63  Fixup* fixups = assembler->fixups_.data();  in PrepareDependents()
      65  assembler,  in PrepareDependents()
      72  for (FixupId fixup_id = 0u, end_id = assembler->fixups_.size(); fixup_id != end_id; ++fixup_id) {  in PrepareDependents()
      80  assembler->fixup_dependents_.resize(number_of_dependents);  in PrepareDependents()
      81  FixupId* dependents = assembler->fixup_dependents_.data();  in PrepareDependents()
      83  assembler,  in PrepareDependents()
|
/art/compiler/ |
D | Android.mk | 79 utils/assembler.cc \
|
/art/runtime/interpreter/mterp/ |
D | README.txt | 99 assembler is GNU "as", but others will work (may require fiddling with
|