
Searched refs:assembler (Results 1 – 25 of 25) sorted by relevance

/art/compiler/utils/
assembler_thumb_test.cc:170 ArmVIXLAssemblerTest() : pool(), allocator(&pool), assembler(&allocator) { } in ArmVIXLAssemblerTest()
174 ArmVIXLJNIMacroAssembler assembler; member in art::arm::ArmVIXLAssemblerTest
177 #define __ assembler->
179 void EmitAndCheck(ArmVIXLJNIMacroAssembler* assembler, const char* testname, in EmitAndCheck() argument
190 void EmitAndCheck(ArmVIXLJNIMacroAssembler* assembler, const char* testname) { in EmitAndCheck() argument
195 EmitAndCheck(assembler, testname, results->second); in EmitAndCheck()
200 #define __ assembler.
288 EmitAndCheck(&assembler, "VixlJniHelpers"); in TEST_F()
299 #define __ assembler.asm_.
321 vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler()); in TEST_F()
[all …]
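
Throughout these results, ART binds a local "__" macro to the active assembler (#define __ assembler->, #define __ assembler., #define __ assembler.asm_.) so emission code reads like assembly. A minimal self-contained sketch of the idiom, using a hypothetical MiniAssembler rather than the real ART classes:

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

// Hypothetical stand-in for an ART assembler; not the real API.
class MiniAssembler {
 public:
  void add(int rd, int rn, int imm) { bytes_.push_back(0xA0); }  // fake encoding
  void ret() { bytes_.push_back(0xC3); }                         // fake encoding
  size_t CodeSize() const { return bytes_.size(); }
 private:
  std::vector<uint8_t> bytes_;
};

// The idiom seen above: "__" expands to the assembler expression.
#define __ assembler->

void EmitStub(MiniAssembler* assembler) {
  __ add(0, 1, 4);
  __ ret();
}

#undef __  // the ART files likewise undefine or redefine "__" per scope

int main() {
  MiniAssembler assembler;
  EmitStub(&assembler);
  std::printf("emitted %zu bytes\n", assembler.CodeSize());
  return 0;
}
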
jni_macro_assembler_test.h:41 typedef std::string (*TestFn)(JNIMacroAssemblerTest* assembler_test, Ass* assembler);
assembler_test.h:61 typedef std::string (*TestFn)(AssemblerTest* assembler_test, Ass* assembler);
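
The TestFn typedefs above define the hook the assembler test harnesses use: a callback receives the test fixture and the assembler, drives the assembler, and returns the expected output the harness checks against (the shll_fn/rorl_fn entries further down are instances). A hedged, self-contained analogue in which the "assembler" merely records text and the real disassemble-and-diff step is reduced to a string comparison:

#include <iostream>
#include <string>

// Hypothetical minimal assembler that records mnemonics as text.
struct TextAssembler {
  std::string out;
  void shll(int reg, int imm) {
    out += "shll r" + std::to_string(reg) + ", " + std::to_string(imm) + "\n";
  }
};

struct AssemblerTest;  // stand-in for the real test fixture

// Mirrors the shape of: typedef std::string (*TestFn)(AssemblerTest*, Ass*);
typedef std::string (*TestFn)(AssemblerTest* assembler_test, TextAssembler* assembler);

std::string shll_fn(AssemblerTest* /*assembler_test*/, TextAssembler* assembler) {
  std::string expected;
  for (int reg = 0; reg < 4; ++reg) {
    assembler->shll(reg, 1);                               // drive the assembler
    expected += "shll r" + std::to_string(reg) + ", 1\n";  // build expected text
  }
  return expected;
}

int main() {
  TextAssembler assembler;
  TestFn fn = shll_fn;
  std::string expected = fn(nullptr, &assembler);
  std::cout << (expected == assembler.out ? "match" : "mismatch") << "\n";
  return 0;
}
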
/art/compiler/linker/arm/
relative_patcher_thumb2.cc:181 #define __ assembler.GetVIXLAssembler()->
183 static void EmitGrayCheckAndFastPath(arm::ArmVIXLAssembler& assembler, in EmitGrayCheckAndFastPath() argument
206 static void LoadReadBarrierMarkIntrospectionEntrypoint(arm::ArmVIXLAssembler& assembler, in LoadReadBarrierMarkIntrospectionEntrypoint() argument
223 void Thumb2RelativePatcher::CompileBakerReadBarrierThunk(arm::ArmVIXLAssembler& assembler, in CompileBakerReadBarrierThunk() argument
238 UseScratchRegisterScope temps(assembler.GetVIXLAssembler()); in CompileBakerReadBarrierThunk()
252 EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path, raw_ldr_offset); in CompileBakerReadBarrierThunk()
257 LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ep_reg); in CompileBakerReadBarrierThunk()
288 UseScratchRegisterScope temps(assembler.GetVIXLAssembler()); in CompileBakerReadBarrierThunk()
296 EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path, raw_ldr_offset); in CompileBakerReadBarrierThunk()
304 LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ep_reg); in CompileBakerReadBarrierThunk()
[all …]
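
Both Baker read-barrier thunk compilers shown here open a UseScratchRegisterScope before borrowing temporary registers; the scope hands registers out and gives them back when it is destroyed. A self-contained RAII analogue of that pattern, with a hypothetical ScratchPool in place of the VIXL class:

#include <bitset>
#include <cassert>
#include <cstdio>

// Hypothetical pool of scratch registers; VIXL's UseScratchRegisterScope
// plays this role for the assemblers above.
class ScratchPool {
 public:
  int Acquire() {
    for (int r = 0; r < 16; ++r) {
      if (!in_use_[r]) { in_use_[r] = true; return r; }
    }
    assert(false && "out of scratch registers");
    return -1;
  }
  void Release(int r) { in_use_[r] = false; }
 private:
  std::bitset<16> in_use_;
};

// RAII scope: the register acquired here is released on destruction,
// mirroring how the thunks scope their temporaries.
class ScratchScope {
 public:
  explicit ScratchScope(ScratchPool* pool) : pool_(pool), reg_(pool->Acquire()) {}
  ~ScratchScope() { pool_->Release(reg_); }
  int reg() const { return reg_; }
 private:
  ScratchPool* pool_;
  int reg_;
};

int main() {
  ScratchPool pool;
  {
    ScratchScope temps(&pool);  // like: UseScratchRegisterScope temps(...)
    std::printf("borrowed r%d\n", temps.reg());
  }                             // released here
  return 0;
}
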
relative_patcher_thumb2.h:126 void CompileBakerReadBarrierThunk(arm::ArmVIXLAssembler& assembler, uint32_t encoded_data);
/art/compiler/optimizing/
intrinsics_utils.h:51 Assembler* assembler = codegen->GetAssembler(); in EmitNativeCode() local
52 assembler->Bind(GetEntryLabel()); in EmitNativeCode()
73 assembler->Jump(GetExitLabel()); in EmitNativeCode()
intrinsics_arm_vixl.cc:37 #define __ assembler->GetVIXLAssembler()->
93 ArmVIXLAssembler* assembler = down_cast<ArmVIXLAssembler*>(codegen->GetAssembler()); in EmitNativeCode() local
128 static void GenSystemArrayCopyBaseAddress(ArmVIXLAssembler* assembler, in GenSystemArrayCopyBaseAddress() argument
151 static void GenSystemArrayCopyEndAddress(ArmVIXLAssembler* assembler, in GenSystemArrayCopyEndAddress() argument
182 ArmVIXLAssembler* assembler = arm_codegen->GetAssembler(); in EmitNativeCode() local
203 GenSystemArrayCopyBaseAddress(assembler, type, dest, dest_pos, dst_curr_addr); in EmitNativeCode()
208 assembler->MaybeUnpoisonHeapReference(tmp); in EmitNativeCode()
233 assembler->MaybePoisonHeapReference(tmp); in EmitNativeCode()
277 static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmVIXLAssembler* assembler) { in MoveFPToInt() argument
287 static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmVIXLAssembler* assembler) { in MoveIntToFP() argument
[all …]
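
GenSystemArrayCopyBaseAddress/GenSystemArrayCopyEndAddress above compute the raw copy bounds from an array pointer, a start position, and a length. The address arithmetic such helpers emit boils down to the following sketch (the header and element sizes here are assumed for illustration; the real data offset comes from ART's array layout and depends on the element type):

#include <cstdint>
#include <cstdio>

// Assumed layout constants for a 32-bit element array; illustrative only.
constexpr uintptr_t kDataOffset  = 12;  // assumed array header size
constexpr uintptr_t kElementSize = 4;   // 32-bit elements

// First element to copy: array + data offset + pos * element size.
uintptr_t BaseAddress(uintptr_t array, uintptr_t pos) {
  return array + kDataOffset + pos * kElementSize;
}

// One past the last element: base + length * element size.
uintptr_t EndAddress(uintptr_t base, uintptr_t length) {
  return base + length * kElementSize;
}

int main() {
  uintptr_t base = BaseAddress(0x1000, 2);
  std::printf("base=0x%lx end=0x%lx\n",
              (unsigned long)base, (unsigned long)EndAddress(base, 8));
  return 0;
}
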
intrinsics_mips64.cc:142 #define __ assembler->
151 static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) { in MoveFPToInt() argument
187 static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) { in MoveIntToFP() argument
225 Mips64Assembler* assembler) { in GenReverseBytes() argument
277 Mips64Assembler* assembler) { in GenNumberOfLeadingZeroes() argument
308 Mips64Assembler* assembler) { in GenNumberOfTrailingZeroes() argument
345 Mips64Assembler* assembler) { in GenReverse() argument
389 Mips64Assembler* assembler) { in GenBitCount() argument
473 static void MathAbsFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) { in MathAbsFP() argument
509 static void GenAbsInteger(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) { in GenAbsInteger() argument
[all …]
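
The MIPS64 helpers above (GenReverseBytes, GenNumberOfLeadingZeroes, GenNumberOfTrailingZeroes, GenBitCount) emit code with the semantics of the matching java.lang.Integer/Long methods. For reference, the scalar logic such a lowering must reproduce, written as plain C++ rather than the emitted MIPS sequence:

#include <cstdint>
#include <cstdio>

// Byte reversal: what a 32-bit GenReverseBytes must compute.
uint32_t ReverseBytes32(uint32_t x) {
  return (x >> 24) | ((x >> 8) & 0x0000FF00u) |
         ((x << 8) & 0x00FF0000u) | (x << 24);
}

// Parallel popcount: one classic reduction a GenBitCount lowering can target.
uint32_t BitCount32(uint32_t x) {
  x = x - ((x >> 1) & 0x55555555u);                 // 2-bit sums
  x = (x & 0x33333333u) + ((x >> 2) & 0x33333333u); // 4-bit sums
  x = (x + (x >> 4)) & 0x0F0F0F0Fu;                 // 8-bit sums
  return (x * 0x01010101u) >> 24;                   // fold into top byte
}

int main() {
  std::printf("%08x -> %08x, bits=%u\n",
              0x12345678u, ReverseBytes32(0x12345678u), BitCount32(0x12345678u));
  return 0;
}
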
intrinsics_x86_64.cc:129 #define __ assembler->
145 static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) { in MoveFPToInt() argument
151 static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) { in MoveIntToFP() argument
194 X86_64Assembler* assembler) { in GenReverseBytes() argument
254 X86_64Assembler* assembler, in MathAbsFP() argument
297 static void GenAbsInteger(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) { in GenAbsInteger() argument
338 X86_64Assembler* assembler, in GenMinMaxFP() argument
466 X86_64Assembler* assembler) { in GenMinMax() argument
592 X86_64Assembler* assembler, in GenSSE41FPToFPIntrinsic() argument
668 X86_64Assembler* assembler = GetAssembler(); in VisitMathRoundFloat() local
[all …]
intrinsics_x86.cc:176 #define __ assembler->
199 static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) { in MoveFPToInt() argument
214 static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) { in MoveIntToFP() argument
281 X86Assembler* assembler) { in GenReverseBytes() argument
320 X86Assembler* assembler = GetAssembler(); in VisitLongReverseBytes() local
359 X86Assembler* assembler, in MathAbsFP() argument
421 static void GenAbsInteger(LocationSummary* locations, X86Assembler* assembler) { in GenAbsInteger() argument
448 static void GenAbsLong(LocationSummary* locations, X86Assembler* assembler) { in GenAbsLong() argument
493 X86Assembler* assembler, in GenMinMaxFP() argument
658 X86Assembler* assembler) { in GenMinMax() argument
[all …]
intrinsics_mips.cc:153 #define __ assembler->
162 static void MoveFPToInt(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) { in MoveFPToInt() argument
203 static void MoveIntToFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) { in MoveIntToFP() argument
251 MipsAssembler* assembler) { in GenReverse() argument
445 MipsAssembler* assembler) { in GenNumberOfLeadingZeroes() argument
493 MipsAssembler* assembler) { in GenNumberOfTrailingZeroes() argument
615 MipsAssembler* assembler) { in GenBitCount() argument
749 MipsAssembler* assembler) { in MathAbsFP() argument
815 static void GenAbsInteger(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) { in GenAbsInteger() argument
865 MipsAssembler* assembler) { in GenMinMaxFP() argument
[all …]
code_generator_x86_64.cc:7032 X86_64Assembler* assembler = codegen_->GetAssembler(); in CreateJumpTable() local
7035 const int32_t offset_in_constant_table = assembler->ConstantAreaSize(); in CreateJumpTable()
7039 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table; in CreateJumpTable()
7051 assembler->AppendInt32(offset_to_block); in CreateJumpTable()
7061 X86_64Assembler* assembler = GetAssembler(); in Finalize() local
7062 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) { in Finalize()
7064 assembler->Align(4, 0); in Finalize()
7065 constant_area_start_ = assembler->CodeSize(); in Finalize()
7073 assembler->AddConstantArea(); in Finalize()
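
CreateJumpTable and Finalize above place jump tables in the constant area: the table's position relative to the code start is assembler->CodeSize() plus the constant-area offset, and each AppendInt32 entry stores a block offset made relative to that table position, so dispatch can compute target = &table + table[index]. A small worked sketch of the arithmetic, with made-up sizes:

#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
  // Assumed sizes, for illustration only.
  int32_t code_size = 0x1000;               // assembler->CodeSize()
  int32_t offset_in_constant_table = 0x20;  // constant-area bytes already emitted
  int32_t current_table_offset = code_size + offset_in_constant_table;

  // Hypothetical code offsets of the jump targets (basic blocks).
  std::vector<int32_t> block_offsets = {0x0040, 0x01F0, 0x0A00};

  // Each entry is relative to the table's own position.
  for (int32_t block_offset : block_offsets) {
    int32_t offset_to_block = block_offset - current_table_offset;
    std::printf("entry: %d\n", offset_to_block);  // what AppendInt32 would store
  }
  return 0;
}
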
optimizing_cfi_test_expected.inc:220 // as with the old assembler.
code_generator_x86.cc:7651 X86Assembler* assembler = codegen_->GetAssembler(); in CreateJumpTable() local
7654 const int32_t offset_in_constant_table = assembler->ConstantAreaSize(); in CreateJumpTable()
7671 assembler->AppendInt32(offset_to_block); in CreateJumpTable()
7681 X86Assembler* assembler = GetAssembler(); in Finalize() local
7682 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) { in Finalize()
7685 assembler->Align(4, 0); in Finalize()
7686 constant_area_start_ = assembler->CodeSize(); in Finalize()
7694 assembler->AddConstantArea(); in Finalize()
code_generator_arm_vixl.cc:140 EmitAdrCode(ArmVIXLMacroAssembler* assembler, vixl32::Register rd, vixl32::Label* label) in EmitAdrCode() argument
141 : assembler_(assembler), rd_(rd), label_(label) { in EmitAdrCode()
142 ExactAssemblyScope aas(assembler, kMaxInstructionSizeInBytes); in EmitAdrCode()
143 adr_location_ = assembler->GetCursorOffset(); in EmitAdrCode()
144 assembler->adr(EncodingSize(Wide), rd, label); in EmitAdrCode()
5648 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in CanEncode32BitConstantAsImmediate() local
5649 if (assembler->ShifterOperandCanHold(opcode, value, flags_update)) { in CanEncode32BitConstantAsImmediate()
5666 if (assembler->ShifterOperandCanHold(neg_opcode, neg_value, flags_update)) { in CanEncode32BitConstantAsImmediate()
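
CanEncode32BitConstantAsImmediate above first asks ShifterOperandCanHold whether the constant fits the instruction's immediate encoding, and on failure retries with the negated opcode and negated value (so, say, an add of -1 can become a sub of 1). A toy version of that fallback, using a fake 8-bit immediate rule instead of the real Thumb-2 encodings:

#include <cstdint>
#include <cstdio>

// Hypothetical check: pretend only 8-bit unsigned immediates encode.
bool CanHold(uint32_t value) { return value <= 0xFF; }

enum class Opcode { kAdd, kSub };
Opcode Negate(Opcode op) { return op == Opcode::kAdd ? Opcode::kSub : Opcode::kAdd; }

// Mirrors the shape of CanEncode32BitConstantAsImmediate: try the constant
// directly, then try the negated opcode with the negated constant.
bool CanEncode(Opcode op, int32_t value, Opcode* out_op, uint32_t* out_imm) {
  if (CanHold(static_cast<uint32_t>(value))) {
    *out_op = op;
    *out_imm = static_cast<uint32_t>(value);
    return true;
  }
  int32_t neg_value = -value;  // sketch ignores the INT32_MIN corner case
  if (CanHold(static_cast<uint32_t>(neg_value))) {
    *out_op = Negate(op);
    *out_imm = static_cast<uint32_t>(neg_value);
    return true;
  }
  return false;
}

int main() {
  Opcode op;
  uint32_t imm;
  if (CanEncode(Opcode::kAdd, -5, &op, &imm)) {
    std::printf("encoded as %s %u\n", op == Opcode::kSub ? "sub" : "add", imm);
  }
  return 0;
}
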
/art/compiler/trampolines/
trampoline_compiler.cc:46 #define __ assembler.
56 #define ___ assembler.GetVIXLAssembler()->
64 ArmVIXLAssembler assembler(allocator); in CreateTrampoline() local
71 vixl::aarch32::UseScratchRegisterScope temps(assembler.GetVIXLAssembler()); in CreateTrampoline()
102 Arm64Assembler assembler(allocator); in CreateTrampoline() local
141 MipsAssembler assembler(allocator); in CreateTrampoline() local
173 Mips64Assembler assembler(allocator); in CreateTrampoline() local
205 X86Assembler assembler(allocator); in CreateTrampoline() local
226 x86_64::X86_64Assembler assembler(allocator); in CreateTrampoline() local
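
CreateTrampoline above is compiled per target: each variant stack-allocates the matching assembler (ArmVIXLAssembler, Arm64Assembler, MipsAssembler, Mips64Assembler, X86Assembler, X86_64Assembler) from the same allocator. A hedged sketch of that per-ISA dispatch shape with hypothetical types; the real functions also emit the trampoline body and finalize the code buffer:

#include <cstdio>
#include <memory>
#include <stdexcept>
#include <string>

enum class InstructionSet { kArm, kArm64, kMips, kMips64, kX86, kX86_64 };

// Hypothetical common interface; ART's assemblers likewise share a base class.
struct Assembler {
  virtual ~Assembler() = default;
  virtual std::string Name() const = 0;
};
struct ArmAsm    : Assembler { std::string Name() const override { return "arm"; } };
struct Arm64Asm  : Assembler { std::string Name() const override { return "arm64"; } };
struct X86Asm    : Assembler { std::string Name() const override { return "x86"; } };
struct X86_64Asm : Assembler { std::string Name() const override { return "x86_64"; } };

// One assembler per ISA; MIPS variants omitted from this sketch for brevity.
std::unique_ptr<Assembler> MakeAssembler(InstructionSet isa) {
  switch (isa) {
    case InstructionSet::kArm:    return std::make_unique<ArmAsm>();
    case InstructionSet::kArm64:  return std::make_unique<Arm64Asm>();
    case InstructionSet::kX86:    return std::make_unique<X86Asm>();
    case InstructionSet::kX86_64: return std::make_unique<X86_64Asm>();
    default: throw std::runtime_error("ISA not implemented in this sketch");
  }
}

int main() {
  std::unique_ptr<Assembler> assembler = MakeAssembler(InstructionSet::kArm64);
  std::printf("created %s assembler\n", assembler->Name().c_str());
  return 0;
}
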
/art/compiler/linker/arm64/
relative_patcher_arm64.cc:364 #define __ assembler.GetVIXLAssembler()->
366 static void EmitGrayCheckAndFastPath(arm64::Arm64Assembler& assembler, in EmitGrayCheckAndFastPath() argument
394 static void LoadReadBarrierMarkIntrospectionEntrypoint(arm64::Arm64Assembler& assembler, in LoadReadBarrierMarkIntrospectionEntrypoint() argument
408 void Arm64RelativePatcher::CompileBakerReadBarrierThunk(arm64::Arm64Assembler& assembler, in CompileBakerReadBarrierThunk() argument
424 UseScratchRegisterScope temps(assembler.GetVIXLAssembler()); in CompileBakerReadBarrierThunk()
435 EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path); in CompileBakerReadBarrierThunk()
439 LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1); in CompileBakerReadBarrierThunk()
458 UseScratchRegisterScope temps(assembler.GetVIXLAssembler()); in CompileBakerReadBarrierThunk()
465 EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path); in CompileBakerReadBarrierThunk()
469 LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1); in CompileBakerReadBarrierThunk()
[all …]
relative_patcher_arm64.h:107 void CompileBakerReadBarrierThunk(arm64::Arm64Assembler& assembler, uint32_t encoded_data);
/art/compiler/utils/x86_64/
assembler_x86_64_test.cc:633 std::string shll_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { in shll_fn() argument
638 assembler->shll(*reg, shifter); in shll_fn()
654 std::string shlq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { in shlq_fn() argument
659 assembler->shlq(*reg, shifter); in shlq_fn()
675 std::string shrl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { in shrl_fn() argument
680 assembler->shrl(*reg, shifter); in shrl_fn()
695 std::string shrq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { in shrq_fn() argument
700 assembler->shrq(*reg, shifter); in shrq_fn()
715 std::string sarl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { in sarl_fn() argument
720 assembler->sarl(*reg, shifter); in sarl_fn()
[all …]
jni_macro_assembler_x86_64.cc:145 static void DecreaseFrameSizeImpl(size_t adjust, X86_64Assembler* assembler) { in DecreaseFrameSizeImpl() argument
147 assembler->addq(CpuRegister(RSP), Immediate(adjust)); in DecreaseFrameSizeImpl()
148 assembler->cfi().AdjustCFAOffset(-adjust); in DecreaseFrameSizeImpl()
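
Both DecreaseFrameSizeImpl versions (here and in jni_macro_assembler_x86.cc below) pair the stack-pointer adjustment (addq RSP / addl ESP) with cfi().AdjustCFAOffset(-adjust), keeping the unwind info consistent: releasing adjust bytes of frame moves the CFA offset by -adjust. A self-contained sketch of that bookkeeping with a hypothetical emitter, not the ART cfi() API:

#include <cstdio>

// Hypothetical CFI tracker: keeps the SP-to-CFA offset in sync with emitted
// stack adjustments, as assembler->cfi() does in ART.
struct CfiState {
  int cfa_offset = 0;
  void AdjustCFAOffset(int delta) { cfa_offset += delta; }
};

struct FrameEmitter {
  CfiState cfi;
  void IncreaseFrameSize(int adjust) {
    std::printf("sub sp, %d\n", adjust);  // grow the frame
    cfi.AdjustCFAOffset(adjust);
  }
  void DecreaseFrameSize(int adjust) {
    std::printf("add sp, %d\n", adjust);  // shrink the frame
    cfi.AdjustCFAOffset(-adjust);         // mirrors AdjustCFAOffset(-adjust)
  }
};

int main() {
  FrameEmitter e;
  e.IncreaseFrameSize(32);
  e.DecreaseFrameSize(32);
  std::printf("cfa offset back to %d\n", e.cfi.cfa_offset);
  return 0;
}
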
/art/test/538-checker-embed-constants/
info.txt:1 Test embedding of constants in assembler instructions.
/art/compiler/utils/x86/
assembler_x86_test.cc:376 std::string rorl_fn(AssemblerX86Test::Base* assembler_test, x86::X86Assembler* assembler) { in rorl_fn() argument
381 assembler->rorl(*reg, shifter); in rorl_fn()
396 std::string roll_fn(AssemblerX86Test::Base* assembler_test, x86::X86Assembler* assembler) { in roll_fn() argument
401 assembler->roll(*reg, shifter); in roll_fn()
jni_macro_assembler_x86.cc:114 static void DecreaseFrameSizeImpl(X86Assembler* assembler, size_t adjust) { in DecreaseFrameSizeImpl() argument
116 assembler->addl(ESP, Immediate(adjust)); in DecreaseFrameSizeImpl()
117 assembler->cfi().AdjustCFAOffset(-adjust); in DecreaseFrameSizeImpl()
/art/compiler/
Android.bp:94 "utils/assembler.cc",
/art/runtime/interpreter/mterp/
README.txt:99 assembler is GNU "as", but others will work (may require fiddling with