Searched refs:assembler (Results 1 – 23 of 23) sorted by relevance

/art/compiler/trampolines/
trampoline_compiler.cc
39 #define __ assembler.
49 #define ___ assembler.GetVIXLAssembler()->
57 ArmVIXLAssembler assembler(allocator); in CreateTrampoline() local
64 vixl::aarch32::UseScratchRegisterScope temps(assembler.GetVIXLAssembler()); in CreateTrampoline()
95 Arm64Assembler assembler(allocator); in CreateTrampoline() local
134 X86Assembler assembler(allocator); in CreateTrampoline() local
155 x86_64::X86_64Assembler assembler(allocator); in CreateTrampoline() local
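
The `#define __ assembler.` and `#define ___ assembler.GetVIXLAssembler()->` hits above show ART's emission shorthand: each generator locally redefines `__` so the C++ emission sites read almost like an assembly listing. A minimal self-contained sketch of the idiom, assuming a toy `Assembler` class (its `movl`/`ret` methods and encodings are illustrative stand-ins, not ART's real API):

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

// Toy stand-in for an ART-style assembler: each method appends encoded bytes.
class Assembler {
 public:
  void movl(int reg, int32_t imm) {
    buffer_.push_back(0xB8 + reg);  // illustrative opcode byte
    for (int i = 0; i < 4; ++i) {
      buffer_.push_back(static_cast<uint8_t>((imm >> (8 * i)) & 0xFF));
    }
  }
  void ret() { buffer_.push_back(0xC3); }
  size_t CodeSize() const { return buffer_.size(); }

 private:
  std::vector<uint8_t> buffer_;
};

// The convention the hits show: redefine __ locally so emission sites
// read like assembly source.
#define __ assembler.

void CreateStub() {
  Assembler assembler;
  __ movl(0, 42);  // reads almost like "movl $42, %eax"
  __ ret();
  std::printf("emitted %zu bytes\n", assembler.CodeSize());
}

#undef __

int main() { CreateStub(); }
```
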
/art/compiler/optimizing/
intrinsics_arm_vixl.cc
40 #define __ assembler->GetVIXLAssembler()->
75 static void GenSystemArrayCopyBaseAddress(ArmVIXLAssembler* assembler, in GenSystemArrayCopyBaseAddress() argument
98 static void GenSystemArrayCopyEndAddress(ArmVIXLAssembler* assembler, in GenSystemArrayCopyEndAddress() argument
129 ArmVIXLAssembler* assembler = arm_codegen->GetAssembler(); in EmitNativeCode() local
150 GenSystemArrayCopyBaseAddress(assembler, type, dest, dest_pos, dst_curr_addr); in EmitNativeCode()
155 assembler->MaybeUnpoisonHeapReference(tmp); in EmitNativeCode()
180 assembler->MaybePoisonHeapReference(tmp); in EmitNativeCode()
224 static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmVIXLAssembler* assembler) { in MoveFPToInt() argument
234 static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmVIXLAssembler* assembler) { in MoveIntToFP() argument
305 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenNumberOfLeadingZeros() local
[all …]
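
The GenSystemArrayCopyBaseAddress()/GenSystemArrayCopyEndAddress() hits show the companion pattern: free helper functions receive the assembler as a parameter so several intrinsics and their slow paths can share emission logic. A hedged sketch of that shape, with toy types standing in for ArmVIXLAssembler and the VIXL macro assembler (all offsets and register numbers are invented):

```cpp
#include <cstdio>

// Illustrative stand-ins; the real types are ArmVIXLAssembler and
// vixl::aarch32::MacroAssembler.
struct MacroAssembler {
  void Add(int rd, int rn, int imm) {
    std::printf("add r%d, r%d, #%d\n", rd, rn, imm);
  }
};

struct ArmVIXLAssembler {
  MacroAssembler* GetVIXLAssembler() { return &masm_; }
  MacroAssembler masm_;
};

#define __ assembler->GetVIXLAssembler()->

// Shared helper in the shape of GenSystemArrayCopyBaseAddress(): compute
// base = array + data_offset + pos * element_size (hypothetical constants).
static void GenBaseAddress(ArmVIXLAssembler* assembler,
                           int array_reg, int pos, int out_reg) {
  constexpr int kDataOffset = 12;  // hypothetical array header size
  constexpr int kElementSize = 4;  // hypothetical element width
  __ Add(out_reg, array_reg, kDataOffset + pos * kElementSize);
}

#undef __

int main() {
  ArmVIXLAssembler assembler;
  GenBaseAddress(&assembler, /*array_reg=*/0, /*pos=*/3, /*out_reg=*/1);
}
```
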
intrinsics_x86.cc
168 #define __ assembler->
191 static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) { in MoveFPToInt() argument
206 static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) { in MoveIntToFP() argument
273 X86Assembler* assembler) { in GenReverseBytes() argument
312 X86Assembler* assembler = GetAssembler(); in VisitLongReverseBytes() local
358 static void GenSSE41FPToFPIntrinsic(HInvoke* invoke, X86Assembler* assembler, int round_mode) { in GenSSE41FPToFPIntrinsic() argument
420 X86Assembler* assembler = GetAssembler(); in VisitMathRoundFloat() local
479 X86Assembler* assembler = codegen->GetAssembler(); in GenFPToFPCall() local
516 static void GenLowestOneBit(X86Assembler* assembler, in GenLowestOneBit() argument
825 static void CheckPosition(X86Assembler* assembler, in CheckPosition() argument
[all …]
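
The MoveFPToInt()/MoveIntToFP() hits take an is64bit flag so one helper serves both the float- and double-width bit-move intrinsics; on 32-bit x86 the 64-bit case must split the value across a register pair. A rough sketch of that branching shape (toy assembler class; movd and psrlq are real x86 mnemonics, but the signatures and register model here are simplified assumptions):

```cpp
#include <cstdio>

// Toy stand-in for X86Assembler; only the is64bit branching is the point.
struct X86Assembler {
  void movd(int gp, int xmm) { std::printf("movd r%d, xmm%d\n", gp, xmm); }
  void psrlq(int xmm, int imm) { std::printf("psrlq xmm%d, %d\n", xmm, imm); }
};

// One helper covers both widths of the intrinsic. For 64-bit on x86-32,
// extract the low half, shift the high half down, extract it too.
static void MoveFPToInt(bool is64bit, int out_lo, int out_hi, int in_xmm,
                        X86Assembler* assembler) {
  assembler->movd(out_lo, in_xmm);
  if (is64bit) {
    assembler->psrlq(in_xmm, 32);   // expose the high 32 bits
    assembler->movd(out_hi, in_xmm);
  }
}

int main() {
  X86Assembler assembler;
  MoveFPToInt(/*is64bit=*/false, 0, -1, 0, &assembler);
  MoveFPToInt(/*is64bit=*/true, 0, 1, 1, &assembler);
}
```
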
intrinsics_x86_64.cc
124 #define __ assembler->
140 static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) { in MoveFPToInt() argument
146 static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) { in MoveIntToFP() argument
214 X86_64Assembler* assembler = codegen->GetAssembler(); in GenIsInfinite() local
302 static void GenSSE41FPToFPIntrinsic(HInvoke* invoke, X86_64Assembler* assembler, int round_mode) { in GenSSE41FPToFPIntrinsic() argument
363 X86_64Assembler* assembler = GetAssembler(); in VisitMathRoundFloat() local
404 X86_64Assembler* assembler = GetAssembler(); in VisitMathRoundDouble() local
655 static void CheckPosition(X86_64Assembler* assembler, in CheckPosition() argument
720 X86_64Assembler* assembler, in SystemArrayCopyPrimitive() argument
761 CheckPosition(assembler, src_pos, src, length, slow_path, src_base); in SystemArrayCopyPrimitive()
[all …]
intrinsics_utils.h
58 TAssembler* assembler = down_cast<TAssembler*>(codegen->GetAssembler()); in EmitNativeCode() local
59 assembler->Bind(this->GetEntryLabel()); in EmitNativeCode()
92 assembler->Jump(this->GetExitLabel()); in EmitNativeCode()
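
These intrinsics_utils.h hits show the templated slow-path skeleton: EmitNativeCode() down-casts the generic assembler to the architecture-specific type, binds the entry label, does its work, and jumps back via the exit label. A compilable sketch of that control flow, using simplified stand-ins for the codegen, label, and down_cast machinery:

```cpp
#include <cstdio>

// Simplified stand-ins for ART's Label/Assembler/CodeGenerator hierarchy.
struct Label { const char* name; };

struct Assembler {
  virtual ~Assembler() = default;
  void Bind(Label* l) { std::printf("%s:\n", l->name); }
  void Jump(Label* l) { std::printf("  b %s\n", l->name); }
};

struct Arm64Assembler : Assembler {};

struct CodeGenerator {
  Assembler* GetAssembler() { return &assembler_; }
  Arm64Assembler assembler_;
};

// Stand-in for art::down_cast (which is a debug-checked static_cast).
template <typename To, typename From>
To down_cast(From* f) { return static_cast<To>(f); }

template <typename TAssembler>
struct IntrinsicSlowPath {
  Label entry{"slow_entry"}, exit{"slow_exit"};

  void EmitNativeCode(CodeGenerator* codegen) {
    // Recover the concrete assembler type, as in the hit at line 58.
    TAssembler* assembler = down_cast<TAssembler*>(codegen->GetAssembler());
    assembler->Bind(&entry);
    std::printf("  ; ... runtime call for the non-fast case ...\n");
    assembler->Jump(&exit);
  }
};

int main() {
  CodeGenerator codegen;
  IntrinsicSlowPath<Arm64Assembler> path;
  path.EmitNativeCode(&codegen);
}
```
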
code_generator_arm_vixl.cc
118 EmitAdrCode(ArmVIXLMacroAssembler* assembler, vixl32::Register rd, vixl32::Label* label) in EmitAdrCode() argument
119 : assembler_(assembler), rd_(rd), label_(label) { in EmitAdrCode()
120 DCHECK(!assembler->AllowMacroInstructions()); // In ExactAssemblyScope. in EmitAdrCode()
121 adr_location_ = assembler->GetCursorOffset(); in EmitAdrCode()
122 assembler->adr(EncodingSize(Wide), rd, label); in EmitAdrCode()
5999 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in CanEncode32BitConstantAsImmediate() local
6000 if (assembler->ShifterOperandCanHold(opcode, value, flags_update)) { in CanEncode32BitConstantAsImmediate()
6017 if (assembler->ShifterOperandCanHold(neg_opcode, neg_value, flags_update)) { in CanEncode32BitConstantAsImmediate()
9669 arm::ArmVIXLAssembler assembler(GetGraph()->GetAllocator()); in EmitThunkCode() local
9675 assembler.LoadFromOffset(arm::kLoadWord, vixl32::pc, vixl32::r0, offset.Int32Value()); in EmitThunkCode()
[all …]
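
EmitAdrCode's constructor records the cursor offset before emitting the adr instruction, so the instruction can be located and patched once the label's final position is known. A small sketch of that record-then-emit pattern (toy assembler; GetCursorOffset mirrors the VIXL name, everything else is an invented placeholder):

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

struct Label { int id; };

// Toy buffer-backed assembler; only GetCursorOffset echoes the real VIXL API.
struct MacroAssembler {
  int32_t GetCursorOffset() const { return static_cast<int32_t>(buf.size()); }
  void adr(int rd, Label* l) { buf.insert(buf.end(), 4, 0); }  // 4-byte stub
  std::vector<uint8_t> buf;
};

class EmitAdrCode {
 public:
  EmitAdrCode(MacroAssembler* assembler, int rd, Label* label)
      : assembler_(assembler) {
    // Remember where the adr lands so it can be fixed up later.
    adr_location_ = assembler->GetCursorOffset();
    assembler->adr(rd, label);
  }
  int32_t location() const { return adr_location_; }

 private:
  MacroAssembler* assembler_;
  int32_t adr_location_;
};

int main() {
  MacroAssembler masm;
  Label target{0};
  EmitAdrCode adr(&masm, /*rd=*/0, &target);
  std::printf("adr emitted at offset %d\n", adr.location());
}
```
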
code_generator_arm64.cc
5192 Arm64Assembler assembler(GetGraph()->GetAllocator()); in EmitThunkCode() local
5199 assembler.JumpTo(ManagedRegister(arm64::X0), offset, ManagedRegister(arm64::IP0)); in EmitThunkCode()
5207 assembler.JumpTo(ManagedRegister(arm64::TR), offset, ManagedRegister(arm64::IP0)); in EmitThunkCode()
5215 CompileBakerReadBarrierThunk(assembler, patch.GetBakerCustomValue1(), debug_name); in EmitThunkCode()
5224 assembler.FinalizeCode(); in EmitThunkCode()
5225 code->resize(assembler.CodeSize()); in EmitThunkCode()
5227 assembler.FinalizeInstructions(code_region); in EmitThunkCode()
6999 #define __ assembler.GetVIXLAssembler()->
7001 static void EmitGrayCheckAndFastPath(arm64::Arm64Assembler& assembler, in EmitGrayCheckAndFastPath() argument
7033 static void LoadReadBarrierMarkIntrospectionEntrypoint(arm64::Arm64Assembler& assembler, in LoadReadBarrierMarkIntrospectionEntrypoint() argument
[all …]
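
The EmitThunkCode() hits trace the standard finish sequence: emit, FinalizeCode(), size the output buffer from CodeSize(), then copy out with FinalizeInstructions(). A hedged sketch of that flow (the method names follow the hits, the bodies are invented for the toy class):

```cpp
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

// Toy assembler exposing the same finish-sequence surface the hits show.
struct Arm64Assembler {
  void EmitSomething() { internal_.insert(internal_.end(), 8, 0xAB); }
  void FinalizeCode() { finalized_ = true; }  // e.g. resolve branch fixups
  size_t CodeSize() const { return internal_.size(); }
  void FinalizeInstructions(uint8_t* region) {
    std::memcpy(region, internal_.data(), internal_.size());
  }
  std::vector<uint8_t> internal_;
  bool finalized_ = false;
};

int main() {
  Arm64Assembler assembler;
  assembler.EmitSomething();

  // The sequence from EmitThunkCode(): finalize, size, copy out.
  assembler.FinalizeCode();
  std::vector<uint8_t> code;
  code.resize(assembler.CodeSize());
  assembler.FinalizeInstructions(code.data());
  std::printf("thunk is %zu bytes\n", code.size());
}
```
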
intrinsics_arm64.cc
1123 Arm64Assembler* assembler = codegen->GetAssembler(); in EmitLoadExclusive() local
1124 MacroAssembler* masm = assembler->GetVIXLAssembler(); in EmitLoadExclusive()
1164 assembler->MaybeUnpoisonHeapReference(old_value); in EmitLoadExclusive()
1177 Arm64Assembler* assembler = codegen->GetAssembler(); in EmitStoreExclusive() local
1178 MacroAssembler* masm = assembler->GetVIXLAssembler(); in EmitStoreExclusive()
1180 assembler->MaybePoisonHeapReference(new_value); in EmitStoreExclusive()
1214 assembler->MaybeUnpoisonHeapReference(new_value); in EmitStoreExclusive()
1240 Arm64Assembler* assembler = codegen->GetAssembler(); in GenerateCompareAndSet() local
1241 MacroAssembler* masm = assembler->GetVIXLAssembler(); in GenerateCompareAndSet()
1337 Arm64Assembler* assembler = arm64_codegen->GetAssembler(); in EmitNativeCode() local
[all …]
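
The EmitLoadExclusive()/EmitStoreExclusive() hits bracket the exclusive access with MaybeUnpoisonHeapReference()/MaybePoisonHeapReference(): with heap poisoning enabled, references live in memory in encoded form, so they must be decoded after the load and re-encoded before the store. A schematic sketch of that ordering (toy assembler; only the call ordering reflects the hits, and the neg-based encoding is an assumption in the comments):

```cpp
#include <cstdio>

// Toy assembler; the real code drives Arm64Assembler plus VIXL emission.
struct Arm64Assembler {
  void ldaxr(int rt, int rn) { std::printf("ldaxr w%d, [x%d]\n", rt, rn); }
  void stlxr(int rs, int rt, int rn) {
    std::printf("stlxr w%d, w%d, [x%d]\n", rs, rt, rn);
  }
  // Decode/encode a heap reference; no-ops when poisoning is disabled.
  void MaybeUnpoisonHeapReference(int r) { std::printf("neg w%d, w%d\n", r, r); }
  void MaybePoisonHeapReference(int r) { std::printf("neg w%d, w%d\n", r, r); }
};

// Mirrors the ordering in the hits: unpoison after the exclusive load,
// poison before the exclusive store, unpoison afterwards so the register
// holds a usable reference again.
void GenerateCompareAndSetRef(Arm64Assembler* assembler,
                              int old_value, int new_value, int addr) {
  assembler->ldaxr(old_value, addr);
  assembler->MaybeUnpoisonHeapReference(old_value);
  // ... compare old_value against the expected value here ...
  assembler->MaybePoisonHeapReference(new_value);
  assembler->stlxr(/*status=*/9, new_value, addr);
  assembler->MaybeUnpoisonHeapReference(new_value);
}

int main() {
  Arm64Assembler assembler;
  GenerateCompareAndSetRef(&assembler, 0, 1, 2);
}
```
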
optimizing_cfi_test_expected.inc
142 // as with the old assembler.
code_generator_x86_64.cc
7989 X86_64Assembler* assembler = codegen_->GetAssembler(); in CreateJumpTable() local
7992 const int32_t offset_in_constant_table = assembler->ConstantAreaSize(); in CreateJumpTable()
7996 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table; in CreateJumpTable()
8008 assembler->AppendInt32(offset_to_block); in CreateJumpTable()
8018 X86_64Assembler* assembler = GetAssembler(); in Finalize() local
8019 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) { in Finalize()
8021 assembler->Align(4, 0); in Finalize()
8022 constant_area_start_ = assembler->CodeSize(); in Finalize()
8030 assembler->AddConstantArea(); in Finalize()
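
These Finalize() hits show how the x86-64 backend appends its constant area after the method body: if there are constants or jump tables, align, record the start offset, then append; the x86 backend's Finalize() further down follows the same sequence. A compact sketch (toy assembler; the method names follow the hits, the bodies are invented):

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

// Toy buffer assembler exposing the constant-area surface from the hits.
struct X86_64Assembler {
  size_t CodeSize() const { return buf.size(); }
  bool IsConstantAreaEmpty() const { return constants.empty(); }
  void Align(int alignment, uint8_t fill) {
    while (buf.size() % alignment != 0) buf.push_back(fill);
  }
  void AppendInt32(int32_t v) { constants.push_back(v); }
  void AddConstantArea() {
    for (int32_t v : constants) {
      for (int i = 0; i < 4; ++i) {
        buf.push_back(static_cast<uint8_t>((v >> (8 * i)) & 0xFF));
      }
    }
  }
  std::vector<uint8_t> buf = std::vector<uint8_t>(10, 0x90);  // fake body
  std::vector<int32_t> constants;
};

int main() {
  X86_64Assembler assembler;
  assembler.AppendInt32(0x12345678);  // e.g. a jump-table entry

  int32_t constant_area_start = -1;
  if (!assembler.IsConstantAreaEmpty()) {
    assembler.Align(4, 0);                       // align, as in Finalize()
    constant_area_start = assembler.CodeSize();  // record where constants begin
    assembler.AddConstantArea();
  }
  std::printf("constant area starts at %d\n", constant_area_start);
}
```
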
code_generator_arm_vixl.h
896 void CompileBakerReadBarrierThunk(ArmVIXLAssembler& assembler,
code_generator_arm64.h
1035 void CompileBakerReadBarrierThunk(Arm64Assembler& assembler,
code_generator_x86.cc
8752 X86Assembler* assembler = codegen_->GetAssembler(); in CreateJumpTable() local
8755 const int32_t offset_in_constant_table = assembler->ConstantAreaSize(); in CreateJumpTable()
8772 assembler->AppendInt32(offset_to_block); in CreateJumpTable()
8782 X86Assembler* assembler = GetAssembler(); in Finalize() local
8784 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) { in Finalize()
8787 assembler->Align(4, 0); in Finalize()
8788 constant_area_start_ = assembler->CodeSize(); in Finalize()
8796 assembler->AddConstantArea(); in Finalize()
/art/compiler/utils/
assembler_thumb_test.cc
44 ArmVIXLAssemblerTest() : pool(), allocator(&pool), assembler(&allocator) { } in ArmVIXLAssemblerTest()
74 #define __ assembler.
88 #define __ assembler.
92 ArmVIXLJNIMacroAssembler assembler; member in art::arm::ArmVIXLAssemblerTest
210 #define __ assembler.asm_.
232 vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler()); in TEST_F()
265 vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler()); in TEST_F()
jni_macro_assembler_test.h
42 typedef std::string (*TestFn)(JNIMacroAssemblerTest* assembler_test, Ass* assembler);
assembler_test.h
62 typedef std::string (*TestFn)(AssemblerTest* assembler_test, Ass* assembler);
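
Both test headers declare the same TestFn shape: a free function that receives the test fixture and the assembler under test and returns the expected disassembly as a string; the shll_fn/rorl_fn hits below are instances of it. A sketch of how a harness might consume such functions (the DriveTest helper and all type names here are illustrative, not the real AssemblerTest machinery):

```cpp
#include <cstdio>
#include <string>

// Toy assembler and fixture; the real ones are e.g. x86_64::X86_64Assembler
// and AssemblerTest<...>::Base.
struct Assembler {
  void shll(int reg, int imm) {}  // pretend to emit a shll
};
struct AssemblerTestFixture {};

// The TestFn shape from assembler_test.h / jni_macro_assembler_test.h:
// drive the assembler, return the expected textual assembly.
typedef std::string (*TestFn)(AssemblerTestFixture* assembler_test,
                              Assembler* assembler);

std::string shll_fn(AssemblerTestFixture* assembler_test, Assembler* assembler) {
  assembler->shll(/*reg=*/0, /*imm=*/1);
  return "shll $1, %eax\n";  // what the harness will compare against
}

// A harness would emit via fn, assemble the returned string with a reference
// assembler, and compare the two byte streams.
void DriveTest(TestFn fn) {
  AssemblerTestFixture fixture;
  Assembler assembler;
  std::string expected = fn(&fixture, &assembler);
  std::printf("expected:\n%s", expected.c_str());
}

int main() { DriveTest(shll_fn); }
```
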
/art/compiler/utils/x86_64/
assembler_x86_64_test.cc
641 std::string shll_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { in shll_fn() argument
646 assembler->shll(*reg, shifter); in shll_fn()
662 std::string shlq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { in shlq_fn() argument
667 assembler->shlq(*reg, shifter); in shlq_fn()
683 std::string shrl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { in shrl_fn() argument
688 assembler->shrl(*reg, shifter); in shrl_fn()
703 std::string shrq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { in shrq_fn() argument
708 assembler->shrq(*reg, shifter); in shrq_fn()
723 std::string sarl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { in sarl_fn() argument
728 assembler->sarl(*reg, shifter); in sarl_fn()
[all …]
jni_macro_assembler_x86_64.cc
143 static void DecreaseFrameSizeImpl(size_t adjust, X86_64Assembler* assembler) { in DecreaseFrameSizeImpl() argument
146 assembler->addq(CpuRegister(RSP), Immediate(adjust)); in DecreaseFrameSizeImpl()
147 assembler->cfi().AdjustCFAOffset(-adjust); in DecreaseFrameSizeImpl()
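
DecreaseFrameSizeImpl() pairs the stack adjustment with a matching CFI update so unwind information stays correct: the addq on RSP must be mirrored by AdjustCFAOffset(-adjust). The x86 variant further down follows the same pattern with a reversed parameter order. A sketch of that pairing (toy assembler and CFI recorder; only the pairing itself reflects the hits):

```cpp
#include <cstddef>
#include <cstdio>

// Toy CFI recorder: tracks the distance from RSP to the canonical frame address.
struct CfiInfo {
  int cfa_offset = 8;  // after 'call', the return address sits on the stack
  void AdjustCFAOffset(int delta) { cfa_offset += delta; }
};

// Toy assembler standing in for X86_64Assembler.
struct X86_64Assembler {
  void addq_rsp(int imm) { std::printf("addq $%d, %%rsp\n", imm); }
  CfiInfo& cfi() { return cfi_; }
  CfiInfo cfi_;
};

// Mirrors the hit: pop the frame AND tell the unwinder the CFA moved.
static void DecreaseFrameSizeImpl(size_t adjust, X86_64Assembler* assembler) {
  assembler->addq_rsp(static_cast<int>(adjust));
  assembler->cfi().AdjustCFAOffset(-static_cast<int>(adjust));
}

int main() {
  X86_64Assembler assembler;
  DecreaseFrameSizeImpl(32, &assembler);
  std::printf("CFA offset now %d\n", assembler.cfi().cfa_offset);
}
```
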
/art/test/538-checker-embed-constants/
info.txt
1 Test embedding of constants in assembler instructions.
/art/compiler/utils/x86/
jni_macro_assembler_x86.cc
114 static void DecreaseFrameSizeImpl(X86Assembler* assembler, size_t adjust) { in DecreaseFrameSizeImpl() argument
117 assembler->addl(ESP, Immediate(adjust)); in DecreaseFrameSizeImpl()
118 assembler->cfi().AdjustCFAOffset(-adjust); in DecreaseFrameSizeImpl()
assembler_x86_test.cc
510 std::string rorl_fn(AssemblerX86Test::Base* assembler_test, x86::X86Assembler* assembler) { in rorl_fn() argument
515 assembler->rorl(*reg, shifter); in rorl_fn()
530 std::string roll_fn(AssemblerX86Test::Base* assembler_test, x86::X86Assembler* assembler) { in roll_fn() argument
535 assembler->roll(*reg, shifter); in roll_fn()
/art/test/
README.md
14 [Jasmin](http://jasmin.sourceforge.net/) assembler or the
/art/compiler/
Android.bp
98 "utils/assembler.cc",