
Searched refs:__ (Results 1 – 25 of 25) sorted by relevance

/art/compiler/utils/
assembler_thumb_test.cc
74 #define __ assembler. macro
77 __ FinalizeCode(); in EmitAndCheck()
78 size_t cs = __ CodeSize(); in EmitAndCheck()
81 __ FinalizeInstructions(code); in EmitAndCheck()
86 #undef __
88 #define __ assembler. macro
124 __ BuildFrame(frame_size, mr_conv->MethodRegister(), callee_save_regs); in TEST_F()
131 __ Store(mr_conv->CurrentParamStackOffset(), mr_conv->CurrentParamRegister(), size); in TEST_F()
134 __ IncreaseFrameSize(32); in TEST_F()
137 __ IncreaseFrameSize(4096); in TEST_F()
[all …]
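Note: every hit in this file follows ART's assembler shorthand convention. `__` expands to the assembler object plus a trailing dot, so emission code reads like an assembly listing, and the macro is `#undef`'d (and, as at lines 86–88 above, redefined) within the same file. A minimal self-contained sketch of the idiom, using a toy `Assembler` class rather than ART's real one:

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <vector>

// Toy stand-in for ART's assembler interface (illustrative only).
class Assembler {
 public:
  void pushl(int reg) { bytes_.push_back(static_cast<uint8_t>(0x50 + reg)); }
  void ret() { bytes_.push_back(0xC3); }
  void FinalizeCode() { /* resolve fixups, literal pools, etc. */ }
  size_t CodeSize() const { return bytes_.size(); }
  void FinalizeInstructions(uint8_t* out) const {
    std::copy(bytes_.begin(), bytes_.end(), out);
  }
 private:
  std::vector<uint8_t> bytes_;
};

// The idiom: '__' expands to "assembler." so each line reads like a mnemonic.
#define __ assembler.

std::vector<uint8_t> EmitAndFinalize() {
  Assembler assembler;
  __ pushl(/*EBP*/ 5);
  __ ret();
  __ FinalizeCode();
  std::vector<uint8_t> code(__ CodeSize());
  __ FinalizeInstructions(code.data());
  return code;
}

#undef __  // keep the shorthand from leaking past the code that needs it

The `#undef`/`#define` pair in the test above is the same hygiene: the shorthand never stays defined beyond the block that uses it.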
/art/compiler/utils/x86/
jni_macro_assembler_x86.cc
43 #define __ asm_. macro
62 __ pushl(spill); in BuildFrame()
73 __ addl(ESP, Immediate(-adjust)); in BuildFrame()
77 __ pushl(method_reg.AsX86().AsCpuRegister()); in BuildFrame()
91 __ addl(ESP, Immediate(adjust)); in RemoveFrame()
96 __ popl(spill); in RemoveFrame()
100 __ ret(); in RemoveFrame()
109 __ addl(ESP, Immediate(-adjust)); in IncreaseFrameSize()
146 __ movl(Address(base.AsCpuRegister(), offs), src.AsCpuRegister()); in Store()
149 __ movl(Address(base.AsCpuRegister(), offs), src.AsRegisterPairLow()); in Store()
[all …]
/art/compiler/jni/quick/
jni_compiler.cc
48 #define __ jni_asm-> macro
177 __ BuildFrame(current_frame_size, method_register, callee_save_regs); in ArtJniCompileMethodInternal()
186 jclass_read_barrier_slow_path = __ CreateLabel(); in ArtJniCompileMethodInternal()
187 jclass_read_barrier_return = __ CreateLabel(); in ArtJniCompileMethodInternal()
190 __ TestGcMarking(jclass_read_barrier_slow_path.get(), JNIMacroUnaryCondition::kNotZero); in ArtJniCompileMethodInternal()
193 __ Bind(jclass_read_barrier_return.get()); in ArtJniCompileMethodInternal()
212 __ MoveArguments(ArrayRef<ArgumentLocation>(dest_args), in ArtJniCompileMethodInternal()
222 __ StoreStackPointerToThread(Thread::TopOfManagedStackOffset<kPointerSize>()); in ArtJniCompileMethodInternal()
236 __ Load(to_lock, method_register, MemberOffset(0u), kObjectReferenceSize); in ArtJniCompileMethodInternal()
241 __ Move(to_lock, mr_conv->CurrentParamRegister(), kObjectReferenceSize); in ArtJniCompileMethodInternal()
[all …]
/art/compiler/utils/x86_64/
jni_macro_assembler_x86_64.cc
44 #define __ asm_. macro
61 __ pushq(spill.AsCpuRegister()); in BuildFrame()
72 __ subq(CpuRegister(RSP), Immediate(rest_of_frame)); in BuildFrame()
82 __ movsd(Address(CpuRegister(RSP), offset), spill.AsXmmRegister()); in BuildFrame()
91 __ movq(Address(CpuRegister(RSP), 0), method_reg.AsX86_64().AsCpuRegister()); in BuildFrame()
108 __ movsd(spill.AsXmmRegister(), Address(CpuRegister(RSP), offset)); in RemoveFrame()
118 __ addq(CpuRegister(RSP), Immediate(offset)); in RemoveFrame()
124 __ popq(spill.AsCpuRegister()); in RemoveFrame()
129 __ ret(); in RemoveFrame()
138 __ addq(CpuRegister(RSP), Immediate(-static_cast<int64_t>(adjust))); in IncreaseFrameSize()
[all …]
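The two JNI macro assemblers above use the member form of the same shorthand: `__` forwards to the wrapped `asm_` object, so methods such as `BuildFrame()`, `RemoveFrame()` and `IncreaseFrameSize()` emit their pushes, pops and stack adjustments through it. A rough sketch of that shape, with simplified placeholder classes rather than ART's exact API:

#include <cstddef>

// Hypothetical low-level emitter with a couple of x86-style instructions.
class X86Assembler {
 public:
  void pushl(int reg) { (void)reg; }
  void popl(int reg) { (void)reg; }
  void addl(int reg, int imm) { (void)reg; (void)imm; }
  void ret() {}
};

// Member form of the idiom: '__' forwards to the wrapped assembler.
#define __ asm_.

class JniMacroAssembler {
 public:
  void BuildFrame(size_t frame_size, int method_reg) {
    __ pushl(method_reg);                               // spill the method register
    __ addl(/*ESP*/ 4, -static_cast<int>(frame_size));  // reserve the rest of the frame
  }
  void RemoveFrame(size_t frame_size, int method_reg) {
    __ addl(/*ESP*/ 4, static_cast<int>(frame_size));   // release the frame
    __ popl(method_reg);
    __ ret();
  }
 private:
  X86Assembler asm_;
};

#undef __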
/art/compiler/optimizing/
intrinsics_x86_64.cc
67 #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT macro
94 __ Bind(GetEntryLabel()); in EmitNativeCode()
96 __ Bind(&loop); in EmitNativeCode()
97 __ movl(CpuRegister(TMP), Address(src_curr_addr, 0)); in EmitNativeCode()
98 __ MaybeUnpoisonHeapReference(CpuRegister(TMP)); in EmitNativeCode()
107 __ MaybePoisonHeapReference(CpuRegister(TMP)); in EmitNativeCode()
108 __ movl(Address(dst_curr_addr, 0), CpuRegister(TMP)); in EmitNativeCode()
109 __ addl(src_curr_addr, Immediate(element_size)); in EmitNativeCode()
110 __ addl(dst_curr_addr, Immediate(element_size)); in EmitNativeCode()
111 __ cmpl(src_curr_addr, src_stop_addr); in EmitNativeCode()
[all …]
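The optimizing-compiler files in this group use a third variant: `__` downcasts the code generator's architecture-neutral assembler to the concrete per-architecture type before each emission, with a `// NOLINT` to keep the style checker quiet about the macro. A sketch under simplified names; a plain `static_cast` stands in for ART's checked `down_cast<>`:

// All classes here are simplified placeholders, not ART's real API.
class Assembler {                       // architecture-neutral base
 public:
  virtual ~Assembler() = default;
};

class X86_64Assembler : public Assembler {
 public:
  void movl(int dst, int src) { (void)dst; (void)src; }
  void addl(int reg, int imm) { (void)reg; (void)imm; }
};

class CodeGenerator {
 public:
  Assembler* GetAssembler() { return &assembler_; }
 private:
  X86_64Assembler assembler_;
};

// ART's down_cast<> is a static_cast that verifies the dynamic type in
// debug builds; a bare static_cast stands in for it in this sketch.
template <typename To, typename From>
To down_cast(From* from) { return static_cast<To>(from); }

#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT

void EmitCopyElement(CodeGenerator* codegen) {
  __ movl(/*TMP*/ 0, /*src*/ 1);          // load the element
  __ addl(/*src_addr*/ 1, /*stride*/ 4);  // advance the source pointer
}

#undef __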
intrinsics_x86.cc
71 #define __ down_cast<X86Assembler*>(codegen->GetAssembler())-> // NOLINT macro
105 __ Bind(GetEntryLabel()); in EmitNativeCode()
117 __ xorl(temp1, temp1); in EmitNativeCode()
119 __ Bind(&loop); in EmitNativeCode()
124 __ movl(temp2, Address(src, temp1, ScaleFactor::TIMES_4, adjusted_offset)); in EmitNativeCode()
126 __ leal(temp2, Address(src_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0)); in EmitNativeCode()
127 __ movl(temp2, Address(src, temp2, ScaleFactor::TIMES_4, offset)); in EmitNativeCode()
129 __ MaybeUnpoisonHeapReference(temp2); in EmitNativeCode()
142 __ MaybePoisonHeapReference(temp2); in EmitNativeCode()
147 __ movl(Address(dest, temp1, ScaleFactor::TIMES_4, adjusted_offset), temp2); in EmitNativeCode()
[all …]
code_generator_x86_64.cc
75 #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT macro
84 __ Bind(GetEntryLabel()); in EmitNativeCode()
110 __ Bind(GetEntryLabel()); in EmitNativeCode()
129 __ Bind(GetEntryLabel()); in EmitNativeCode()
132 __ negl(cpu_reg_); in EmitNativeCode()
134 __ xorl(cpu_reg_, cpu_reg_); in EmitNativeCode()
140 __ negq(cpu_reg_); in EmitNativeCode()
142 __ xorl(cpu_reg_, cpu_reg_); in EmitNativeCode()
145 __ jmp(GetExitLabel()); in EmitNativeCode()
165 __ Bind(GetEntryLabel()); in EmitNativeCode()
[all …]
code_generator_vector_x86.cc
26 #define __ down_cast<X86Assembler*>(GetAssembler())-> // NOLINT macro
69 cpu_has_avx ? __ vxorps(dst, dst, dst) : __ xorps(dst, dst); in VisitVecReplicateScalar()
78 __ movd(dst, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
79 __ punpcklbw(dst, dst); in VisitVecReplicateScalar()
80 __ punpcklwd(dst, dst); in VisitVecReplicateScalar()
81 __ pshufd(dst, dst, Immediate(0)); in VisitVecReplicateScalar()
86 __ movd(dst, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
87 __ punpcklwd(dst, dst); in VisitVecReplicateScalar()
88 __ pshufd(dst, dst, Immediate(0)); in VisitVecReplicateScalar()
92 __ movd(dst, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
[all …]
code_generator_vector_x86_64.cc
26 #define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT macro
64 cpu_has_avx ? __ vxorps(dst, dst, dst) : __ xorps(dst, dst); in VisitVecReplicateScalar()
73 __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false); in VisitVecReplicateScalar()
74 __ punpcklbw(dst, dst); in VisitVecReplicateScalar()
75 __ punpcklwd(dst, dst); in VisitVecReplicateScalar()
76 __ pshufd(dst, dst, Immediate(0)); in VisitVecReplicateScalar()
81 __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false); in VisitVecReplicateScalar()
82 __ punpcklwd(dst, dst); in VisitVecReplicateScalar()
83 __ pshufd(dst, dst, Immediate(0)); in VisitVecReplicateScalar()
87 __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false); in VisitVecReplicateScalar()
[all …]
intrinsics_arm_vixl.cc
40 #define __ assembler->GetVIXLAssembler()-> macro
90 __ Add(base, array, element_size * constant + data_offset); in GenSystemArrayCopyBaseAddress()
92 __ Add(base, array, Operand(RegisterFrom(pos), vixl32::LSL, element_size_shift)); in GenSystemArrayCopyBaseAddress()
93 __ Add(base, base, data_offset); in GenSystemArrayCopyBaseAddress()
112 __ Add(end, base, element_size * constant); in GenSystemArrayCopyEndAddress()
114 __ Add(end, base, Operand(RegisterFrom(copy_length), vixl32::LSL, element_size_shift)); in GenSystemArrayCopyEndAddress()
148 __ Bind(GetEntryLabel()); in EmitNativeCode()
153 __ Bind(&loop); in EmitNativeCode()
154 __ Ldr(tmp, MemOperand(src_curr_addr, element_size, PostIndex)); in EmitNativeCode()
181 __ Str(tmp, MemOperand(dst_curr_addr, element_size, PostIndex)); in EmitNativeCode()
[all …]
code_generator_x86.cc
74 #define __ down_cast<X86Assembler*>(codegen->GetAssembler())-> // NOLINT macro
83 __ Bind(GetEntryLabel()); in EmitNativeCode()
109 __ Bind(GetEntryLabel()); in EmitNativeCode()
128 __ Bind(GetEntryLabel()); in EmitNativeCode()
130 __ negl(reg_); in EmitNativeCode()
132 __ movl(reg_, Immediate(0)); in EmitNativeCode()
134 __ jmp(GetExitLabel()); in EmitNativeCode()
152 __ Bind(GetEntryLabel()); in EmitNativeCode()
173 __ movl(length_arg.AsRegister<Register>(), in EmitNativeCode()
179 __ movl(length_arg.AsRegister<Register>(), in EmitNativeCode()
[all …]
code_generator_vector_arm64_neon.cc
40 #define __ GetVIXLAssembler()-> macro
120 __ Movi(dst.V16B(), Int64FromLocation(src_loc)); in VisitVecReplicateScalar()
122 __ Dup(dst.V16B(), InputRegisterAt(instruction, 0)); in VisitVecReplicateScalar()
129 __ Movi(dst.V8H(), Int64FromLocation(src_loc)); in VisitVecReplicateScalar()
131 __ Dup(dst.V8H(), InputRegisterAt(instruction, 0)); in VisitVecReplicateScalar()
137 __ Movi(dst.V4S(), Int64FromLocation(src_loc)); in VisitVecReplicateScalar()
139 __ Dup(dst.V4S(), InputRegisterAt(instruction, 0)); in VisitVecReplicateScalar()
145 __ Movi(dst.V2D(), Int64FromLocation(src_loc)); in VisitVecReplicateScalar()
147 __ Dup(dst.V2D(), XRegisterFrom(src_loc)); in VisitVecReplicateScalar()
153 __ Fmov(dst.V4S(), src_loc.GetConstant()->AsFloatConstant()->GetValue()); in VisitVecReplicateScalar()
[all …]
intrinsics_arm64.cc
88 #define __ codegen->GetVIXLAssembler()-> macro
116 __ Bind(GetEntryLabel()); in EmitNativeCode()
118 __ Bind(&slow_copy_loop); in EmitNativeCode()
119 __ Ldr(tmp_reg, MemOperand(src_curr_addr, element_size, PostIndex)); in EmitNativeCode()
146 __ Str(tmp_reg, MemOperand(dst_curr_addr, element_size, PostIndex)); in EmitNativeCode()
147 __ Cmp(src_curr_addr, src_stop_addr); in EmitNativeCode()
148 __ B(&slow_copy_loop, ne); in EmitNativeCode()
149 __ B(GetExitLabel()); in EmitNativeCode()
159 #undef __
170 #define __ masm-> macro
[all …]
code_generator_arm_vixl.cc
101 #ifdef __
106 #define __ down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()-> // NOLINT macro
183 __ Vstr(vixl32::SRegister(first), MemOperand(sp, stack_offset)); in SaveContiguousSRegisterList()
187 __ Vstr(vixl32::SRegister(first++), MemOperand(sp, stack_offset)); in SaveContiguousSRegisterList()
203 __ Vstr(d_reg, MemOperand(sp, stack_offset)); in SaveContiguousSRegisterList()
209 __ Add(base, sp, Operand::From(stack_offset)); in SaveContiguousSRegisterList()
211 __ Vstm(F64, base, NO_WRITE_BACK, DRegisterList(d_reg, number_of_d_regs)); in SaveContiguousSRegisterList()
217 __ Vstr(vixl32::SRegister(last + 1), MemOperand(sp, stack_offset)); in SaveContiguousSRegisterList()
232 __ Vldr(vixl32::SRegister(first), MemOperand(sp, stack_offset)); in RestoreContiguousSRegisterList()
236 __ Vldr(vixl32::SRegister(first++), MemOperand(sp, stack_offset)); in RestoreContiguousSRegisterList()
[all …]
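code_generator_arm_vixl.cc (line 101) and code_generator_arm64.cc (line 57, further down) additionally guard the definition with `#ifdef __`, refusing to compile if something else already claimed the name before redefining it for the file. The shape of that guard; the error message here is illustrative, not the exact text in the source:

// Fail loudly if '__' is already taken, then claim it for this file only.
#ifdef __
#error "'__' is already defined; the assembler shorthand would clash."
#endif

#define __ down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()->  // NOLINT

// ... emission code using the shorthand ...

#undef __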
code_generator_vector_arm64_sve.cc
42 #define __ GetVIXLAssembler()-> macro
123 __ Dup(dst.VnB(), Int64FromLocation(src_loc)); in VisitVecReplicateScalar()
125 __ Dup(dst.VnB(), InputRegisterAt(instruction, 0)); in VisitVecReplicateScalar()
131 __ Dup(dst.VnH(), Int64FromLocation(src_loc)); in VisitVecReplicateScalar()
133 __ Dup(dst.VnH(), InputRegisterAt(instruction, 0)); in VisitVecReplicateScalar()
138 __ Dup(dst.VnS(), Int64FromLocation(src_loc)); in VisitVecReplicateScalar()
140 __ Dup(dst.VnS(), InputRegisterAt(instruction, 0)); in VisitVecReplicateScalar()
145 __ Dup(dst.VnD(), Int64FromLocation(src_loc)); in VisitVecReplicateScalar()
147 __ Dup(dst.VnD(), XRegisterFrom(src_loc)); in VisitVecReplicateScalar()
152 __ Fdup(dst.VnS(), src_loc.GetConstant()->AsFloatConstant()->GetValue()); in VisitVecReplicateScalar()
[all …]
code_generator_arm64.cc
57 #ifdef __
170 #define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()-> // NOLINT macro
215 __ Bind(GetEntryLabel()); in EmitNativeCode()
251 __ Bind(GetEntryLabel()); in EmitNativeCode()
280 __ Bind(GetEntryLabel()); in EmitNativeCode()
290 __ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_); in EmitNativeCode()
318 __ B(GetExitLabel()); in EmitNativeCode()
340 __ Bind(GetEntryLabel()); in EmitNativeCode()
345 __ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_); in EmitNativeCode()
353 __ B(GetExitLabel()); in EmitNativeCode()
[all …]
code_generator_vector_arm_vixl.cc
34 #define __ GetVIXLAssembler()-> macro
62 __ Vdup(Untyped8, dst, InputRegisterAt(instruction, 0)); in VisitVecReplicateScalar()
67 __ Vdup(Untyped16, dst, InputRegisterAt(instruction, 0)); in VisitVecReplicateScalar()
71 __ Vdup(Untyped32, dst, InputRegisterAt(instruction, 0)); in VisitVecReplicateScalar()
98 __ Vmov(OutputRegister(instruction), DRegisterLane(src, 0)); in VisitVecExtractScalar()
143 __ Vpadd(DataTypeValue::I32, dst, src, src); in VisitVecReduce()
146 __ Vpmin(DataTypeValue::S32, dst, src, src); in VisitVecReduce()
149 __ Vpmax(DataTypeValue::S32, dst, src, src); in VisitVecReduce()
179 __ Vneg(DataTypeValue::S8, dst, src); in VisitVecNeg()
184 __ Vneg(DataTypeValue::S16, dst, src); in VisitVecNeg()
[all …]
optimizing_cfi_test.cc
193 #define __ down_cast<arm::ArmVIXLAssembler*>(GetCodeGenerator() \ in TEST_ISA() macro
196 __ CompareAndBranchIfZero(r0, &target); in TEST_ISA()
199 __ Ldr(r0, vixl32::MemOperand(r0)); in TEST_ISA()
201 __ Bind(&target); in TEST_ISA()
202 #undef __ in TEST_ISA()
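optimizing_cfi_test.cc narrows the scope further still: the `#define __` at line 193 and the `#undef __` at line 202 both sit inside the `TEST_ISA()` body, so the shorthand exists only for the handful of instructions the test emits. A toy version of that scoping:

#include <cstdint>
#include <vector>

// Toy emitter standing in for the VIXL assembler the real test drives.
struct ToyAssembler {
  std::vector<uint8_t> code;
  void Nop() { code.push_back(0x00); }
  void Ret() { code.push_back(0xC3); }
};

std::vector<uint8_t> EmitExpectedCode() {
  ToyAssembler assembler;
#define __ assembler.
  __ Nop();
  __ Ret();
#undef __  // the shorthand never escapes this function body
  return assembler.code;
}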
/art/runtime/hprof/
hprof.cc
433 #define __ output_-> macro
569 __ AddU4(sn); in WriteClassTable()
570 __ AddObjectId(c); in WriteClassTable()
571 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(c)); in WriteClassTable()
572 __ AddStringId(LookupClassNameId(c)); in WriteClassTable()
587 __ AddU4(id); in WriteStringTable()
588 __ AddUtf8String(string.c_str()); in WriteStringTable()
664 __ AddU1List(reinterpret_cast<const uint8_t*>(magic), sizeof(magic)); in WriteFixedHeader()
671 __ AddU4(sizeof(uint32_t)); in WriteFixedHeader()
679 __ AddU4(static_cast<uint32_t>(nowMs >> 32)); in WriteFixedHeader()
[all …]
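hprof.cc reuses the same trick for something that is not an assembler at all: `__` forwards to the `output_` record writer, so the heap-dump serialization reads as a flat list of `AddU4`/`AddStringId`-style calls. A hedged sketch with a simplified writer; hprof's real output object has considerably more record plumbing:

#include <cstdint>
#include <string>
#include <vector>

// Simplified stand-in for hprof's output object.
class EndianOutput {
 public:
  void AddU1(uint8_t v) { buf_.push_back(v); }
  void AddU4(uint32_t v) {
    for (int shift = 24; shift >= 0; shift -= 8) {
      buf_.push_back(static_cast<uint8_t>(v >> shift));  // big-endian byte order
    }
  }
  void AddUtf8String(const char* s) {
    while (*s != '\0') buf_.push_back(static_cast<uint8_t>(*s++));
  }
 private:
  std::vector<uint8_t> buf_;
};

// Same shorthand, different target: '__' forwards to the writer member.
#define __ output_->

class HprofWriter {
 public:
  explicit HprofWriter(EndianOutput* output) : output_(output) {}
  void WriteStringRecord(uint32_t id, const std::string& s) {
    __ AddU4(id);
    __ AddUtf8String(s.c_str());
  }
 private:
  EndianOutput* output_;
};

#undef __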
/art/compiler/trampolines/
trampoline_compiler.cc
39 #define __ assembler. macro
77 __ FinalizeCode(); in CreateTrampoline()
78 size_t cs = __ CodeSize(); in CreateTrampoline()
81 __ FinalizeInstructions(code); in CreateTrampoline()
99 __ JumpTo(Arm64ManagedRegister::FromXRegister(X0), Offset(offset.Int32Value()), in CreateTrampoline()
104 __ LoadRawPtr(Arm64ManagedRegister::FromXRegister(IP1), in CreateTrampoline()
108 __ JumpTo(Arm64ManagedRegister::FromXRegister(IP1), Offset(offset.Int32Value()), in CreateTrampoline()
113 __ JumpTo(Arm64ManagedRegister::FromXRegister(TR), Offset(offset.Int32Value()), in CreateTrampoline()
119 __ FinalizeCode(); in CreateTrampoline()
120 size_t cs = __ CodeSize(); in CreateTrampoline()
[all …]
/art/runtime/
runtime_intrinsics.cc
95 #define IS_INTRINSIC_INITIALIZED(Name, InvokeType, _, __, ___, ClassName, MethodName, Signature) \ in AreAllIntrinsicsInitialized() argument
113 #define INITIALIZE_INTRINSIC(Name, InvokeType, _, __, ___, ClassName, MethodName, Signature) \ in InitializeIntrinsics() argument
method_handles.cc
86 #define CASE_PRIMITIVE(primitive, _, java_name, __) \ in GetBoxedPrimitiveClass() argument
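The hits in runtime_intrinsics.cc and method_handles.cc (and in compiler_driver.cc further down) are a different use of the name entirely: `_`, `__`, `___` and so on are throwaway parameter names for fields an X-macro expansion ignores, which is why the indexer tags them as `argument` rather than `macro`. A small self-contained illustration of that pattern; the list and field layout here are invented, not ART's actual intrinsics table:

#include <iostream>

// Invented X-macro list in the style of ART's intrinsics tables: each entry
// carries several fields, and a given expansion usually needs only a few.
#define TOY_INTRINSIC_LIST(V)                                         \
  V(MathAbs, kStatic, 1, 2, 3, "java/lang/Math", "abs")               \
  V(StringLength, kVirtual, 4, 5, 6, "java/lang/String", "length")

// '_', '__', '___' and '____' name the fields this expansion ignores;
// they are unrelated to the assembler shorthand seen in the other hits.
#define PRINT_OWNER_CLASS(Name, _, __, ___, ____, ClassName, MethodName) \
  std::cout << ClassName << "." << MethodName << "\n";

int main() {
  TOY_INTRINSIC_LIST(PRINT_OWNER_CLASS)
  return 0;
}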
/art/
.clang-format
12 AttributeMacros: ['__', 'NO_RETURN']
/art/build/
Android.cpplint.mk
58 art_cpplint_touch := $$(OUT_CPPLINT)/$$(subst /,__,$$(art_cpplint_file))
/art/dex2oat/driver/
compiler_driver.cc
1013 #define ADD_INTRINSIC_OWNER_CLASS(_, __, ___, ____, _____, ClassName, ______, _______) \ in AddClassesContainingIntrinsics() argument