
Searched refs:__ (Results 1 – 25 of 32) sorted by relevance
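Note: every hit below follows ART's shorthand convention of defining `__` as a macro that expands to an assembler pointer, reference, or accessor call, so code-emission routines read almost like assembly listings. A minimal sketch of the idiom, using a hypothetical `Assembler` class and an `EmitAndMeasure()` helper invented for illustration (the real expansions are the ones shown in the results, e.g. `assembler->`, `GetAssembler()->`, `asm_.`):

```cpp
#include <cstddef>

// Hypothetical stand-in for an ART assembler; the real classes live under
// art/compiler/utils/ and expose the same two calls used below.
class Assembler {
 public:
  void FinalizeCode() {}
  std::size_t CodeSize() const { return 0; }
};

// The shorthand: every "__ Foo(...)" below expands to "assembler->Foo(...)".
#define __ assembler->

void EmitAndMeasure(Assembler* assembler) {
  __ FinalizeCode();                     // assembler->FinalizeCode();
  std::size_t code_size = __ CodeSize(); // assembler->CodeSize();
  (void)code_size;
}

// The files in the results always #undef __ again after use.
#undef __
```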


/art/compiler/utils/
assembler_thumb_test.cc
178 #define __ assembler-> macro
182 __ FinalizeCode(); in EmitAndCheck()
183 size_t cs = __ CodeSize(); in EmitAndCheck()
186 __ FinalizeInstructions(code); in EmitAndCheck()
199 #undef __
201 #define __ assembler. macro
229 __ BuildFrame(frame_size, mr_conv->MethodRegister(), callee_save_regs, mr_conv->EntrySpills()); in TEST_F()
230 __ IncreaseFrameSize(32); in TEST_F()
233 __ IncreaseFrameSize(4096); in TEST_F()
234 __ Load(method_register, FrameOffset(32), 4); in TEST_F()
[all …]
/art/compiler/utils/mips/
assembler_mips_test.cc
24 #define __ GetAssembler()-> macro
213 __ SetReorder(false); in BranchHelper()
218 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchHelper()
220 __ Bind(&label1); in BranchHelper()
224 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchHelper()
226 __ Bind(&label2); in BranchHelper()
228 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchHelper()
251 __ SetReorder(false); in BranchCondOneRegHelper()
256 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchCondOneRegHelper()
258 __ Bind(&label); in BranchCondOneRegHelper()
[all …]
assembler_mips32r5_test.cc
24 #define __ GetAssembler()-> macro
280 __ LoadQFromOffset(mips::F0, mips::A0, 0); in TEST_F()
281 __ LoadQFromOffset(mips::F0, mips::A0, 1); in TEST_F()
282 __ LoadQFromOffset(mips::F0, mips::A0, 2); in TEST_F()
283 __ LoadQFromOffset(mips::F0, mips::A0, 4); in TEST_F()
284 __ LoadQFromOffset(mips::F0, mips::A0, 8); in TEST_F()
285 __ LoadQFromOffset(mips::F0, mips::A0, 511); in TEST_F()
286 __ LoadQFromOffset(mips::F0, mips::A0, 512); in TEST_F()
287 __ LoadQFromOffset(mips::F0, mips::A0, 513); in TEST_F()
288 __ LoadQFromOffset(mips::F0, mips::A0, 514); in TEST_F()
[all …]
assembler_mips32r6_test.cc
24 #define __ GetAssembler()-> macro
284 __ SetReorder(false); in BranchHelper()
289 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchHelper()
291 __ Bind(&label1); in BranchHelper()
295 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchHelper()
297 __ Bind(&label2); in BranchHelper()
299 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchHelper()
322 __ SetReorder(false); in BranchCondOneRegHelper()
327 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchCondOneRegHelper()
329 __ Bind(&label); in BranchCondOneRegHelper()
[all …]
/art/compiler/optimizing/
intrinsics_mips.cc
65 #define __ codegen->GetAssembler()-> macro
80 __ Move(V0, trg_reg); in MoveFromReturnRegister()
86 __ MovS(F0, trg_reg); in MoveFromReturnRegister()
88 __ MovD(F0, trg_reg); in MoveFromReturnRegister()
114 __ Bind(GetEntryLabel()); in EmitNativeCode()
137 __ B(GetExitLabel()); in EmitNativeCode()
149 #undef __
157 #define __ assembler-> macro
173 __ Mfc1(out_lo, in); in MoveFPToInt()
174 __ MoveFromFpuHigh(out_hi, in); in MoveFPToInt()
[all …]
code_generator_mips.cc
172 #define __ down_cast<CodeGeneratorMIPS*>(codegen)->GetAssembler()-> // NOLINT macro
182 __ Bind(GetEntryLabel()); in EmitNativeCode()
218 __ Bind(GetEntryLabel()); in EmitNativeCode()
247 __ Bind(GetEntryLabel()); in EmitNativeCode()
254 __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_); in EmitNativeCode()
280 __ B(GetExitLabel()); in EmitNativeCode()
305 __ Bind(GetEntryLabel()); in EmitNativeCode()
308 __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_); in EmitNativeCode()
318 __ B(GetExitLabel()); in EmitNativeCode()
333 __ Bind(GetEntryLabel()); in EmitNativeCode()
[all …]
intrinsics_x86_64.cc
72 #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT macro
99 __ Bind(GetEntryLabel()); in EmitNativeCode()
101 __ Bind(&loop); in EmitNativeCode()
102 __ movl(CpuRegister(TMP), Address(src_curr_addr, 0)); in EmitNativeCode()
103 __ MaybeUnpoisonHeapReference(CpuRegister(TMP)); in EmitNativeCode()
112 __ MaybePoisonHeapReference(CpuRegister(TMP)); in EmitNativeCode()
113 __ movl(Address(dst_curr_addr, 0), CpuRegister(TMP)); in EmitNativeCode()
114 __ addl(src_curr_addr, Immediate(element_size)); in EmitNativeCode()
115 __ addl(dst_curr_addr, Immediate(element_size)); in EmitNativeCode()
116 __ cmpl(src_curr_addr, src_stop_addr); in EmitNativeCode()
[all …]
intrinsics_x86.cc
74 #define __ down_cast<X86Assembler*>(codegen->GetAssembler())-> // NOLINT macro
108 __ Bind(GetEntryLabel()); in EmitNativeCode()
120 __ xorl(temp1, temp1); in EmitNativeCode()
122 __ Bind(&loop); in EmitNativeCode()
127 __ movl(temp2, Address(src, temp1, ScaleFactor::TIMES_4, adjusted_offset)); in EmitNativeCode()
129 __ leal(temp2, Address(src_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0)); in EmitNativeCode()
130 __ movl(temp2, Address(src, temp2, ScaleFactor::TIMES_4, offset)); in EmitNativeCode()
132 __ MaybeUnpoisonHeapReference(temp2); in EmitNativeCode()
145 __ MaybePoisonHeapReference(temp2); in EmitNativeCode()
150 __ movl(Address(dest, temp1, ScaleFactor::TIMES_4, adjusted_offset), temp2); in EmitNativeCode()
[all …]
intrinsics_arm_vixl.cc
37 #define __ assembler->GetVIXLAssembler()-> macro
90 __ Bind(GetEntryLabel()); in EmitNativeCode()
111 __ B(GetExitLabel()); in EmitNativeCode()
139 __ Add(base, array, element_size * constant + data_offset); in GenSystemArrayCopyBaseAddress()
141 __ Add(base, array, Operand(RegisterFrom(pos), vixl32::LSL, element_size_shift)); in GenSystemArrayCopyBaseAddress()
142 __ Add(base, base, data_offset); in GenSystemArrayCopyBaseAddress()
161 __ Add(end, base, element_size * constant); in GenSystemArrayCopyEndAddress()
163 __ Add(end, base, Operand(RegisterFrom(copy_length), vixl32::LSL, element_size_shift)); in GenSystemArrayCopyEndAddress()
197 __ Bind(GetEntryLabel()); in EmitNativeCode()
202 __ Bind(&loop); in EmitNativeCode()
[all …]
code_generator_vector_mips.cc
24 #define __ down_cast<MipsAssembler*>(GetAssembler())-> // NOLINT macro
58 __ FillB(dst, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
63 __ FillH(dst, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
67 __ FillW(dst, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
71 __ InsertW(static_cast<VectorRegister>(FTMP), in VisitVecReplicateScalar()
74 __ InsertW(static_cast<VectorRegister>(FTMP), in VisitVecReplicateScalar()
77 __ ReplicateFPToVectorRegister(dst, FTMP, /* is_double= */ true); in VisitVecReplicateScalar()
81 __ ReplicateFPToVectorRegister(dst, in VisitVecReplicateScalar()
87 __ ReplicateFPToVectorRegister(dst, in VisitVecReplicateScalar()
127 __ Copy_sW(locations->Out().AsRegister<Register>(), src, 0); in VisitVecExtractScalar()
[all …]
code_generator_vector_mips64.cc
24 #define __ down_cast<Mips64Assembler*>(GetAssembler())-> // NOLINT macro
63 __ FillB(dst, locations->InAt(0).AsRegister<GpuRegister>()); in VisitVecReplicateScalar()
68 __ FillH(dst, locations->InAt(0).AsRegister<GpuRegister>()); in VisitVecReplicateScalar()
72 __ FillW(dst, locations->InAt(0).AsRegister<GpuRegister>()); in VisitVecReplicateScalar()
76 __ FillD(dst, locations->InAt(0).AsRegister<GpuRegister>()); in VisitVecReplicateScalar()
80 __ ReplicateFPToVectorRegister(dst, in VisitVecReplicateScalar()
86 __ ReplicateFPToVectorRegister(dst, in VisitVecReplicateScalar()
126 __ Copy_sW(locations->Out().AsRegister<GpuRegister>(), src, 0); in VisitVecExtractScalar()
130 __ Copy_sD(locations->Out().AsRegister<GpuRegister>(), src, 0); in VisitVecExtractScalar()
190 __ Hadd_sD(tmp, src, src); in VisitVecReduce()
[all …]
code_generator_mips64.cc
124 #define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()-> // NOLINT macro
134 __ Bind(GetEntryLabel()); in EmitNativeCode()
171 __ Bind(GetEntryLabel()); in EmitNativeCode()
200 __ Bind(GetEntryLabel()); in EmitNativeCode()
207 __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_); in EmitNativeCode()
233 __ Bc(GetExitLabel()); in EmitNativeCode()
258 __ Bind(GetEntryLabel()); in EmitNativeCode()
261 __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_); in EmitNativeCode()
274 __ Bc(GetExitLabel()); in EmitNativeCode()
289 __ Bind(GetEntryLabel()); in EmitNativeCode()
[all …]
intrinsics_mips64.cc
53 #define __ codegen->GetAssembler()-> macro
68 __ Move(V0, trg_reg); in MoveFromReturnRegister()
74 __ MovS(F0, trg_reg); in MoveFromReturnRegister()
76 __ MovD(F0, trg_reg); in MoveFromReturnRegister()
103 __ Bind(GetEntryLabel()); in EmitNativeCode()
126 __ Bc(GetExitLabel()); in EmitNativeCode()
138 #undef __
146 #define __ assembler-> macro
160 __ Dmfc1(out, in); in MoveFPToInt()
162 __ Mfc1(out, in); in MoveFPToInt()
[all …]
intrinsics_arm64.cc
76 #define __ codegen->GetVIXLAssembler()-> macro
91 __ Mov(trg_reg, res_reg, kDiscardForSameWReg); in MoveFromReturnRegister()
95 __ Fmov(trg_reg, res_reg); in MoveFromReturnRegister()
117 __ Bind(GetEntryLabel()); in EmitNativeCode()
145 __ B(GetExitLabel()); in EmitNativeCode()
183 __ Bind(GetEntryLabel()); in EmitNativeCode()
185 __ Bind(&slow_copy_loop); in EmitNativeCode()
186 __ Ldr(tmp_reg, MemOperand(src_curr_addr, element_size, PostIndex)); in EmitNativeCode()
213 __ Str(tmp_reg, MemOperand(dst_curr_addr, element_size, PostIndex)); in EmitNativeCode()
214 __ Cmp(src_curr_addr, src_stop_addr); in EmitNativeCode()
[all …]
code_generator_x86_64.cc
67 #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT macro
76 __ Bind(GetEntryLabel()); in EmitNativeCode()
102 __ Bind(GetEntryLabel()); in EmitNativeCode()
121 __ Bind(GetEntryLabel()); in EmitNativeCode()
124 __ negl(cpu_reg_); in EmitNativeCode()
126 __ xorl(cpu_reg_, cpu_reg_); in EmitNativeCode()
132 __ negq(cpu_reg_); in EmitNativeCode()
134 __ xorl(cpu_reg_, cpu_reg_); in EmitNativeCode()
137 __ jmp(GetExitLabel()); in EmitNativeCode()
157 __ Bind(GetEntryLabel()); in EmitNativeCode()
[all …]
code_generator_x86.cc
68 #define __ down_cast<X86Assembler*>(codegen->GetAssembler())-> // NOLINT macro
77 __ Bind(GetEntryLabel()); in EmitNativeCode()
103 __ Bind(GetEntryLabel()); in EmitNativeCode()
122 __ Bind(GetEntryLabel()); in EmitNativeCode()
124 __ negl(reg_); in EmitNativeCode()
126 __ movl(reg_, Immediate(0)); in EmitNativeCode()
128 __ jmp(GetExitLabel()); in EmitNativeCode()
146 __ Bind(GetEntryLabel()); in EmitNativeCode()
170 __ movl(length_loc.AsRegister<Register>(), array_len); in EmitNativeCode()
172 __ shrl(length_loc.AsRegister<Register>(), Immediate(1)); in EmitNativeCode()
[all …]
code_generator_vector_x86.cc
26 #define __ down_cast<X86Assembler*>(GetAssembler())-> // NOLINT macro
68 __ xorps(dst, dst); in VisitVecReplicateScalar()
77 __ movd(dst, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
78 __ punpcklbw(dst, dst); in VisitVecReplicateScalar()
79 __ punpcklwd(dst, dst); in VisitVecReplicateScalar()
80 __ pshufd(dst, dst, Immediate(0)); in VisitVecReplicateScalar()
85 __ movd(dst, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
86 __ punpcklwd(dst, dst); in VisitVecReplicateScalar()
87 __ pshufd(dst, dst, Immediate(0)); in VisitVecReplicateScalar()
91 __ movd(dst, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
[all …]
code_generator_vector_x86_64.cc
26 #define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT macro
63 __ xorps(dst, dst); in VisitVecReplicateScalar()
72 __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false); in VisitVecReplicateScalar()
73 __ punpcklbw(dst, dst); in VisitVecReplicateScalar()
74 __ punpcklwd(dst, dst); in VisitVecReplicateScalar()
75 __ pshufd(dst, dst, Immediate(0)); in VisitVecReplicateScalar()
80 __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false); in VisitVecReplicateScalar()
81 __ punpcklwd(dst, dst); in VisitVecReplicateScalar()
82 __ pshufd(dst, dst, Immediate(0)); in VisitVecReplicateScalar()
86 __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false); in VisitVecReplicateScalar()
[all …]
code_generator_vector_arm64.cc
39 #define __ GetVIXLAssembler()-> macro
91 __ Movi(dst.V16B(), Int64FromLocation(src_loc)); in VisitVecReplicateScalar()
93 __ Dup(dst.V16B(), InputRegisterAt(instruction, 0)); in VisitVecReplicateScalar()
100 __ Movi(dst.V8H(), Int64FromLocation(src_loc)); in VisitVecReplicateScalar()
102 __ Dup(dst.V8H(), InputRegisterAt(instruction, 0)); in VisitVecReplicateScalar()
108 __ Movi(dst.V4S(), Int64FromLocation(src_loc)); in VisitVecReplicateScalar()
110 __ Dup(dst.V4S(), InputRegisterAt(instruction, 0)); in VisitVecReplicateScalar()
116 __ Movi(dst.V2D(), Int64FromLocation(src_loc)); in VisitVecReplicateScalar()
118 __ Dup(dst.V2D(), XRegisterFrom(src_loc)); in VisitVecReplicateScalar()
124 __ Fmov(dst.V4S(), src_loc.GetConstant()->AsFloatConstant()->GetValue()); in VisitVecReplicateScalar()
[all …]
/art/compiler/utils/x86/
jni_macro_assembler_x86.cc
42 #define __ asm_. macro
54 __ pushl(spill); in BuildFrame()
64 __ addl(ESP, Immediate(-adjust)); in BuildFrame()
66 __ pushl(method_reg.AsX86().AsCpuRegister()); in BuildFrame()
73 __ movl(Address(ESP, offset), spill.AsX86().AsCpuRegister()); in BuildFrame()
77 __ movsd(Address(ESP, frame_size + spill.getSpillOffset()), spill.AsX86().AsXmmRegister()); in BuildFrame()
80 __ movss(Address(ESP, frame_size + spill.getSpillOffset()), spill.AsX86().AsXmmRegister()); in BuildFrame()
93 __ addl(ESP, Immediate(adjust)); in RemoveFrame()
97 __ popl(spill); in RemoveFrame()
101 __ ret(); in RemoveFrame()
[all …]
/art/compiler/trampolines/
trampoline_compiler.cc
47 #define __ assembler. macro
85 __ FinalizeCode(); in CreateTrampoline()
86 size_t cs = __ CodeSize(); in CreateTrampoline()
89 __ FinalizeInstructions(code); in CreateTrampoline()
107 __ JumpTo(Arm64ManagedRegister::FromXRegister(X0), Offset(offset.Int32Value()), in CreateTrampoline()
112 __ LoadRawPtr(Arm64ManagedRegister::FromXRegister(IP1), in CreateTrampoline()
116 __ JumpTo(Arm64ManagedRegister::FromXRegister(IP1), Offset(offset.Int32Value()), in CreateTrampoline()
121 __ JumpTo(Arm64ManagedRegister::FromXRegister(TR), Offset(offset.Int32Value()), in CreateTrampoline()
127 __ FinalizeCode(); in CreateTrampoline()
128 size_t cs = __ CodeSize(); in CreateTrampoline()
[all …]
/art/compiler/utils/x86_64/
jni_macro_assembler_x86_64.cc
36 #define __ asm_. macro
49 __ pushq(spill.AsCpuRegister()); in BuildFrame()
59 __ subq(CpuRegister(RSP), Immediate(rest_of_frame)); in BuildFrame()
68 __ movsd(Address(CpuRegister(RSP), offset), spill.AsXmmRegister()); in BuildFrame()
76 __ movq(Address(CpuRegister(RSP), 0), method_reg.AsX86_64().AsCpuRegister()); in BuildFrame()
81 __ movq(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()), in BuildFrame()
85 __ movl(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()), in BuildFrame()
90 __ movsd(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()), in BuildFrame()
94 __ movss(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()), in BuildFrame()
115 __ movsd(spill.AsXmmRegister(), Address(CpuRegister(RSP), offset)); in RemoveFrame()
[all …]
/art/compiler/utils/mips64/
assembler_mips64_test.cc
27 #define __ GetAssembler()-> macro
285 __ Addu(mips64::ZERO, mips64::ZERO, mips64::ZERO); in BranchHelper()
287 __ Bind(&label1); in BranchHelper()
291 __ Addu(mips64::ZERO, mips64::ZERO, mips64::ZERO); in BranchHelper()
293 __ Bind(&label2); in BranchHelper()
295 __ Addu(mips64::ZERO, mips64::ZERO, mips64::ZERO); in BranchHelper()
319 __ Addu(mips64::ZERO, mips64::ZERO, mips64::ZERO); in BranchCondOneRegHelper()
321 __ Bind(&label); in BranchCondOneRegHelper()
324 __ Addu(mips64::ZERO, mips64::ZERO, mips64::ZERO); in BranchCondOneRegHelper()
327 __ Addu(mips64::ZERO, mips64::ZERO, mips64::ZERO); in BranchCondOneRegHelper()
[all …]
/art/runtime/hprof/
hprof.cc
434 #define __ output_-> macro
570 __ AddU4(sn); in WriteClassTable()
571 __ AddObjectId(c); in WriteClassTable()
572 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(c)); in WriteClassTable()
573 __ AddStringId(LookupClassNameId(c)); in WriteClassTable()
588 __ AddU4(id); in WriteStringTable()
589 __ AddUtf8String(string.c_str()); in WriteStringTable()
665 __ AddU1List(reinterpret_cast<const uint8_t*>(magic), sizeof(magic)); in WriteFixedHeader()
672 __ AddU4(sizeof(uint32_t)); in WriteFixedHeader()
680 __ AddU4(static_cast<uint32_t>(nowMs >> 32)); in WriteFixedHeader()
[all …]
/art/compiler/jni/quick/
jni_compiler.cc
50 #define __ jni_asm-> macro
225 __ BuildFrame(frame_size, mr_conv->MethodRegister(), callee_save_regs, mr_conv->EntrySpills()); in ArtJniCompileMethodInternal()
235 __ StoreImmediateToFrame(main_jni_conv->HandleScopeNumRefsOffset(), in ArtJniCompileMethodInternal()
239 __ CopyRawPtrFromThread(main_jni_conv->HandleScopeLinkOffset(), in ArtJniCompileMethodInternal()
242 __ StoreStackOffsetToThread(Thread::TopHandleScopeOffset<kPointerSize>(), in ArtJniCompileMethodInternal()
257 __ LoadRef(main_jni_conv->InterproceduralScratchRegister(), in ArtJniCompileMethodInternal()
259 __ VerifyObject(main_jni_conv->InterproceduralScratchRegister(), false); in ArtJniCompileMethodInternal()
261 __ StoreRef(handle_scope_offset, main_jni_conv->InterproceduralScratchRegister()); in ArtJniCompileMethodInternal()
284 __ VerifyObject(in_reg, mr_conv->IsCurrentArgPossiblyNull()); in ArtJniCompileMethodInternal()
285 __ StoreRef(handle_scope_offset, in_reg); in ArtJniCompileMethodInternal()
[all …]
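Note: several of the intrinsics and code-generator files above rebind `__` within one translation unit, pairing each `#define __` with a later `#undef __` (e.g. `#define __ codegen->GetAssembler()->` for slow-path emission, then `#define __ assembler->` for helper functions). A compact sketch of that rebinding pattern, with hypothetical `Assembler`, `CodeGenerator`, and function names invented for illustration:

```cpp
// Hypothetical stand-ins; the real types are the ART assemblers and codegens.
class Label {};

class Assembler {
 public:
  void Bind(Label* label) { (void)label; }
  void Move(int dst_reg, int src_reg) { (void)dst_reg; (void)src_reg; }
};

class CodeGenerator {
 public:
  Assembler* GetAssembler() { return &assembler_; }
 private:
  Assembler assembler_;
};

// Slow-path code emits through the code generator.
#define __ codegen->GetAssembler()->
void EmitSlowPathEntry(CodeGenerator* codegen, Label* entry_label) {
  __ Bind(entry_label);       // codegen->GetAssembler()->Bind(entry_label);
}
#undef __

// Helper routines receive the assembler directly, so __ is rebound.
#define __ assembler->
void MoveHelper(Assembler* assembler, int trg_reg, int res_reg) {
  __ Move(trg_reg, res_reg);  // assembler->Move(trg_reg, res_reg);
}
#undef __
```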
