/art/compiler/utils/x86/
D | jni_macro_assembler_x86.cc
      147  __ movl(Address(base.AsCpuRegister(), offs), src.AsCpuRegister());   in Store()
      150  __ movl(Address(base.AsCpuRegister(), offs), src.AsRegisterPairLow());   in Store()
      151  __ movl(Address(base.AsCpuRegister(), FrameOffset(offs.Int32Value()+4)), …   in Store()
      172  __ movl(Address(ESP, dest), src.AsCpuRegister());   in StoreRawPtr()
      179  __ movl(Address(ESP, -32), scratch);   in StoreStackPointerToThread()
      180  __ movl(scratch, ESP);   in StoreStackPointerToThread()
      183  __ movl(scratch, Address(ESP, -32));   in StoreStackPointerToThread()
      203  __ movl(dest.AsCpuRegister(), Address(base.AsCpuRegister(), offs));   in Load()
      206  __ movl(dest.AsRegisterPairLow(), Address(base.AsCpuRegister(), offs));   in Load()
      207  __ movl(dest.AsRegisterPairHigh(), …   in Load()
      [all …]
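The Store()/Load() hits above are the 32-bit JNI spill/fill glue: a 4-byte value moves with one movl, while an 8-byte value lives in a register pair and takes two movl instructions, the low half at offs and the high half at offs + 4. Below is a minimal sketch of those two branches of X86JNIMacroAssembler::Store(), reconstructed from the hits; the surrounding size/register-kind dispatch is paraphrased rather than quoted, and `__` is assumed to forward to the file's X86Assembler.

    // Sketch only: branch structure paraphrased from the hits at lines 147-151.
    void X86JNIMacroAssembler::Store(ManagedRegister mbase, FrameOffset offs,
                                     ManagedRegister msrc, size_t size) {
      X86ManagedRegister base = mbase.AsX86();
      X86ManagedRegister src = msrc.AsX86();
      if (src.IsCpuRegister() && size == 4u) {
        // 4-byte value: a single 32-bit store to [base + offs].
        __ movl(Address(base.AsCpuRegister(), offs), src.AsCpuRegister());
      } else if (src.IsRegisterPair() && size == 8u) {
        // 8-byte value on x86-32: store the register pair as two 32-bit halves.
        __ movl(Address(base.AsCpuRegister(), offs), src.AsRegisterPairLow());
        __ movl(Address(base.AsCpuRegister(), FrameOffset(offs.Int32Value() + 4)),
                src.AsRegisterPairHigh());
      }
      // Other register kinds (x87/XMM) are handled by branches not shown here.
    }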
D | assembler_x86.cc
      140  void X86Assembler::movl(Register dst, const Immediate& imm) {   definition in art::x86::X86Assembler
      147  void X86Assembler::movl(Register dst, Register src) {   definition in art::x86::X86Assembler
      154  void X86Assembler::movl(Register dst, const Address& src) {   definition in art::x86::X86Assembler
      161  void X86Assembler::movl(const Address& dst, Register src) {   definition in art::x86::X86Assembler
      168  void X86Assembler::movl(const Address& dst, const Immediate& imm) {   definition in art::x86::X86Assembler
      175  void X86Assembler::movl(const Address& dst, Label* lbl) {   definition in art::x86::X86Assembler
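Taken together, the six definitions above are the movl overload set of the 32-bit assembler, one per operand combination the encoder supports. For reference, this is the declaration shape they imply, a sketch of what assembler_x86.h would be expected to declare rather than a quote from it; the purpose noted for the Label* overload is an assumption based on its signature.

    class X86Assembler /* : public Assembler */ {
     public:
      void movl(Register dst, const Immediate& imm);        // mov r32, imm32
      void movl(Register dst, Register src);                 // mov r32, r32
      void movl(Register dst, const Address& src);           // mov r32, m32  (load)
      void movl(const Address& dst, Register src);           // mov m32, r32  (store)
      void movl(const Address& dst, const Immediate& imm);   // mov m32, imm32
      void movl(const Address& dst, Label* lbl);             // store a label value to be patched later (assumption)
      // ...
    };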
/art/compiler/optimizing/
D | code_generator_x86.cc
      136  __ movl(reg_, Immediate(0));   in EmitNativeCode()
      177  __ movl(length_arg.AsRegister<Register>(), …   in EmitNativeCode()
      183  __ movl(length_arg.AsRegister<Register>(), …   in EmitNativeCode()
      195  __ movl(length_arg.AsRegister<Register>(), …   in EmitNativeCode()
      281  __ movl(calling_convention.GetRegisterAt(0), Immediate(string_index.index_));   in EmitNativeCode()
      322  __ movl(calling_convention.GetRegisterAt(0), Immediate(type_index.index_));   in EmitNativeCode()
      621  __ movl(temp_, ref_reg);   in EmitNativeCode()
      668  __ movl(EAX, temp_);   in EmitNativeCode()
      682  __ movl(value, base);   in EmitNativeCode()
      818  __ movl(free_reg, index_reg);   in EmitNativeCode()
      [all …]
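Lines 281 and 322 are the recurring slow-path idiom on x86: the dex index (string or type) is materialized into the first runtime-calling-convention register immediately before the runtime call. A rough sketch of that shape follows, using the usual ART slow-path scaffolding; the entrypoint name and the save/restore calls are the conventional idiom, not quoted from this listing.

    // Sketch of a LoadString-style slow path built around the movl at line 281.
    void EmitNativeCode(CodeGenerator* codegen) override {
      CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
      __ Bind(GetEntryLabel());
      SaveLiveRegisters(codegen, instruction_->GetLocations());

      InvokeRuntimeCallingConvention calling_convention;
      const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
      // Pass the dex string index in the first argument register.
      __ movl(calling_convention.GetRegisterAt(0), Immediate(string_index.index_));
      x86_codegen->InvokeRuntime(kQuickResolveString, instruction_,
                                 instruction_->GetDexPc(), this);

      RestoreLiveRegisters(codegen, instruction_->GetLocations());
      __ jmp(GetExitLabel());
    }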
D | code_generator_x86_64.cc
      228  __ movl(length_arg.AsRegister<CpuRegister>(), array_len);   in EmitNativeCode()
      233  __ movl(length_arg.AsRegister<CpuRegister>(), array_len);   in EmitNativeCode()
      237  __ movl(CpuRegister(TMP), array_len);   in EmitNativeCode()
      240  __ movl(length_arg.AsRegister<CpuRegister>(), CpuRegister(TMP));   in EmitNativeCode()
      287  __ movl(CpuRegister(RAX), Immediate(proto_index.index_));   in EmitNativeCode()
      331  __ movl(CpuRegister(RAX), Immediate(type_index.index_));   in EmitNativeCode()
      383  __ movl(CpuRegister(RAX), Immediate(string_index.index_));   in EmitNativeCode()
      667  __ movl(temp1_, ref_cpu_reg);   in EmitNativeCode()
      715  __ movl(CpuRegister(RAX), temp1_);   in EmitNativeCode()
      730  __ movl(CpuRegister(value_reg), base);   in EmitNativeCode()
      [all …]
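Lines 228-240 show the same slow-path argument shuffling on x86-64, with one extra wrinkle: when the array length cannot be moved into the argument register directly, it is staged through the scratch register TMP. A sketch of that fallback is shown below, with the guarding condition left as an obviously hypothetical placeholder.

    // Sketch only; `length_fits_directly` stands in for the real location checks.
    if (length_fits_directly) {
      __ movl(length_arg.AsRegister<CpuRegister>(), array_len);          // lines 228/233
    } else {
      __ movl(CpuRegister(TMP), array_len);                              // line 237
      __ movl(length_arg.AsRegister<CpuRegister>(), CpuRegister(TMP));   // line 240
    }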
D | intrinsics_x86.cc
      123  __ movl(value, Address(src_curr_addr, 0));   in EmitNativeCode()
      136  __ movl(Address(dst_curr_addr, 0), value);   in EmitNativeCode()
      294  __ movl(output_lo, input_hi);   in VisitLongReverseBytes()
      295  __ movl(output_hi, input_lo);   in VisitLongReverseBytes()
      436  __ movl(out, Immediate(kPrimIntMax));   in VisitMathRoundFloat()
      440  __ movl(out, Immediate(0));  // does not change flags   in VisitMathRoundFloat()
      541  __ movl(out_lo, src_lo);   in GenLowestOneBit()
      542  __ movl(out_hi, src_hi);   in GenLowestOneBit()
      558  __ movl(out, src.AsRegister<Register>());   in GenLowestOneBit()
      561  __ movl(out, Address(ESP, src.GetStackIndex()));   in GenLowestOneBit()
      [all …]
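The VisitLongReverseBytes() hits (lines 294-295) capture the 32-bit trick for reversing the bytes of a 64-bit long: swap the two halves of the register pair with movl, then byte-swap each half. A sketch follows, assuming bswapl is the X86Assembler mnemonic for BSWAP r32 and that the input/output register pairs are already allocated.

    __ movl(output_lo, input_hi);   // low output half  <- high input half
    __ movl(output_hi, input_lo);   // high output half <- low input half
    __ bswapl(output_lo);           // reverse the bytes within each 32-bit half
    __ bswapl(output_hi);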
D | intrinsics_x86_64.cc
      119  __ movl(CpuRegister(TMP), Address(src_curr_addr, 0));   in EmitNativeCode()
      130  __ movl(Address(dst_curr_addr, 0), CpuRegister(TMP));   in EmitNativeCode()
      263  __ movl(output, Immediate(1));   in GenIsInfinite()
      269  __ movl(output, Immediate(1));   in GenIsInfinite()
      403  __ movl(out, Immediate(0));  // does not change flags   in VisitMathRoundFloat()
      444  __ movl(out, Immediate(0));  // does not change flags, implicit zero extension to 64-bit   in VisitMathRoundDouble()
      710  __ movl(temp, Address(array, length_offset));   in CheckSystemArrayCopyPosition()
      732  __ movl(temp, Address(array, length_offset));   in CheckSystemArrayCopyPosition()
      803  __ movl(count, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));   in SystemArrayCopyPrimitive()
      805  __ movl(count, length.AsRegister<CpuRegister>());   in SystemArrayCopyPrimitive()
      [all …]
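The "does not change flags" comments on lines 403 and 444 explain why movl is used for zeroing here instead of the usual xor idiom: the rounding sequence has already set EFLAGS with a floating-point compare, and XOR would clobber them while MOV leaves them intact (on x86-64 the 32-bit write also zero-extends into the full register, per the line 444 comment). A sketch of that ordering, with hypothetical register and label names:

    __ comiss(input, zero);       // sets EFLAGS
    __ movl(out, Immediate(0));   // default result; deliberately not xorl, so the flags survive
    __ j(kBelow, &done);          // the branch still sees the comiss result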
/art/compiler/utils/x86_64/
D | jni_macro_assembler_x86_64.cc
      177  __ movl(Address(base.AsCpuRegister(), offs), src.AsCpuRegister());   in Store()
      230  __ movl(dest.AsCpuRegister(), Address(base.AsCpuRegister(), offs));   in Load()
      417  __ movl(scratch, Address(CpuRegister(RSP), src));   in Copy()
      418  __ movl(Address(CpuRegister(RSP), dest), scratch);   in Copy()
      432  __ movl(in_reg.AsCpuRegister(), Address(CpuRegister(RSP), spilled_reference_offset));   in CreateJObject()
      457  __ movl(scratch, Address(CpuRegister(RSP), spilled_reference_offset));   in CreateJObject()
      478  __ movl(reg.AsX86_64().AsCpuRegister(), Address(reg.AsX86_64().AsCpuRegister(), /*disp=*/ 0));   in DecodeJNITransitionOrLocalJObject()
      531  __ movl(scratch, Immediate(kNativeStateValue));   in TryToTransitionFromRunnableToNative()
      568  __ movl(rax, Immediate(kNativeStateValue));   in TryToTransitionFromNativeToRunnable()
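Two of these hits are worth calling out. The Copy() pair at lines 417-418 reflects the classic x86 limitation that there is no memory-to-memory mov, so a 4-byte stack-to-stack copy bounces through a scratch register:

    __ movl(scratch, Address(CpuRegister(RSP), src));    // load 32 bits from [rsp + src]
    __ movl(Address(CpuRegister(RSP), dest), scratch);   // store them at [rsp + dest]

The CreateJObject() and DecodeJNITransitionOrLocalJObject() hits move 32-bit values even though the process is 64-bit, which is consistent with ART's heap references being 32 bits wide; movq would only be needed for full pointers.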
D | assembler_x86_64.cc
      158  void X86_64Assembler::movl(CpuRegister dst, const Immediate& imm) {   definition in art::x86_64::X86_64Assembler
      186  void X86_64Assembler::movl(CpuRegister dst, CpuRegister src) {   definition in art::x86_64::X86_64Assembler
      202  void X86_64Assembler::movl(CpuRegister dst, const Address& src) {   definition in art::x86_64::X86_64Assembler
      218  void X86_64Assembler::movl(const Address& dst, CpuRegister src) {   definition in art::x86_64::X86_64Assembler
      225  void X86_64Assembler::movl(const Address& dst, const Immediate& imm) {   definition in art::x86_64::X86_64Assembler
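As with the 32-bit assembler, these five definitions appear to be the movl overload set here (no Label* variant is listed). One x86-64 detail worth remembering when reading the call sites above: a 32-bit register write implicitly zero-extends into the full 64-bit register, which the comment on intrinsics_x86_64.cc line 444 relies on. Below is a sketch of the declarations the definitions imply, what assembler_x86_64.h would be expected to declare rather than a quote from it.

    class X86_64Assembler /* : public Assembler */ {
     public:
      void movl(CpuRegister dst, const Immediate& imm);      // mov r32, imm32
      void movl(CpuRegister dst, CpuRegister src);           // mov r32, r32
      void movl(CpuRegister dst, const Address& src);        // mov r32, m32  (load)
      void movl(const Address& dst, CpuRegister src);        // mov m32, r32  (store)
      void movl(const Address& dst, const Immediate& imm);   // mov m32, imm32
      // ...
    };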