/art/compiler/optimizing/
  code_generator_vector_x86.cc
     223  __ movaps(dst, src);  // in VisitVecReduce()
     239  __ movaps(tmp, src);  // in VisitVecReduce()
     240  __ movaps(dst, src);  // in VisitVecReduce()
     337  __ movaps(dst, src);  // in VisitVecAbs()
    1237  is_aligned16 ? __ movaps(reg, address) : __ movups(reg, address);  // in VisitVecLoad()
    1273  is_aligned16 ? __ movaps(address, reg) : __ movups(address, reg);  // in VisitVecStore()

  code_generator_vector_x86_64.cc
     206  __ movaps(dst, src);  // in VisitVecReduce()
     222  __ movaps(tmp, src);  // in VisitVecReduce()
     223  __ movaps(dst, src);  // in VisitVecReduce()
     320  __ movaps(dst, src);  // in VisitVecAbs()
    1210  is_aligned16 ? __ movaps(reg, address) : __ movups(reg, address);  // in VisitVecLoad()
    1246  is_aligned16 ? __ movaps(address, reg) : __ movups(address, reg);  // in VisitVecStore()

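In VisitVecLoad() and VisitVecStore(), both vector code generators emit the aligned movaps only when is_aligned16 holds and fall back to movups otherwise, because movaps faults on a misaligned 16-byte access. A minimal standalone sketch of that choice with SSE intrinsics (LoadVec/StoreVec and the buffer are illustrative names only, not ART code):

    #include <cstdint>
    #include <cstdio>
    #include <xmmintrin.h>  // SSE: _mm_load_ps / _mm_loadu_ps and friends

    // movaps requires a 16-byte-aligned address and faults otherwise;
    // movups accepts any address. This mirrors the is_aligned16 selection
    // in VisitVecLoad()/VisitVecStore(), but is not ART code.
    static __m128 LoadVec(const float* p) {
      bool is_aligned16 = (reinterpret_cast<std::uintptr_t>(p) % 16) == 0;
      return is_aligned16 ? _mm_load_ps(p)     // aligned load   -> movaps
                          : _mm_loadu_ps(p);   // unaligned load -> movups
    }

    static void StoreVec(float* p, __m128 v) {
      bool is_aligned16 = (reinterpret_cast<std::uintptr_t>(p) % 16) == 0;
      if (is_aligned16) {
        _mm_store_ps(p, v);    // aligned store   -> movaps
      } else {
        _mm_storeu_ps(p, v);   // unaligned store -> movups
      }
    }

    int main() {
      alignas(16) float buf[8] = {1, 2, 3, 4, 5, 6, 7, 8};
      __m128 v = LoadVec(buf);   // buf is 16-byte aligned: aligned path
      StoreVec(buf + 1, v);      // buf + 1 is not: unaligned path
      std::printf("%g %g\n", buf[1], buf[4]);
      return 0;
    }
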
  code_generator_x86_64.cc
    1463  __ movaps(dest, source.AsFpuRegister<XmmRegister>());  // in Move()
    5681  __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());  // in EmitMove()
    5789  __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());  // in EmitSwap()

  code_generator_x86.cc
    1253  __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());  // in Move32()
    1303  __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());  // in Move64()
    6242  __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());  // in EmitMove()

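In Move()/Move32()/Move64()/EmitMove()/EmitSwap(), movaps serves as a plain XMM-to-XMM copy: it moves the full 128-bit register regardless of what type the lanes hold. A minimal standalone illustration of that register-to-register form, assuming GCC/Clang extended inline asm on an SSE-capable target (not ART code):

    #include <cstdio>
    #include <xmmintrin.h>

    int main() {
      __m128 src = _mm_set_ps(4.0f, 3.0f, 2.0f, 1.0f);
      __m128 dst;
      // Register-to-register movaps: a full 128-bit copy between XMM registers.
      // AT&T operand order (source first) matches the "movaps %{reg2}, %{reg1}"
      // templates in the assembler tests listed below.
      asm("movaps %1, %0" : "=x"(dst) : "x"(src));
      float out[4];
      _mm_storeu_ps(out, dst);
      std::printf("%g %g %g %g\n", out[0], out[1], out[2], out[3]);
      return 0;
    }
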
/art/compiler/utils/x86/
  assembler_x86_test.cc
     485  DriverStr(RepeatFF(&x86::X86Assembler::movaps, "movaps %{reg2}, %{reg1}"), "movaps");
     489  DriverStr(RepeatFA(&x86::X86Assembler::movaps, "movaps {mem}, %{reg}"), "movaps_load");
     493  DriverStr(RepeatAF(&x86::X86Assembler::movaps, "movaps %{reg}, {mem}"), "movaps_store");

  assembler_x86.h
     382  void movaps(XmmRegister dst, XmmRegister src);     // move
     383  void movaps(XmmRegister dst, const Address& src);  // load aligned
     385  void movaps(const Address& dst, XmmRegister src);  // store aligned

  assembler_x86.cc
     518  void X86Assembler::movaps(XmmRegister dst, XmmRegister src) {
     526  void X86Assembler::movaps(XmmRegister dst, const Address& src) {
     542  void X86Assembler::movaps(const Address& dst, XmmRegister src) {

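These three emitters correspond to MOVAPS's two opcodes: the register-to-register and load forms are typically encoded as 0F 28 /r, the store form as 0F 29 /r. A minimal standalone sketch of the register-to-register encoding for xmm0-xmm7 (illustrative only, not ART's emitter; EncodeMovapsRegReg is a made-up helper):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Encode "movaps xmm_dst, xmm_src" for xmm0-xmm7 (no REX prefix involved).
    // Opcode 0F 28 /r; ModRM with mod=11 selects register-direct operands,
    // reg = destination, rm = source.
    std::vector<uint8_t> EncodeMovapsRegReg(int dst, int src) {
      uint8_t modrm = 0xC0 | ((dst & 7) << 3) | (src & 7);
      return {0x0F, 0x28, modrm};
    }

    int main() {
      // movaps %xmm1, %xmm2 (AT&T) == movaps xmm2, xmm1 (Intel) => 0F 28 D1
      for (uint8_t b : EncodeMovapsRegReg(/*dst=*/2, /*src=*/1)) {
        std::printf("%02X ", b);
      }
      std::printf("\n");
      return 0;
    }
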
/art/compiler/utils/x86_64/
  assembler_x86_64.h
     412  void movaps(XmmRegister dst, XmmRegister src);     // move
     413  void movaps(XmmRegister dst, const Address& src);  // load aligned
     415  void movaps(const Address& dst, XmmRegister src);  // store aligned

  assembler_x86_64_test.cc
    1107  DriverStr(RepeatFF(&x86_64::X86_64Assembler::movaps, "movaps %{reg2}, %{reg1}"), "movaps");
    1111  DriverStr(RepeatAF(&x86_64::X86_64Assembler::movaps, "movaps %{reg}, {mem}"), "movaps_s");
    1115  DriverStr(RepeatFA(&x86_64::X86_64Assembler::movaps, "movaps {mem}, %{reg}"), "movaps_l");

  assembler_x86_64.cc
     501  void X86_64Assembler::movaps(XmmRegister dst, XmmRegister src) {
     510  void X86_64Assembler::movaps(XmmRegister dst, const Address& src) {
     528  void X86_64Assembler::movaps(const Address& dst, XmmRegister src) {

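The x86-64 emitters cover the same three forms; the extra wrinkle on x86-64 is an optional REX prefix so that xmm8-xmm15 can be encoded. A hedged extension of the previous sketch (EncodeMovapsRegReg64 is again a made-up helper, not ART's emitter):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Sketch only: movaps xmm_dst, xmm_src for xmm0-xmm15. A REX prefix
    // (0x40 | R<<2 | B) extends ModRM.reg (REX.R, destination) and ModRM.rm
    // (REX.B, source) so xmm8-xmm15 become reachable; REX.W is unnecessary
    // because movaps always moves the full 128 bits.
    std::vector<uint8_t> EncodeMovapsRegReg64(int dst, int src) {
      std::vector<uint8_t> out;
      uint8_t rex = 0x40 | ((dst >= 8) ? 0x04 : 0x00) | ((src >= 8) ? 0x01 : 0x00);
      if (rex != 0x40) out.push_back(rex);  // emit the prefix only when needed
      out.push_back(0x0F);
      out.push_back(0x28);
      out.push_back(0xC0 | ((dst & 7) << 3) | (src & 7));
      return out;
    }

    int main() {
      // movaps %xmm1, %xmm9 (AT&T) => 44 0F 28 C9
      for (uint8_t b : EncodeMovapsRegReg64(/*dst=*/9, /*src=*/1)) {
        std::printf("%02X ", b);
      }
      std::printf("\n");
      return 0;
    }
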
/art/runtime/arch/x86/
  memcmp16_x86.S
     117  movaps (%esi), %xmm1
     119  movaps 16(%esi), %xmm2

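The matched lines load two consecutive 16-byte chunks from %esi with aligned movaps. As a rough, hedged illustration of the SSE comparison idea behind such a loop (not the actual algorithm, loop structure, or tail handling of memcmp16_x86.S; Block16Equal is a made-up helper):

    #include <cstdint>
    #include <cstdio>
    #include <emmintrin.h>  // SSE2: _mm_cmpeq_epi16, _mm_movemask_epi8

    // Compare one 16-byte-aligned block (eight 16-bit values) of a and b.
    // The aligned load mirrors the movaps loads above; callers must
    // guarantee 16-byte alignment or switch to unaligned loads.
    bool Block16Equal(const uint16_t* a, const uint16_t* b) {
      __m128i va = _mm_load_si128(reinterpret_cast<const __m128i*>(a));
      __m128i vb = _mm_load_si128(reinterpret_cast<const __m128i*>(b));
      __m128i eq = _mm_cmpeq_epi16(va, vb);     // 0xFFFF in every equal lane
      return _mm_movemask_epi8(eq) == 0xFFFF;   // all 16 byte positions equal
    }

    int main() {
      alignas(16) uint16_t x[8] = {1, 2, 3, 4, 5, 6, 7, 8};
      alignas(16) uint16_t y[8] = {1, 2, 3, 4, 5, 6, 7, 9};
      std::printf("%d %d\n", Block16Equal(x, x), Block16Equal(x, y));
      return 0;
    }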