/art/compiler/utils/x86_64/ |
D | assembler_x86_64.cc |
  141  void X86_64Assembler::movq(CpuRegister dst, CpuRegister src) {  in movq() argument
  144    EmitRex64(src, dst);  in movq()
  146    EmitRegisterOperand(src.LowBits(), dst.LowBits());  in movq()
  150  void X86_64Assembler::movl(CpuRegister dst, CpuRegister src) {  in movl() argument
  152    EmitOptionalRex32(dst, src);  in movl()
  154    EmitRegisterOperand(dst.LowBits(), src.LowBits());  in movl()
  158  void X86_64Assembler::movq(CpuRegister dst, const Address& src) {  in movq() argument
  160    EmitRex64(dst, src);  in movq()
  162    EmitOperand(dst.LowBits(), src);  in movq()
  166  void X86_64Assembler::movl(CpuRegister dst, const Address& src) {  in movl() argument
  [all …]
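The register-to-register movq(dst, src) above emits a REX.W prefix and a ModRM byte with the source in the reg field, which is why the emitter calls EmitRex64(src, dst) and EmitRegisterOperand(src, dst) with the operands reversed; the sketch below assumes the standard 0x89 (MOV r/m64, r64) opcode byte, which falls outside the truncated excerpt. This is a minimal standalone illustration, not the ART emitter itself:

    #include <cstdint>
    #include <vector>

    // Hypothetical helper: encode "mov <dst64>, <src64>" the way the listing's
    // movq(dst, src) register form does (REX.W, 0x89, ModRM with src in reg).
    std::vector<uint8_t> EncodeMovqRegReg(int dst, int src) {  // dst, src in 0..15
      std::vector<uint8_t> out;
      uint8_t rex = 0x48;                                    // REX.W: 64-bit operand size
      if (src >= 8) rex |= 0x04;                             // REX.R extends ModRM.reg (source)
      if (dst >= 8) rex |= 0x01;                             // REX.B extends ModRM.rm (destination)
      out.push_back(rex);
      out.push_back(0x89);                                   // MOV r/m64, r64
      out.push_back(0xC0 | ((src & 7) << 3) | (dst & 7));    // mod=11: register-direct
      return out;
    }
    // EncodeMovqRegReg(7, 0) yields {0x48, 0x89, 0xC7}, i.e. "mov rdi, rax".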
|
D | assembler_x86_64.h |
  355  void movq(CpuRegister dst, const Immediate& src);
  356  void movl(CpuRegister dst, const Immediate& src);
  357  void movq(CpuRegister dst, CpuRegister src);
  358  void movl(CpuRegister dst, CpuRegister src);
  360  void movntl(const Address& dst, CpuRegister src);
  361  void movntq(const Address& dst, CpuRegister src);
  363  void movq(CpuRegister dst, const Address& src);
  364  void movl(CpuRegister dst, const Address& src);
  365  void movq(const Address& dst, CpuRegister src);
  367  void movl(const Address& dst, CpuRegister src);
  [all …]
|
D | jni_macro_assembler_x86_64.h |
  59   void Store(FrameOffset offs, ManagedRegister src, size_t size) OVERRIDE;
  60   void StoreRef(FrameOffset dest, ManagedRegister src) OVERRIDE;
  61   void StoreRawPtr(FrameOffset dest, ManagedRegister src) OVERRIDE;
  72   ManagedRegister src,
  77   void Load(ManagedRegister dest, FrameOffset src, size_t size) OVERRIDE;
  79   void LoadFromThread(ManagedRegister dest, ThreadOffset64 src, size_t size) OVERRIDE;
  81   void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
  93   void Move(ManagedRegister dest, ManagedRegister src, size_t size);
  102  void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) OVERRIDE;
  104  void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) OVERRIDE;
  [all …]
|
D | jni_macro_assembler_x86_64.cc |
  155  X86_64ManagedRegister src = msrc.AsX86_64();  in Store() local
  156  if (src.IsNoRegister()) {  in Store()
  158  } else if (src.IsCpuRegister()) {  in Store()
  161    __ movl(Address(CpuRegister(RSP), offs), src.AsCpuRegister());  in Store()
  164    __ movq(Address(CpuRegister(RSP), offs), src.AsCpuRegister());  in Store()
  166  } else if (src.IsRegisterPair()) {  in Store()
  168    __ movq(Address(CpuRegister(RSP), offs), src.AsRegisterPairLow());  in Store()
  170      src.AsRegisterPairHigh());  in Store()
  171  } else if (src.IsX87Register()) {  in Store()
  178    CHECK(src.IsXmmRegister());  in Store()
  [all …]
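Store() above picks the spill instruction from the kind of managed register: nothing for a no-register, movl or movq for a general-purpose register depending on the requested size, two stores for a register pair, and x87/XMM stores otherwise. A rough, illustrative sketch of that dispatch shape, with simplified stand-in types rather than ART's ManagedRegister API:

    #include <cstddef>
    #include <cstdio>

    enum class RegKind { kNone, kCpu, kPair, kX87, kXmm };

    // Illustrative only: prints the kind of instruction the real assembler would emit.
    void StoreSketch(int frame_offset, RegKind kind, size_t size) {
      switch (kind) {
        case RegKind::kNone:                              // nothing to spill
          break;
        case RegKind::kCpu:                               // movl for 4 bytes, movq for 8
          std::printf(size == 4u ? "movl [rsp+%d], reg\n" : "movq [rsp+%d], reg\n", frame_offset);
          break;
        case RegKind::kPair:                              // low and high halves in consecutive slots
          std::printf("movq [rsp+%d], lo\n", frame_offset);
          std::printf("movq [rsp+%d], hi\n", frame_offset + 8);  // second-slot spacing assumed
          break;
        case RegKind::kX87:                               // fstp / movss / movsd variants, by width
        case RegKind::kXmm:
          std::printf("fp store of %zu bytes at [rsp+%d]\n", size, frame_offset);
          break;
      }
    }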
|
/art/compiler/utils/x86/ |
D | assembler_x86.h |
  326  void movl(Register dst, const Immediate& src);
  327  void movl(Register dst, Register src);
  329  void movl(Register dst, const Address& src);
  330  void movl(const Address& dst, Register src);
  334  void movntl(const Address& dst, Register src);
  338  void bsfl(Register dst, Register src);
  339  void bsfl(Register dst, const Address& src);
  340  void bsrl(Register dst, Register src);
  341  void bsrl(Register dst, const Address& src);
  343  void popcntl(Register dst, Register src);
  [all …]
|
D | assembler_x86.cc |
  113  void X86Assembler::movl(Register dst, Register src) {  in movl() argument
  116    EmitRegisterOperand(src, dst);  in movl()
  120  void X86Assembler::movl(Register dst, const Address& src) {  in movl() argument
  123    EmitOperand(dst, src);  in movl()
  127  void X86Assembler::movl(const Address& dst, Register src) {  in movl() argument
  130    EmitOperand(src, dst);  in movl()
  148  void X86Assembler::movntl(const Address& dst, Register src) {  in movntl() argument
  152    EmitOperand(src, dst);  in movntl()
  161  void X86Assembler::bsfl(Register dst, Register src) {  in bsfl() argument
  165    EmitRegisterOperand(dst, src);  in bsfl()
  [all …]
|
D | jni_macro_assembler_x86.cc |
  124  X86ManagedRegister src = msrc.AsX86();  in Store() local
  125  if (src.IsNoRegister()) {  in Store()
  127  } else if (src.IsCpuRegister()) {  in Store()
  129    __ movl(Address(ESP, offs), src.AsCpuRegister());  in Store()
  130  } else if (src.IsRegisterPair()) {  in Store()
  132    __ movl(Address(ESP, offs), src.AsRegisterPairLow());  in Store()
  133    __ movl(Address(ESP, FrameOffset(offs.Int32Value()+4)), src.AsRegisterPairHigh());  in Store()
  134  } else if (src.IsX87Register()) {  in Store()
  141    CHECK(src.IsXmmRegister());  in Store()
  143    __ movss(Address(ESP, offs), src.AsXmmRegister());  in Store()
  [all …]
|
D | jni_macro_assembler_x86.h |
  58   void Store(FrameOffset offs, ManagedRegister src, size_t size) OVERRIDE;
  59   void StoreRef(FrameOffset dest, ManagedRegister src) OVERRIDE;
  60   void StoreRawPtr(FrameOffset dest, ManagedRegister src) OVERRIDE;
  70   void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off,
  74   void Load(ManagedRegister dest, FrameOffset src, size_t size) OVERRIDE;
  76   void LoadFromThread(ManagedRegister dest, ThreadOffset32 src, size_t size) OVERRIDE;
  78   void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
  88   void Move(ManagedRegister dest, ManagedRegister src, size_t size) OVERRIDE;
  97   void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) OVERRIDE;
  99   void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) OVERRIDE;
  [all …]
|
/art/runtime/ |
D | reflection-inl.h |
  35   const JValue& src,  in ConvertPrimitiveValueNoThrow() argument
  39     dst->SetJ(src.GetJ());  in ConvertPrimitiveValueNoThrow()
  50     dst->SetS(src.GetI());  in ConvertPrimitiveValueNoThrow()
  57     dst->SetI(src.GetI());  in ConvertPrimitiveValueNoThrow()
  64     dst->SetJ(src.GetI());  in ConvertPrimitiveValueNoThrow()
  71     dst->SetF(src.GetI());  in ConvertPrimitiveValueNoThrow()
  74     dst->SetF(src.GetJ());  in ConvertPrimitiveValueNoThrow()
  81     dst->SetD(src.GetI());  in ConvertPrimitiveValueNoThrow()
  84     dst->SetD(src.GetJ());  in ConvertPrimitiveValueNoThrow()
  87     dst->SetD(src.GetF());  in ConvertPrimitiveValueNoThrow()
  [all …]
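ConvertPrimitiveValueNoThrow() above implements Java's widening primitive conversions as a switch over the source and destination types, e.g. SetS(src.GetI()) at line 50 and the long/float/double widenings from an int source at lines 64, 71 and 81. A small sketch of those int-source cases, using a plain union instead of ART's JValue; the helper name and 'J'/'F'/'D' type codes are borrowed from JNI shorthand for illustration:

    #include <cstdint>

    union Value { int32_t i; int64_t j; float f; double d; };

    // Hypothetical helper: widen a 32-bit int into the requested destination type.
    bool WidenIntTo(char dst_type, Value src, Value* dst) {
      switch (dst_type) {
        case 'J': dst->j = src.i; return true;   // int -> long   (dst->SetJ(src.GetI()))
        case 'F': dst->f = src.i; return true;   // int -> float  (dst->SetF(src.GetI()))
        case 'D': dst->d = src.i; return true;   // int -> double (dst->SetD(src.GetI()))
        default:  return false;                  // narrowing conversions are rejected
      }
    }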
|
/art/test/etc/ |
D | default-build |
  34   if [ -d src ]; then
  41   if [ -d src-art ]; then
  53   if [ -d src-multidex ]; then
  72   if [ -d src-ex ]; then
  78   if [ -d src-dex2oat-unresolved ]; then
  335  …${JAVAC} ${JAVAC_ARGS} -implicit:none -sourcepath src-dex2oat-unresolved -d classes `find src -nam…
  336  …${JAVAC} ${JAVAC_ARGS} -implicit:none -sourcepath src -d classes-ex `find src-dex2oat-unresolved -…
  359  … ${JACK} ${JACK_ARGS} --output-jack src.jack $(maybe_dir src) src-multidex $(maybe_dir src-art)
  364  ${JACK} ${JACK_ARGS} --output-jack src.jack $(maybe_dir src) $(maybe_dir src-art)
  385  … ${JAVAC} ${JAVAC_ARGS} -implicit:none -classpath src-multidex -d classes `find src -name '*.java'`
  [all …]
|
/art/compiler/utils/arm/ |
D | jni_macro_assembler_arm_vixl.cc |
  179  ArmManagedRegister src = m_src.AsArm();  in Store() local
  180  if (src.IsNoRegister()) {  in Store()
  182  } else if (src.IsCoreRegister()) {  in Store()
  185    temps.Exclude(src.AsVIXLRegister());  in Store()
  186    asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());  in Store()
  187  } else if (src.IsRegisterPair()) {  in Store()
  189    asm_.StoreToOffset(kStoreWord, src.AsVIXLRegisterPairLow(), sp, dest.Int32Value());  in Store()
  190    asm_.StoreToOffset(kStoreWord, src.AsVIXLRegisterPairHigh(), sp, dest.Int32Value() + 4);  in Store()
  191  } else if (src.IsSRegister()) {  in Store()
  193    asm_.StoreSToOffset(src.AsVIXLSRegister(), sp, dest.Int32Value());  in Store()
  [all …]
|
D | jni_macro_assembler_arm_vixl.h |
  63   void Store(FrameOffset offs, ManagedRegister src, size_t size) OVERRIDE;
  64   void StoreRef(FrameOffset dest, ManagedRegister src) OVERRIDE;
  65   void StoreRawPtr(FrameOffset dest, ManagedRegister src) OVERRIDE;
  76   ManagedRegister src,
  81   void Load(ManagedRegister dest, FrameOffset src, size_t size) OVERRIDE;
  84   ThreadOffset32 src,
  87   void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
  99   void Move(ManagedRegister dest, ManagedRegister src, size_t size) OVERRIDE;
  109  void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) OVERRIDE;
  111  void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) OVERRIDE;
  [all …]
|
/art/compiler/optimizing/ |
D | code_generator_vector_x86.cc |
  144  XmmRegister src = locations->InAt(0).AsFpuRegister<XmmRegister>();  in VisitVecCnv() local
  150    __ cvtdq2ps(dst, src);  in VisitVecCnv()
  162  XmmRegister src = locations->InAt(0).AsFpuRegister<XmmRegister>();  in VisitVecNeg() local
  168    __ psubb(dst, src);  in VisitVecNeg()
  174    __ psubw(dst, src);  in VisitVecNeg()
  179    __ psubd(dst, src);  in VisitVecNeg()
  184    __ psubq(dst, src);  in VisitVecNeg()
  189    __ subps(dst, src);  in VisitVecNeg()
  194    __ subpd(dst, src);  in VisitVecNeg()
  212  XmmRegister src = locations->InAt(0).AsFpuRegister<XmmRegister>();  in VisitVecAbs() local
  [all …]
|
D | code_generator_vector_x86_64.cc |
  137  XmmRegister src = locations->InAt(0).AsFpuRegister<XmmRegister>();  in VisitVecCnv() local
  143    __ cvtdq2ps(dst, src);  in VisitVecCnv()
  155  XmmRegister src = locations->InAt(0).AsFpuRegister<XmmRegister>();  in VisitVecNeg() local
  161    __ psubb(dst, src);  in VisitVecNeg()
  167    __ psubw(dst, src);  in VisitVecNeg()
  172    __ psubd(dst, src);  in VisitVecNeg()
  177    __ psubq(dst, src);  in VisitVecNeg()
  182    __ subps(dst, src);  in VisitVecNeg()
  187    __ subpd(dst, src);  in VisitVecNeg()
  205  XmmRegister src = locations->InAt(0).AsFpuRegister<XmmRegister>();  in VisitVecAbs() local
  [all …]
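Both VisitVecNeg() listings above lower vector negation as a subtraction of the source from a zeroed destination (0 - src), picking the psub/subps/subpd variant that matches the packed element type; the zeroing instruction itself falls outside the excerpts. The same trick written with SSE2 intrinsics, for the 32-bit integer lane case:

    #include <emmintrin.h>

    // dst = 0 - x for four packed int32 lanes, mirroring "zero dst; psubd dst, src".
    __m128i NegateInt32x4(__m128i x) {
      __m128i zero = _mm_setzero_si128();
      return _mm_sub_epi32(zero, x);
    }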
|
/art/runtime/base/ |
D | bit_vector.cc |
  51   BitVector::BitVector(const BitVector& src,  in BitVector() argument
  56     src.storage_size_,  in BitVector()
  57     static_cast<uint32_t*>(allocator->Alloc(src.storage_size_ * kWordBytes)))  in BitVector()
  59     Copy(&src);  in BitVector()
  66   bool BitVector::SameBitsSet(const BitVector *src) const {  in SameBitsSet()
  68     int src_highest = src->GetHighestBitSet();  in SameBitsSet()
  89     return (memcmp(storage_, src->GetRawStorage(), our_highest_index * kWordBytes) == 0);  in SameBitsSet()
  118  void BitVector::Intersect(const BitVector* src) {  in Intersect() argument
  119    uint32_t src_storage_size = src->storage_size_;  in Intersect()
  126    storage_[idx] &= src->GetRawStorageWord(idx);  in Intersect()
  [all …]
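Intersect() above ANDs this vector's storage words with src's, word by word (line 126). A minimal sketch of that word-wise intersection over plain uint32_t storage; the trailing-word clearing in the second loop is assumed from the semantics of set intersection rather than visible in the excerpt:

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Word-wise intersection sketch (not ART's BitVector): dst &= src.
    void IntersectBits(std::vector<uint32_t>& dst, const std::vector<uint32_t>& src) {
      const std::size_t common = std::min(dst.size(), src.size());
      for (std::size_t i = 0; i < common; ++i) {
        dst[i] &= src[i];        // keep only bits set in both vectors
      }
      for (std::size_t i = common; i < dst.size(); ++i) {
        dst[i] = 0;              // bits with no counterpart word in src cannot survive
      }
    }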
|
/art/runtime/mirror/ |
D | object_array-inl.h |
  132  ObjPtr<ObjectArray<T>> src,  in AssignableMemmove() argument
  138    src->GetWithoutChecks(src_pos + i);  in AssignableMemmove()
  147  const bool copy_forward = (src != this) || (dst_pos < src_pos) || (dst_pos - src_pos >= count);  in AssignableMemmove()
  153  if (!ReadBarrier::IsGray(src.Ptr(), &fake_address_dependency)) {  in AssignableMemmove()
  156    src.Assign(reinterpret_cast<ObjectArray<T>*>(  in AssignableMemmove()
  157      reinterpret_cast<uintptr_t>(src.Ptr()) | fake_address_dependency));  in AssignableMemmove()
  160    T* obj = src->template GetWithoutChecks<kDefaultVerifyFlags, kWithoutReadBarrier>(  in AssignableMemmove()
  169    T* obj = src->GetWithoutChecks(src_pos + i);  in AssignableMemmove()
  178  if (!ReadBarrier::IsGray(src.Ptr(), &fake_address_dependency)) {  in AssignableMemmove()
  181    src.Assign(reinterpret_cast<ObjectArray<T>*>(  in AssignableMemmove()
  [all …]
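Line 147 above decides whether AssignableMemmove() may copy forward: always for distinct arrays, and within a single array only when the destination does not start inside the yet-to-be-copied source range. The same predicate in isolation, with the arrays identified by pointer instead of `this`:

    #include <cstddef>

    // Mirrors the copy_forward condition at line 147 of the listing.
    bool CopyForwardIsSafe(const void* dst_array, const void* src_array,
                           std::size_t dst_pos, std::size_t src_pos, std::size_t count) {
      return (dst_array != src_array) ||
             (dst_pos < src_pos) ||
             (dst_pos - src_pos >= count);
    }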
|
/art/test/971-iface-super/ |
D | build |
  25   mkdir -p src
  31   ./util-src/generate_java.py ./javac_exec.sh ./src ./classes ./expected.txt ./build_log
  35   ./util-src/generate_smali.py ./smali ./expected.txt
|
/art/test/968-default-partial-compile-gen/ |
D | build |
  25   mkdir -p src
  31   ./util-src/generate_java.py ./javac_exec.sh ./src ./classes ./expected.txt ./build_log
  35   ./util-src/generate_smali.py ./smali ./expected.txt
|
/art/compiler/utils/arm64/ |
D | jni_macro_assembler_arm64.h |
  66   void Store(FrameOffset offs, ManagedRegister src, size_t size) OVERRIDE;
  67   void StoreRef(FrameOffset dest, ManagedRegister src) OVERRIDE;
  68   void StoreRawPtr(FrameOffset dest, ManagedRegister src) OVERRIDE;
  75   ManagedRegister src,
  80   void Load(ManagedRegister dest, FrameOffset src, size_t size) OVERRIDE;
  81   void LoadFromThread(ManagedRegister dest, ThreadOffset64 src, size_t size) OVERRIDE;
  82   void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
  91   void Move(ManagedRegister dest, ManagedRegister src, size_t size) OVERRIDE;
  97   void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) OVERRIDE;
  98   void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) OVERRIDE;
  [all …]
|
D | jni_macro_assembler_arm64.cc |
  128  Arm64ManagedRegister src = m_src.AsArm64();  in Store() local
  129  if (src.IsNoRegister()) {  in Store()
  131  } else if (src.IsWRegister()) {  in Store()
  133    StoreWToOffset(kStoreWord, src.AsWRegister(), SP, offs.Int32Value());  in Store()
  134  } else if (src.IsXRegister()) {  in Store()
  136    StoreToOffset(src.AsXRegister(), SP, offs.Int32Value());  in Store()
  137  } else if (src.IsSRegister()) {  in Store()
  138    StoreSToOffset(src.AsSRegister(), SP, offs.Int32Value());  in Store()
  140    CHECK(src.IsDRegister()) << src;  in Store()
  141    StoreDToOffset(src.AsDRegister(), SP, offs.Int32Value());  in Store()
  [all …]
|
/art/compiler/utils/ |
D | jni_macro_assembler.h |
  75   virtual void Store(FrameOffset offs, ManagedRegister src, size_t size) = 0;
  76   virtual void StoreRef(FrameOffset dest, ManagedRegister src) = 0;
  77   virtual void StoreRawPtr(FrameOffset dest, ManagedRegister src) = 0;
  88   ManagedRegister src,
  93   virtual void Load(ManagedRegister dest, FrameOffset src, size_t size) = 0;
  96   ThreadOffset<kPointerSize> src,
  99   virtual void LoadRef(ManagedRegister dest, FrameOffset src) = 0;
  111  virtual void Move(ManagedRegister dest, ManagedRegister src, size_t size) = 0;
  121  virtual void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) = 0;
  123  virtual void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) = 0;
  [all …]
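The pure-virtual Store/Load/Copy methods above form the architecture-neutral JNI macro assembler interface that the x86, x86-64, ARM and ARM64 files in this listing override (ART's OVERRIDE macro expands to C++11 override). A hypothetical, heavily simplified skeleton of that arrangement, with stand-in types rather than ART's FrameOffset/ManagedRegister:

    #include <cstddef>

    // Simplified stand-ins; not ART's real class hierarchy.
    class JniMacroAssemblerSketch {
     public:
      virtual ~JniMacroAssemblerSketch() {}
      virtual void Store(int frame_offset, int managed_reg, std::size_t size) = 0;
      virtual void Load(int managed_reg, int frame_offset, std::size_t size) = 0;
    };

    class X86JniMacroAssemblerSketch final : public JniMacroAssemblerSketch {
     public:
      void Store(int frame_offset, int managed_reg, std::size_t size) override {
        // would emit a store of `size` bytes from the register to [esp + frame_offset]
        (void)frame_offset; (void)managed_reg; (void)size;
      }
      void Load(int managed_reg, int frame_offset, std::size_t size) override {
        // would emit the matching load
        (void)managed_reg; (void)frame_offset; (void)size;
      }
    };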
|
/art/test/970-iface-super-resolution-gen/ |
D | build |
  34   mkdir -p src
  36   ./util-src/generate_java.py ./src2 ./src ./expected.txt
  40   ./util-src/generate_smali.py ./smali ./expected.txt
|
/art/test/648-many-direct-methods/ |
D | build |
  20   mkdir -p ./src
  23   ./util-src/generate_java.py ./src
|
/art/test/964-default-iface-init-gen/ |
D | build |
  20   mkdir -p ./src
  23   ./util-src/generate_java.py ./src ./expected.txt
|
/art/test/961-default-iface-resolution-gen/ |
D | build |
  20   mkdir -p ./src
  23   ./util-src/generate_java.py ./src ./expected.txt
|