/art/compiler/utils/x86_64/ |
D | assembler_x86_64.h |
  379  void movq(CpuRegister dst, const Immediate& src);
  380  void movl(CpuRegister dst, const Immediate& src);
  381  void movq(CpuRegister dst, CpuRegister src);
  382  void movl(CpuRegister dst, CpuRegister src);
  384  void movntl(const Address& dst, CpuRegister src);
  385  void movntq(const Address& dst, CpuRegister src);
  387  void movq(CpuRegister dst, const Address& src);
  388  void movl(CpuRegister dst, const Address& src);
  389  void movq(const Address& dst, CpuRegister src);
  391  void movl(const Address& dst, CpuRegister src);
  [all …]
|
D | assembler_x86_64.cc |
  177  void X86_64Assembler::movq(CpuRegister dst, CpuRegister src) {   in movq() argument
  180    EmitRex64(src, dst);   in movq()
  182    EmitRegisterOperand(src.LowBits(), dst.LowBits());   in movq()
  186  void X86_64Assembler::movl(CpuRegister dst, CpuRegister src) {   in movl() argument
  188    EmitOptionalRex32(dst, src);   in movl()
  190    EmitRegisterOperand(dst.LowBits(), src.LowBits());   in movl()
  194  void X86_64Assembler::movq(CpuRegister dst, const Address& src) {   in movq() argument
  196    EmitRex64(dst, src);   in movq()
  198    EmitOperand(dst.LowBits(), src);   in movq()
  202  void X86_64Assembler::movl(CpuRegister dst, const Address& src) {   in movl() argument
  [all …]
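
For the register-to-register forms above, movq and movl differ only in the REX.W prefix: both use the MOV r/m, r opcode (0x89) followed by a mod=11 ModR/M byte. A minimal standalone sketch of that encoding (not ART's emitter, which also covers the immediate and memory forms listed in the header):

#include <cstdint>
#include <vector>

// Registers are numbered 0..15; 8..15 need the REX.R (ModR/M reg field) or
// REX.B (ModR/M rm field) extension bit.
void EmitMovRegReg(std::vector<uint8_t>* out, int dst, int src, bool is_64bit) {
  uint8_t rex = (is_64bit ? 0x48 : 0x40) | ((src >= 8) ? 0x04 : 0x00) | ((dst >= 8) ? 0x01 : 0x00);
  if (rex != 0x40) {
    out->push_back(rex);                                // REX prefix only when needed
  }
  out->push_back(0x89);                                 // MOV r/m, r
  out->push_back(0xC0 | ((src & 7) << 3) | (dst & 7));  // mod=11, reg=src, rm=dst
}

// EmitMovRegReg(&code, /*dst=*/0, /*src=*/1, /*is_64bit=*/true) emits 48 89 C8
// (movq rax, rcx); with is_64bit=false it emits 89 C8 (movl eax, ecx).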
|
D | jni_macro_assembler_x86_64.cc |
  155  X86_64ManagedRegister src = msrc.AsX86_64();   in Store() local
  156  if (src.IsNoRegister()) {   in Store()
  158  } else if (src.IsCpuRegister()) {   in Store()
  161    __ movl(Address(CpuRegister(RSP), offs), src.AsCpuRegister());   in Store()
  164    __ movq(Address(CpuRegister(RSP), offs), src.AsCpuRegister());   in Store()
  166  } else if (src.IsRegisterPair()) {   in Store()
  168    __ movq(Address(CpuRegister(RSP), offs), src.AsRegisterPairLow());   in Store()
  170         src.AsRegisterPairHigh());   in Store()
  171  } else if (src.IsX87Register()) {   in Store()
  178    CHECK(src.IsXmmRegister());   in Store()
  [all …]
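
Store() above selects the spill instruction from the kind of register currently holding the value and from the requested size. A simplified sketch of that dispatch, using hypothetical stand-in types rather than ART's ManagedRegister API (the real code also handles register pairs and x87):

#include <cassert>
#include <cstddef>

enum class RegKind { kNone, kCpu, kXmm };

struct ManagedReg {
  RegKind kind;
  int index;
};

// Returns the mnemonic a JNI stub would use to spill `src` to [rsp + offs].
const char* SelectStoreInstruction(ManagedReg src, size_t size) {
  switch (src.kind) {
    case RegKind::kNone:
      assert(size == 0u);  // nothing to store
      return "(none)";
    case RegKind::kCpu:
      return size == 4u ? "movl" : "movq";
    case RegKind::kXmm:
      return size == 4u ? "movss" : "movsd";
  }
  return "(unreachable)";
}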
|
D | jni_macro_assembler_x86_64.h |
  59  void Store(FrameOffset offs, ManagedRegister src, size_t size) override;
  60  void StoreRef(FrameOffset dest, ManagedRegister src) override;
  61  void StoreRawPtr(FrameOffset dest, ManagedRegister src) override;
  69  void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off) override;
  72  void Load(ManagedRegister dest, FrameOffset src, size_t size) override;
  74  void LoadFromThread(ManagedRegister dest, ThreadOffset64 src, size_t size) override;
  76  void LoadRef(ManagedRegister dest, FrameOffset src) override;
  90  void Move(ManagedRegister dest, ManagedRegister src, size_t size) override;
  97  void CopyRef(FrameOffset dest, FrameOffset src) override;
  103  void Copy(FrameOffset dest, FrameOffset src, size_t size) override;
  [all …]
|
/art/compiler/utils/x86/ |
D | assembler_x86.h |
  359  void movl(Register dst, const Immediate& src);
  360  void movl(Register dst, Register src);
  362  void movl(Register dst, const Address& src);
  363  void movl(const Address& dst, Register src);
  367  void movntl(const Address& dst, Register src);
  369  void blsi(Register dst, Register src);    // no addr variant (for now)
  370  void blsmsk(Register dst, Register src);  // no addr variant (for now)
  371  void blsr(Register dst, Register src);    // no addr variant (for now)
  375  void bsfl(Register dst, Register src);
  376  void bsfl(Register dst, const Address& src);
  [all …]
|
D | assembler_x86.cc |
  147  void X86Assembler::movl(Register dst, Register src) {   in movl() argument
  150    EmitRegisterOperand(src, dst);   in movl()
  154  void X86Assembler::movl(Register dst, const Address& src) {   in movl() argument
  157    EmitOperand(dst, src);   in movl()
  161  void X86Assembler::movl(const Address& dst, Register src) {   in movl() argument
  164    EmitOperand(src, dst);   in movl()
  182  void X86Assembler::movntl(const Address& dst, Register src) {   in movntl() argument
  186    EmitOperand(src, dst);   in movntl()
  189  void X86Assembler::blsi(Register dst, Register src) {   in blsi() argument
  200    EmitRegisterOperand(3, src);   in blsi()
  [all …]
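
blsi, blsmsk and blsr declared in the header and encoded above are BMI1 instructions with simple bitwise definitions. Their semantics in plain C++ (a reference sketch of the behaviour, not the encoder):

#include <cstdint>

uint32_t blsi(uint32_t x)   { return x & (0u - x); }  // isolate lowest set bit
uint32_t blsmsk(uint32_t x) { return x ^ (x - 1u); }  // mask up to and including lowest set bit
uint32_t blsr(uint32_t x)   { return x & (x - 1u); }  // reset lowest set bit

// Example: blsi(0b10100) == 0b00100, blsmsk(0b10100) == 0b00111, blsr(0b10100) == 0b10000.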
|
D | jni_macro_assembler_x86.cc |
  135  X86ManagedRegister src = msrc.AsX86();   in Store() local
  136  if (src.IsNoRegister()) {   in Store()
  138  } else if (src.IsCpuRegister()) {   in Store()
  140    __ movl(Address(ESP, offs), src.AsCpuRegister());   in Store()
  141  } else if (src.IsRegisterPair()) {   in Store()
  143    __ movl(Address(ESP, offs), src.AsRegisterPairLow());   in Store()
  144    __ movl(Address(ESP, FrameOffset(offs.Int32Value()+4)), src.AsRegisterPairHigh());   in Store()
  145  } else if (src.IsX87Register()) {   in Store()
  152    CHECK(src.IsXmmRegister());   in Store()
  154    __ movss(Address(ESP, offs), src.AsXmmRegister());   in Store()
  [all …]
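
In the register-pair case above, a 64-bit value is spilled with two 32-bit stores: the low word at the frame offset and the high word four bytes above it. A minimal sketch of the resulting little-endian layout (plain C++, not the assembler):

#include <cstdint>
#include <cstring>

// Writes a long held as a lo/hi register pair into a frame byte buffer the way
// the two movl stores above do.
void StoreLongToFrame(uint8_t* frame, uint32_t offs, uint32_t lo, uint32_t hi) {
  std::memcpy(frame + offs, &lo, sizeof(lo));      // movl [esp + offs], low word
  std::memcpy(frame + offs + 4, &hi, sizeof(hi));  // movl [esp + offs + 4], high word
}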
|
D | jni_macro_assembler_x86.h |
  58  void Store(FrameOffset offs, ManagedRegister src, size_t size) override;
  59  void StoreRef(FrameOffset dest, ManagedRegister src) override;
  60  void StoreRawPtr(FrameOffset dest, ManagedRegister src) override;
  68  void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off) override;
  71  void Load(ManagedRegister dest, FrameOffset src, size_t size) override;
  73  void LoadFromThread(ManagedRegister dest, ThreadOffset32 src, size_t size) override;
  75  void LoadRef(ManagedRegister dest, FrameOffset src) override;
  87  void Move(ManagedRegister dest, ManagedRegister src, size_t size) override;
  94  void CopyRef(FrameOffset dest, FrameOffset src) override;
  100  void Copy(FrameOffset dest, FrameOffset src, size_t size) override;
  [all …]
|
/art/test/2000-virtual-list-structural/ |
D | build |
  21  mv src-ex/java/util/AbstractCollection.java src-ex/java/util/AbstractCollection.bak
  22  cp src-ex/java/util/AbstractCollection.bak src-ex/java/util/AbstractCollection.java
  25  patch src-ex/java/util/AbstractCollection.java AbstractCollection.patch
  30  rm src-ex/java/util/AbstractCollection.java
  31  mv src-ex/java/util/AbstractCollection.bak src-ex/java/util/AbstractCollection.java
|
/art/runtime/ |
D | reflection-inl.h |
  35  const JValue& src,   in ConvertPrimitiveValueNoThrow() argument
  39  dst->SetJ(src.GetJ());   in ConvertPrimitiveValueNoThrow()
  50  dst->SetS(src.GetI());   in ConvertPrimitiveValueNoThrow()
  57  dst->SetI(src.GetI());   in ConvertPrimitiveValueNoThrow()
  64  dst->SetJ(src.GetI());   in ConvertPrimitiveValueNoThrow()
  71  dst->SetF(src.GetI());   in ConvertPrimitiveValueNoThrow()
  74  dst->SetF(src.GetJ());   in ConvertPrimitiveValueNoThrow()
  81  dst->SetD(src.GetI());   in ConvertPrimitiveValueNoThrow()
  84  dst->SetD(src.GetJ());   in ConvertPrimitiveValueNoThrow()
  87  dst->SetD(src.GetF());   in ConvertPrimitiveValueNoThrow()
  [all …]
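
ConvertPrimitiveValueNoThrow performs Java's widening primitive conversions: the destination type picks which getter feeds which setter, and narrowing or incompatible pairs fail. A simplified sketch of that switch with a hypothetical JValue-like union (byte/char/short source cases omitted):

#include <cstdint>

union Value {
  int32_t i;
  int64_t j;
  float f;
  double d;
};

enum class Prim { kInt, kLong, kFloat, kDouble };

bool ConvertNoThrow(Prim src_type, const Value& src, Prim dst_type, Value* dst) {
  if (src_type == dst_type) { *dst = src; return true; }
  switch (dst_type) {
    case Prim::kLong:
      if (src_type == Prim::kInt) { dst->j = src.i; return true; }
      break;
    case Prim::kFloat:
      if (src_type == Prim::kInt)  { dst->f = static_cast<float>(src.i); return true; }
      if (src_type == Prim::kLong) { dst->f = static_cast<float>(src.j); return true; }
      break;
    case Prim::kDouble:
      if (src_type == Prim::kInt)   { dst->d = src.i; return true; }
      if (src_type == Prim::kLong)  { dst->d = static_cast<double>(src.j); return true; }
      if (src_type == Prim::kFloat) { dst->d = src.f; return true; }
      break;
    default:
      break;
  }
  return false;  // narrowing or incompatible conversion; the caller reports the error
}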
|
/art/compiler/optimizing/ |
D | code_generator_vector_x86.cc |
  149  XmmRegister src = locations->InAt(0).AsFpuRegister<XmmRegister>();   in VisitVecExtractScalar() local
  161  __ movd(locations->Out().AsRegister<Register>(), src);   in VisitVecExtractScalar()
  166  __ movd(locations->Out().AsRegisterPairLow<Register>(), src);   in VisitVecExtractScalar()
  167  __ pshufd(tmp, src, Immediate(1));   in VisitVecExtractScalar()
  217  XmmRegister src = locations->InAt(0).AsFpuRegister<XmmRegister>();   in VisitVecReduce() local
  224  __ movaps(dst, src);   in VisitVecReduce()
  240  __ movaps(tmp, src);   in VisitVecReduce()
  241  __ movaps(dst, src);   in VisitVecReduce()
  263  XmmRegister src = locations->InAt(0).AsFpuRegister<XmmRegister>();   in VisitVecCnv() local
  269  __ cvtdq2ps(dst, src);   in VisitVecCnv()
  [all …]
|
D | code_generator_vector_x86_64.cc |
  137  XmmRegister src = locations->InAt(0).AsFpuRegister<XmmRegister>();   in VisitVecExtractScalar() local
  148  __ movd(locations->Out().AsRegister<CpuRegister>(), src, /*64-bit*/ false);   in VisitVecExtractScalar()
  152  __ movd(locations->Out().AsRegister<CpuRegister>(), src, /*64-bit*/ true);   in VisitVecExtractScalar()
  200  XmmRegister src = locations->InAt(0).AsFpuRegister<XmmRegister>();   in VisitVecReduce() local
  207  __ movaps(dst, src);   in VisitVecReduce()
  223  __ movaps(tmp, src);   in VisitVecReduce()
  224  __ movaps(dst, src);   in VisitVecReduce()
  246  XmmRegister src = locations->InAt(0).AsFpuRegister<XmmRegister>();   in VisitVecCnv() local
  252  __ cvtdq2ps(dst, src);   in VisitVecCnv()
  264  XmmRegister src = locations->InAt(0).AsFpuRegister<XmmRegister>();   in VisitVecNeg() local
  [all …]
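
In both code generators, VisitVecReduce copies the input vector (movaps) and then folds the lanes together with shuffles and adds. The same idea written with plain SSE2 intrinsics instead of ART's assembler, as a sketch for a 4 x int32 sum reduction:

#include <emmintrin.h>  // SSE2
#include <cstdint>

int32_t ReduceAddInt4(__m128i v) {
  __m128i hi   = _mm_unpackhi_epi64(v, v);       // lanes {2, 3, 2, 3}
  __m128i sum2 = _mm_add_epi32(v, hi);           // {0+2, 1+3, ...}
  __m128i swap = _mm_shuffle_epi32(sum2, 0xB1);  // swap adjacent lanes
  __m128i sum4 = _mm_add_epi32(sum2, swap);      // every lane holds the total
  return _mm_cvtsi128_si32(sum4);                // extract lane 0
}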
|
/art/test/etc/ |
D | default-build |
  45  if [ -d src ]; then
  52  if [ -d src-art ]; then
  64  if [ -d src-multidex ]; then
  89  if [ -d src-ex ]; then
  95  if [ -d src-ex2 ]; then
  101  if [ -d src-dex2oat-unresolved ]; then
  373  …javac_with_bootclasspath -implicit:none -sourcepath src-dex2oat-unresolved -d classes `find src -n…
  374  …javac_with_bootclasspath -implicit:none -sourcepath src -d classes-ex `find src-dex2oat-unresolved…
  389  `find src -name '*.java'` \
  390  `find src-multidex -name '*.java'`
  [all …]
|
/art/compiler/utils/arm64/ |
D | jni_macro_assembler_arm64.cc |
  135  Arm64ManagedRegister src = m_src.AsArm64();   in Store() local
  136  if (src.IsNoRegister()) {   in Store()
  138  } else if (src.IsWRegister()) {   in Store()
  140    StoreWToOffset(kStoreWord, src.AsWRegister(), SP, offs.Int32Value());   in Store()
  141  } else if (src.IsXRegister()) {   in Store()
  143    StoreToOffset(src.AsXRegister(), SP, offs.Int32Value());   in Store()
  144  } else if (src.IsSRegister()) {   in Store()
  145    StoreSToOffset(src.AsSRegister(), SP, offs.Int32Value());   in Store()
  147    CHECK(src.IsDRegister()) << src;   in Store()
  148    StoreDToOffset(src.AsDRegister(), SP, offs.Int32Value());   in Store()
  [all …]
|
D | jni_macro_assembler_arm64.h |
  68  void Store(FrameOffset offs, ManagedRegister src, size_t size) override;
  69  void StoreRef(FrameOffset dest, ManagedRegister src) override;
  70  void StoreRawPtr(FrameOffset dest, ManagedRegister src) override;
  74  void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off) override;
  77  void Load(ManagedRegister dest, FrameOffset src, size_t size) override;
  78  void LoadFromThread(ManagedRegister dest, ThreadOffset64 src, size_t size) override;
  79  void LoadRef(ManagedRegister dest, FrameOffset src) override;
  89  void Move(ManagedRegister dest, ManagedRegister src, size_t size) override;
  93  void CopyRef(FrameOffset dest, FrameOffset src) override;
  98  void Copy(FrameOffset dest, FrameOffset src, size_t size) override;
  [all …]
|
/art/runtime/mirror/ |
D | object_array-inl.h |
  105  ObjPtr<ObjectArray<T>> src,   in AssignableMemmove() argument
  111  src->GetWithoutChecks(src_pos + i);   in AssignableMemmove()
  120  const bool copy_forward = (src != this) || (dst_pos < src_pos) || (dst_pos - src_pos >= count);   in AssignableMemmove()
  126  if (!ReadBarrier::IsGray(src.Ptr(), &fake_address_dependency)) {   in AssignableMemmove()
  129  src.Assign(reinterpret_cast<ObjectArray<T>*>(   in AssignableMemmove()
  130      reinterpret_cast<uintptr_t>(src.Ptr()) | fake_address_dependency));   in AssignableMemmove()
  133  ObjPtr<T> obj = src->template GetWithoutChecks<kDefaultVerifyFlags, kWithoutReadBarrier>(   in AssignableMemmove()
  142  ObjPtr<T> obj = src->GetWithoutChecks(src_pos + i);   in AssignableMemmove()
  151  if (!ReadBarrier::IsGray(src.Ptr(), &fake_address_dependency)) {   in AssignableMemmove()
  154  src.Assign(reinterpret_cast<ObjectArray<T>*>(   in AssignableMemmove()
  [all …]
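
The copy_forward predicate above is the memmove rule for possibly overlapping ranges inside the same array: copy front-to-back unless the destination begins inside the source range, in which case copy back-to-front so no element is overwritten before it is read. A minimal sketch of just that rule, without the read-barrier machinery:

#include <cstddef>

template <typename T>
void OverlapSafeCopy(T* array, size_t dst_pos, size_t src_pos, size_t count) {
  const bool copy_forward = (dst_pos < src_pos) || (dst_pos - src_pos >= count);
  if (copy_forward) {
    for (size_t i = 0; i < count; ++i) {
      array[dst_pos + i] = array[src_pos + i];
    }
  } else {
    for (size_t i = count; i > 0; --i) {
      array[dst_pos + i - 1] = array[src_pos + i - 1];
    }
  }
}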
|
/art/compiler/utils/arm/ |
D | jni_macro_assembler_arm_vixl.cc |
  271  ArmManagedRegister src = m_src.AsArm();   in Store() local
  272  if (src.IsNoRegister()) {   in Store()
  274  } else if (src.IsCoreRegister()) {   in Store()
  277    temps.Exclude(AsVIXLRegister(src));   in Store()
  278    asm_.StoreToOffset(kStoreWord, AsVIXLRegister(src), sp, dest.Int32Value());   in Store()
  279  } else if (src.IsRegisterPair()) {   in Store()
  281    ___ Strd(AsVIXLRegisterPairLow(src),   in Store()
  282             AsVIXLRegisterPairHigh(src),   in Store()
  284  } else if (src.IsSRegister()) {   in Store()
  286    asm_.StoreSToOffset(AsVIXLSRegister(src), sp, dest.Int32Value());   in Store()
  [all …]
|
D | jni_macro_assembler_arm_vixl.h |
  65  void Store(FrameOffset offs, ManagedRegister src, size_t size) override;
  66  void StoreRef(FrameOffset dest, ManagedRegister src) override;
  67  void StoreRawPtr(FrameOffset dest, ManagedRegister src) override;
  75  void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off) override;
  78  void Load(ManagedRegister dest, FrameOffset src, size_t size) override;
  81       ThreadOffset32 src,
  84  void LoadRef(ManagedRegister dest, FrameOffset src) override;
  98  void Move(ManagedRegister dest, ManagedRegister src, size_t size) override;
  106  void CopyRef(FrameOffset dest, FrameOffset src) override;
  112  void Copy(FrameOffset dest, FrameOffset src, size_t size) override;
  [all …]
|
/art/libartbase/base/ |
D | bit_vector.cc |
  52  BitVector::BitVector(const BitVector& src,   in BitVector() argument
  57      src.storage_size_,   in BitVector()
  58      static_cast<uint32_t*>(allocator->Alloc(src.storage_size_ * kWordBytes)))   in BitVector()
  60    Copy(&src);   in BitVector()
  70  bool BitVector::SameBitsSet(const BitVector *src) const {   in SameBitsSet()
  72    int src_highest = src->GetHighestBitSet();   in SameBitsSet()
  93    return (memcmp(storage_, src->GetRawStorage(), our_highest_index * kWordBytes) == 0);   in SameBitsSet()
  122  void BitVector::Intersect(const BitVector* src) {   in Intersect() argument
  123    uint32_t src_storage_size = src->storage_size_;   in Intersect()
  130    storage_[idx] &= src->GetRawStorageWord(idx);   in Intersect()
  [all …]
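
Intersect above ANDs the two bit vectors word by word up to the smaller storage size; words this vector has beyond the other's storage must be cleared, since those bits cannot survive an intersection. A minimal sketch on raw word storage (std::vector stand-in, not ART's allocator-backed storage):

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <vector>

void Intersect(std::vector<uint32_t>* storage, const std::vector<uint32_t>& other) {
  const size_t common = std::min(storage->size(), other.size());
  for (size_t i = 0; i < common; ++i) {
    (*storage)[i] &= other[i];
  }
  for (size_t i = common; i < storage->size(); ++i) {
    (*storage)[i] = 0u;  // no corresponding word in `other`
  }
}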
|
/art/test/970-iface-super-resolution-gen/ |
D | build |
  24  mkdir -p src
  26  ./util-src/generate_java.py ./src2 ./src ./expected-stdout.txt
  30  ./util-src/generate_smali.py ./smali ./expected-stdout.txt
|
/art/tools/ahat/src/main/com/android/ahat/heapdump/ |
D | Reference.java |
  26  public final AhatInstance src;   field in Reference
  31  public Reference(AhatInstance src, String field, AhatInstance ref, Reachability reachability) {   in Reference() argument
  32    this.src = src;   in Reference()
|
/art/test/968-default-partial-compile-gen/ |
D | build |
  23  mkdir -p src
  29  ./util-src/generate_java.py ./javac_exec.sh ./src ./classes ./expected-stdout.txt ./build_log
  33  ./util-src/generate_smali.py ./smali ./expected-stdout.txt
|
/art/test/971-iface-super/ |
D | build |
  23  mkdir -p src
  29  ./util-src/generate_java.py ./javac_exec.sh ./src ./classes ./expected-stdout.txt ./build_log
  33  ./util-src/generate_smali.py ./smali ./expected-stdout.txt
|
/art/libelffile/elf/ |
D | xz_utils.cc |
  43  void XzCompress(ArrayRef<const uint8_t> src,   in XzCompress() argument
  52    lzma2Props.lzmaProps.reduceSize = src.size();  // Size of data that will be compressed.   in XzCompress()
  85    callbacks.src_ = src;   in XzCompress()
  95    DCHECK_EQ(decompressed.size(), src.size());   in XzCompress()
  96    DCHECK_EQ(memcmp(decompressed.data(), src.data(), src.size()), 0);   in XzCompress()
  100  void XzDecompress(ArrayRef<const uint8_t> src, std::vector<uint8_t>* dst) {   in XzDecompress() argument
  113    size_t src_remaining = src.size() - src_offset;   in XzDecompress()
  118    src.data() + src_offset,   in XzDecompress()
  127    CHECK_EQ(src_offset, src.size());   in XzDecompress()
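
XzDecompress above decodes the input in fixed-size chunks, growing the output vector as it goes and checking at the end that the whole input was consumed. A self-contained sketch of that buffering loop; DecodeSome is a hypothetical stand-in for the real XZ decoder step (here it just copies bytes), so only the offset and resize bookkeeping mirrors the code above:

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <vector>

// Hypothetical decoder step: consumes up to *src_size bytes, writes up to
// *dst_size bytes, and updates both to the amounts actually processed.
// Returns true once the stream is finished.  (Stand-in: an identity copy.)
static bool DecodeSome(const uint8_t* src, size_t* src_size,
                       uint8_t* dst, size_t* dst_size) {
  size_t n = std::min(*src_size, *dst_size);
  if (n != 0u) {
    std::memcpy(dst, src, n);
  }
  bool finished = (n == *src_size);  // all remaining input consumed
  *src_size = n;
  *dst_size = n;
  return finished;
}

void Decompress(const std::vector<uint8_t>& src, std::vector<uint8_t>* dst) {
  constexpr size_t kChunkSize = 4096;
  size_t src_offset = 0;
  while (true) {
    size_t dst_offset = dst->size();
    dst->resize(dst_offset + kChunkSize);              // room for one more chunk
    size_t src_remaining = src.size() - src_offset;
    size_t dst_remaining = kChunkSize;
    bool finished = DecodeSome(src.data() + src_offset, &src_remaining,
                               dst->data() + dst_offset, &dst_remaining);
    src_offset += src_remaining;                       // bytes actually consumed
    dst->resize(dst_offset + dst_remaining);           // keep only bytes produced
    if (finished) {
      break;
    }
  }
}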
|
/art/test/648-many-direct-methods/ |
D | build |
  20  mkdir -p ./src
  23  ./util-src/generate_java.py ./src
|