/art/compiler/utils/x86_64/ |
D | assembler_x86_64.h |
  445  void movq(CpuRegister dst, const Immediate& src);
  446  void movl(CpuRegister dst, const Immediate& src);
  447  void movq(CpuRegister dst, CpuRegister src);
  448  void movl(CpuRegister dst, CpuRegister src);
  450  void movntl(const Address& dst, CpuRegister src);
  451  void movntq(const Address& dst, CpuRegister src);
  453  void movq(CpuRegister dst, const Address& src);
  454  void movl(CpuRegister dst, const Address& src);
  455  void movq(const Address& dst, CpuRegister src);
  456  void movq(const Address& dst, const Immediate& imm);
  [all …]
|
D | assembler_x86_64.cc |
  142  void X86_64Assembler::movq(CpuRegister dst, const Immediate& imm) {  in movq() argument
  146    EmitRex64(dst);  in movq()
  148    EmitRegisterOperand(0, dst.LowBits());  in movq()
  151    EmitRex64(dst);  in movq()
  152    EmitUint8(0xB8 + dst.LowBits());  in movq()
  158  void X86_64Assembler::movl(CpuRegister dst, const Immediate& imm) {  in movl() argument
  161    EmitOptionalRex32(dst);  in movl()
  162    EmitUint8(0xB8 + dst.LowBits());  in movl()
  167  void X86_64Assembler::movq(const Address& dst, const Immediate& imm) {  in movq() argument
  170    EmitRex64(dst);  in movq()
  [all …]
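The movq(CpuRegister, Immediate) path above emits a REX.W prefix, then opcode 0xB8+reg, then the 64-bit immediate. A minimal standalone sketch of that encoding (the helper and its name are illustrative, not ART's assembler API):

    #include <cstdint>
    #include <vector>

    // Encode `mov r64, imm64`: REX prefix, opcode 0xB8 + low register bits, 8-byte immediate.
    std::vector<uint8_t> EncodeMovRegImm64(int reg, uint64_t imm) {
      std::vector<uint8_t> out;
      // REX is 0100WRXB: W=1 selects 64-bit operand size, B extends the opcode's
      // 3-bit register field so r8-r15 are reachable.
      out.push_back(0x48 | ((reg >= 8) ? 0x01 : 0x00));
      out.push_back(0xB8 + (reg & 7));
      for (int i = 0; i < 8; ++i) {
        out.push_back(static_cast<uint8_t>(imm >> (8 * i)));  // immediate, little-endian
      }
      return out;
    }
    // EncodeMovRegImm64(0, 0x1122334455667788) yields 48 B8 88 77 66 55 44 33 22 11,
    // i.e. `movq rax, 0x1122334455667788`.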
|
/art/compiler/utils/x86/ |
D | assembler_x86.h |
  400  void movl(Register dst, const Immediate& src);
  401  void movl(Register dst, Register src);
  403  void movl(Register dst, const Address& src);
  404  void movl(const Address& dst, Register src);
  405  void movl(const Address& dst, const Immediate& imm);
  406  void movl(const Address& dst, Label* lbl);
  408  void movntl(const Address& dst, Register src);
  410  void blsi(Register dst, Register src);  // no addr variant (for now)
  411  void blsmsk(Register dst, Register src);  // no addr variant (for now)
  412  void blsr(Register dst, Register src);  // no addr variant (for now)
  [all …]
|
D | assembler_x86.cc |
  140  void X86Assembler::movl(Register dst, const Immediate& imm) {  in movl() argument
  142    EmitUint8(0xB8 + dst);  in movl()
  147  void X86Assembler::movl(Register dst, Register src) {  in movl() argument
  150    EmitRegisterOperand(src, dst);  in movl()
  154  void X86Assembler::movl(Register dst, const Address& src) {  in movl() argument
  157    EmitOperand(dst, src);  in movl()
  161  void X86Assembler::movl(const Address& dst, Register src) {  in movl() argument
  164    EmitOperand(src, dst);  in movl()
  168  void X86Assembler::movl(const Address& dst, const Immediate& imm) {  in movl() argument
  171    EmitOperand(0, dst);  in movl()
  [all …]
|
/art/compiler/optimizing/ |
D | code_generator_vector_x86.cc |
  64   XmmRegister dst = locations->Out().AsFpuRegister<XmmRegister>();  in VisitVecReplicateScalar() local
  69   cpu_has_avx ? __ vxorps(dst, dst, dst) : __ xorps(dst, dst);  in VisitVecReplicateScalar()
  78   __ movd(dst, locations->InAt(0).AsRegister<Register>());  in VisitVecReplicateScalar()
  79   __ punpcklbw(dst, dst);  in VisitVecReplicateScalar()
  80   __ punpcklwd(dst, dst);  in VisitVecReplicateScalar()
  81   __ pshufd(dst, dst, Immediate(0));  in VisitVecReplicateScalar()
  86   __ movd(dst, locations->InAt(0).AsRegister<Register>());  in VisitVecReplicateScalar()
  87   __ punpcklwd(dst, dst);  in VisitVecReplicateScalar()
  88   __ pshufd(dst, dst, Immediate(0));  in VisitVecReplicateScalar()
  92   __ movd(dst, locations->InAt(0).AsRegister<Register>());  in VisitVecReplicateScalar()
  [all …]
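The 8-bit case above broadcasts a scalar into every byte lane with movd, punpcklbw, punpcklwd, pshufd. The same sequence written with SSE2 intrinsics, as an illustrative sketch (not the code generator itself):

    #include <emmintrin.h>  // SSE2
    #include <cstdint>

    // Replicate one byte across all 16 lanes of an XMM register.
    __m128i BroadcastByte(uint8_t value) {
      __m128i v = _mm_cvtsi32_si128(value);  // movd: scalar into lane 0
      v = _mm_unpacklo_epi8(v, v);           // punpcklbw: duplicate the byte within 16-bit lane 0
      v = _mm_unpacklo_epi16(v, v);          // punpcklwd: duplicate into 32-bit lane 0
      return _mm_shuffle_epi32(v, 0);        // pshufd 0: splat 32-bit lane 0 everywhere
    }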
|
D | code_generator_vector_x86_64.cc |
  59   XmmRegister dst = locations->Out().AsFpuRegister<XmmRegister>();  in VisitVecReplicateScalar() local
  64   cpu_has_avx ? __ vxorps(dst, dst, dst) : __ xorps(dst, dst);  in VisitVecReplicateScalar()
  73   __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false);  in VisitVecReplicateScalar()
  74   __ punpcklbw(dst, dst);  in VisitVecReplicateScalar()
  75   __ punpcklwd(dst, dst);  in VisitVecReplicateScalar()
  76   __ pshufd(dst, dst, Immediate(0));  in VisitVecReplicateScalar()
  81   __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false);  in VisitVecReplicateScalar()
  82   __ punpcklwd(dst, dst);  in VisitVecReplicateScalar()
  83   __ pshufd(dst, dst, Immediate(0));  in VisitVecReplicateScalar()
  87   __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false);  in VisitVecReplicateScalar()
  [all …]
|
D | code_generator_vector_arm_vixl.cc |
  56   vixl32::DRegister dst = DRegisterFrom(locations->Out());  in VisitVecReplicateScalar() local
  62   __ Vdup(Untyped8, dst, InputRegisterAt(instruction, 0));  in VisitVecReplicateScalar()
  67   __ Vdup(Untyped16, dst, InputRegisterAt(instruction, 0));  in VisitVecReplicateScalar()
  71   __ Vdup(Untyped32, dst, InputRegisterAt(instruction, 0));  in VisitVecReplicateScalar()
  137  vixl32::DRegister dst = DRegisterFrom(locations->Out());  in VisitVecReduce() local
  143  __ Vpadd(DataTypeValue::I32, dst, src, src);  in VisitVecReduce()
  146  __ Vpmin(DataTypeValue::S32, dst, src, src);  in VisitVecReduce()
  149  __ Vpmax(DataTypeValue::S32, dst, src, src);  in VisitVecReduce()
  174  vixl32::DRegister dst = DRegisterFrom(locations->Out());  in VisitVecNeg() local
  179  __ Vneg(DataTypeValue::S8, dst, src);  in VisitVecNeg()
  [all …]
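On ARM the broadcast collapses to a single Vdup per element size. The equivalent with NEON intrinsics, as an illustrative sketch (requires an ARM or AArch64 target with NEON):

    #include <arm_neon.h>
    #include <cstdint>

    // VDUP replicates a scalar into every lane of a 64-bit D register in one instruction.
    int8x8_t  BroadcastS8(int8_t v)   { return vdup_n_s8(v); }
    int16x4_t BroadcastS16(int16_t v) { return vdup_n_s16(v); }
    int32x2_t BroadcastS32(int32_t v) { return vdup_n_s32(v); }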
|
D | code_generator_vector_arm64_neon.cc |
  113  VRegister dst = VRegisterFrom(locations->Out());  in VisitVecReplicateScalar() local
  120  __ Movi(dst.V16B(), Int64FromLocation(src_loc));  in VisitVecReplicateScalar()
  122  __ Dup(dst.V16B(), InputRegisterAt(instruction, 0));  in VisitVecReplicateScalar()
  129  __ Movi(dst.V8H(), Int64FromLocation(src_loc));  in VisitVecReplicateScalar()
  131  __ Dup(dst.V8H(), InputRegisterAt(instruction, 0));  in VisitVecReplicateScalar()
  137  __ Movi(dst.V4S(), Int64FromLocation(src_loc));  in VisitVecReplicateScalar()
  139  __ Dup(dst.V4S(), InputRegisterAt(instruction, 0));  in VisitVecReplicateScalar()
  145  __ Movi(dst.V2D(), Int64FromLocation(src_loc));  in VisitVecReplicateScalar()
  147  __ Dup(dst.V2D(), XRegisterFrom(src_loc));  in VisitVecReplicateScalar()
  153  __ Fmov(dst.V4S(), src_loc.GetConstant()->AsFloatConstant()->GetValue());  in VisitVecReplicateScalar()
  [all …]
|
D | code_generator_vector_arm64_sve.cc |
  116  const ZRegister dst = ZRegisterFrom(locations->Out());  in VisitVecReplicateScalar() local
  123  __ Dup(dst.VnB(), Int64FromLocation(src_loc));  in VisitVecReplicateScalar()
  125  __ Dup(dst.VnB(), InputRegisterAt(instruction, 0));  in VisitVecReplicateScalar()
  131  __ Dup(dst.VnH(), Int64FromLocation(src_loc));  in VisitVecReplicateScalar()
  133  __ Dup(dst.VnH(), InputRegisterAt(instruction, 0));  in VisitVecReplicateScalar()
  138  __ Dup(dst.VnS(), Int64FromLocation(src_loc));  in VisitVecReplicateScalar()
  140  __ Dup(dst.VnS(), InputRegisterAt(instruction, 0));  in VisitVecReplicateScalar()
  145  __ Dup(dst.VnD(), Int64FromLocation(src_loc));  in VisitVecReplicateScalar()
  147  __ Dup(dst.VnD(), XRegisterFrom(src_loc));  in VisitVecReplicateScalar()
  152  __ Fdup(dst.VnS(), src_loc.GetConstant()->AsFloatConstant()->GetValue());  in VisitVecReplicateScalar()
  [all …]
|
/art/runtime/ |
D | reflection-inl.h |
  36   JValue* dst) {  in ConvertPrimitiveValueNoThrow() argument
  39   dst->SetJ(src.GetJ());  in ConvertPrimitiveValueNoThrow()
  50   dst->SetS(src.GetI());  in ConvertPrimitiveValueNoThrow()
  57   dst->SetI(src.GetI());  in ConvertPrimitiveValueNoThrow()
  64   dst->SetJ(src.GetI());  in ConvertPrimitiveValueNoThrow()
  71   dst->SetF(src.GetI());  in ConvertPrimitiveValueNoThrow()
  74   dst->SetF(src.GetJ());  in ConvertPrimitiveValueNoThrow()
  81   dst->SetD(src.GetI());  in ConvertPrimitiveValueNoThrow()
  84   dst->SetD(src.GetJ());  in ConvertPrimitiveValueNoThrow()
  87   dst->SetD(src.GetF());  in ConvertPrimitiveValueNoThrow()
  [all …]
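ConvertPrimitiveValueNoThrow above implements Java's widening primitive conversions: an int source may be stored into a long, float, or double slot, never the other way round. A toy sketch of the same rule (the union and names are hypothetical, not ART's JValue):

    #include <cstdint>

    union MiniJValue { int32_t i; int64_t j; float f; double d; };
    enum class Kind { kLong, kFloat, kDouble };

    // Widen an int source into a wider destination slot; narrowing is never done implicitly.
    bool WidenInt(int32_t src, Kind dst_kind, MiniJValue* dst) {
      switch (dst_kind) {
        case Kind::kLong:   dst->j = src;                       return true;  // int -> long
        case Kind::kFloat:  dst->f = static_cast<float>(src);   return true;  // int -> float
        case Kind::kDouble: dst->d = static_cast<double>(src);  return true;  // int -> double
      }
      return false;
    }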
|
D | write_barrier-inl.h |
  30   inline void WriteBarrier::ForFieldWrite(ObjPtr<mirror::Object> dst,  in ForFieldWrite() argument
  37   GetCardTable()->MarkCard(dst.Ptr());  in ForFieldWrite()
  40   inline void WriteBarrier::ForArrayWrite(ObjPtr<mirror::Object> dst,  in ForArrayWrite() argument
  43   GetCardTable()->MarkCard(dst.Ptr());  in ForArrayWrite()
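Both barriers above reduce to marking one byte in a card table, so the GC only rescans cards that were dirtied since the last collection instead of the whole heap. A minimal sketch of the idea (the shift, dirty value, and struct are illustrative, not ART's actual constants):

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kCardShift = 10;    // assume one card covers 1 KiB of heap
    constexpr uint8_t kCardDirty = 0x70;

    struct CardTable {
      uint8_t* biased_begin;             // biased so (addr >> kCardShift) indexes directly
      void MarkCard(const void* addr) {
        biased_begin[reinterpret_cast<uintptr_t>(addr) >> kCardShift] = kCardDirty;
      }
    };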
|
/art/libartbase/base/ |
D | endian_utils.h |
  86   inline void Write1BE(uint8_t** dst, uint8_t value) {  in Write1BE() argument
  87   Set1(*dst, value);  in Write1BE()
  88   *dst += sizeof(value);  in Write1BE()
  91   inline void Write2BE(uint8_t** dst, uint16_t value) {  in Write2BE() argument
  92   Set2BE(*dst, value);  in Write2BE()
  93   *dst += sizeof(value);  in Write2BE()
  96   inline void Write4BE(uint8_t** dst, uint32_t value) {  in Write4BE() argument
  97   Set4BE(*dst, value);  in Write4BE()
  98   *dst += sizeof(value);  in Write4BE()
  101  inline void Write8BE(uint8_t** dst, uint64_t value) {  in Write8BE() argument
  [all …]
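The WriteNBE helpers store a value most-significant byte first and then advance the cursor, which keeps the output big-endian regardless of host byte order. A self-contained sketch of the pattern (the SetNBE body is reconstructed for illustration):

    #include <cstdint>

    inline void Set4BE(uint8_t* dst, uint32_t v) {
      dst[0] = static_cast<uint8_t>(v >> 24);   // most significant byte first
      dst[1] = static_cast<uint8_t>(v >> 16);
      dst[2] = static_cast<uint8_t>(v >> 8);
      dst[3] = static_cast<uint8_t>(v);
    }

    inline void Write4BE(uint8_t** dst, uint32_t value) {
      Set4BE(*dst, value);
      *dst += sizeof(value);                    // advance the write cursor
    }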
|
D | safe_copy.cc |
  33   ssize_t SafeCopy(void *dst, const void *src, size_t len) {  in SafeCopy() argument
  36   .iov_base = dst,  in SafeCopy()
  78   UNUSED(dst, src, len);  in SafeCopy()
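SafeCopy copies memory by asking the kernel to do it: process_vm_readv on the current process faults safely inside the kernel, so an unmapped or unreadable source page yields a short count or an error instead of a crash. A simplified sketch with a single iovec pair (the real code splits the buffers at page boundaries):

    #include <sys/uio.h>   // process_vm_readv (may need _GNU_SOURCE with glibc)
    #include <unistd.h>
    #include <cstddef>

    ssize_t SafeCopySketch(void* dst, const void* src, size_t len) {
      struct iovec dst_iov = { dst, len };                     // local (destination) buffer
      struct iovec src_iov = { const_cast<void*>(src), len };  // "remote" (source) buffer
      // Reading from our own pid: returns the bytes actually copied, or -1 on failure.
      return process_vm_readv(getpid(), &dst_iov, 1, &src_iov, 1, 0);
    }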
|
D | strlcpy.h |
  31   static inline size_t strlcpy(char* dst, const char* src, size_t size) {  in strlcpy() argument
  33   return snprintf(dst, size, "%s", src);  in strlcpy()
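The snprintf fallback satisfies strlcpy's contract: the destination is always NUL-terminated within size, and the return value is the full length of src, so a result >= size signals truncation. A small usage sketch:

    #include <cstddef>
    #include <cstdio>

    int main() {
      char buf[8];
      int n = snprintf(buf, sizeof(buf), "%s", "hello, world");
      // buf now holds "hello, " (7 chars + NUL); n == 12, and n >= sizeof(buf) means truncation.
      return (n < 0 || static_cast<size_t>(n) >= sizeof(buf)) ? 1 : 0;
    }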
|
D | safe_copy_test.cc |
  95   char* dst = static_cast<char*>(dst_map);  in TEST() local
  96   ASSERT_EQ(0, mprotect(dst + 3 * kPageSize, kPageSize, PROT_NONE));  in TEST()
  102  SafeCopy(dst + 1024, src + 512, kPageSize * 3 - 1024));  in TEST()
  103  EXPECT_EQ(0, memcmp(dst + 1024, src + 512, kPageSize * 3 - 1024));  in TEST()
|
/art/runtime/native/ |
D | libcore_io_Memory.cc |
  66   jint* dst = reinterpret_cast<jint*>(dstShorts);  in swapShorts() local
  70   put_unaligned<jint>(dst++, bswap_2x16(v));  in swapShorts()
  74   put_unaligned<jshort>(reinterpret_cast<jshort*>(dst), __builtin_bswap16(v));  in swapShorts()
  86   jint* dst = reinterpret_cast<jint*>(dstLongs);  in swapLongs() local
  91   put_unaligned<jint>(dst++, __builtin_bswap32(v2));  in swapLongs()
  92   put_unaligned<jint>(dst++, __builtin_bswap32(v1));  in swapLongs()
  97   JNIEnv* env, jclass, jlong srcAddress, jbyteArray dst, jint dstOffset, jint byteCount) {  in Memory_peekByteArray() argument
  98   env->SetByteArrayRegion(dst, dstOffset, byteCount, cast<const jbyte*>(srcAddress));  in Memory_peekByteArray()
  111  Scoped##JNI_NAME##ArrayRW elements(env, dst); \
  119  env->Set##JNI_NAME##ArrayRegion(dst, dstOffset, count, src); \
  [all …]
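swapShorts above byte-swaps two jshorts per iteration by treating them as one 32-bit word; bswap_2x16 swaps the bytes within each 16-bit half. A sketch of that helper reconstructed with masks and shifts (signature simplified for illustration):

    #include <cstdint>

    // Swap the bytes inside each 16-bit half of a 32-bit word; the halves stay in place.
    inline uint32_t Bswap2x16(uint32_t v) {
      return ((v & 0x00FF00FFu) << 8) | ((v >> 8) & 0x00FF00FFu);
    }
    // Example: 0xAABBCCDD -> 0xBBAADDCC, i.e. both packed shorts byte-swapped in one go.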
|
/art/libelffile/elf/ |
D | xz_utils.cc |
  44   std::vector<uint8_t>* dst,  in XzCompress() argument
  86   callbacks.dst_ = dst;  in XzCompress()
  94   XzDecompress(ArrayRef<const uint8_t>(*dst), &decompressed);  in XzCompress()
  100  void XzDecompress(ArrayRef<const uint8_t> src, std::vector<uint8_t>* dst) {  in XzDecompress() argument
  112  dst->resize(RoundUp(dst_offset + kPageSize / 4, kPageSize));  in XzDecompress()
  114  size_t dst_remaining = dst->size() - dst_offset;  in XzDecompress()
  116  dst->data() + dst_offset,  in XzDecompress()
  130  dst->resize(dst_offset);  in XzDecompress()
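XzDecompress does not know the output size up front, so it grows the destination vector a chunk at a time, decodes into the tail, and finally trims to the bytes actually produced. A sketch of that growth loop with the decoder abstracted away (DecodeChunk and kChunk are hypothetical stand-ins for the streaming xz call and ART's page-based sizing):

    #include <cstddef>
    #include <cstdint>
    #include <functional>
    #include <vector>

    constexpr size_t kChunk = 4096;  // assumed chunk size, for illustration

    void DecompressInto(std::vector<uint8_t>* dst,
                        const std::function<size_t(uint8_t*, size_t)>& DecodeChunk) {
      size_t dst_offset = 0;
      for (;;) {
        dst->resize(dst_offset + kChunk);                  // make room for the next chunk
        size_t produced = DecodeChunk(dst->data() + dst_offset, kChunk);
        dst_offset += produced;
        if (produced < kChunk) break;                      // decoder produced its last bytes
      }
      dst->resize(dst_offset);                             // trim the unused tail
    }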
|
/art/test/201-built-in-except-detail-messages/src/ |
D | Main.java |
  136  Integer[] dst = new Integer[10];  in arrayStore() local
  137  System.arraycopy(src, 1, dst, 0, 5);  in arrayStore()
  145  int[] dst = new int[1];  in arrayStore() local
  146  System.arraycopy(src, 0, dst, 0, 1);  in arrayStore()
  153  Runnable[] dst = new Runnable[1];  in arrayStore() local
  154  System.arraycopy(src, 0, dst, 0, 1);  in arrayStore()
  161  double[][] dst = new double[1][];  in arrayStore() local
  162  System.arraycopy(src, 0, dst, 0, 1);  in arrayStore()
  169  Object[] dst = new Object[1];  in arrayStore() local
  170  System.arraycopy(src, 0, dst, 0, 1);  in arrayStore()
  [all …]
|
/art/test/020-string/src/ |
D | Main.java |
  131  char[] dst = new char[7];  in copyTest() local
  142  src.getChars(-1, 9, dst, 0);  in copyTest()
  149  src.getChars(2, 19, dst, 0);  in copyTest()
  156  src.getChars(2, 1, dst, 0);  in copyTest()
  163  src.getChars(2, 10, dst, 0);  in copyTest()
  169  src.getChars(2, 9, dst, 0);  in copyTest()
  170  System.out.println(new String(dst));  in copyTest()
|
/art/tools/ |
D | buildbot-build.sh |
  252  dst="$ANDROID_PRODUCT_OUT/system/${l}/${so}"
  253  if [ "${src}" -nt "${dst}" ]; then
  254  cmd="cp -p \"${src}\" \"${dst}\""
  282  dst=$ANDROID_PRODUCT_OUT/system/${lib_dir}/${lib_file}
  284  mkdir -p $(dirname $dst)
  285  ln -sf $src $dst
  308  dst="$linkerconfig_root/apex/com.android.art"
  310  dst="$linkerconfig_root/apex/${apex}"
  312  msginfo "Copying APEX directory" "from $src to $dst"
  313  rm -rf $dst
  [all …]
|
/art/test/536-checker-intrinsic-optimization/src/ |
D | Main.java |
  427  char[] dst = new char[src.length()];  in stringGetCharsAndBack() local
  428  src.getChars(0, src.length(), dst, 0);  in stringGetCharsAndBack()
  429  return new String(dst);  in stringGetCharsAndBack()
  433  char[] dst = new char[src.length() + offset];  in stringGetCharsAndBackOffset() local
  434  src.getChars(0, src.length(), dst, offset);  in stringGetCharsAndBackOffset()
  435  return new String(dst, offset, src.length());  in stringGetCharsAndBackOffset()
  439  char[] dst = new char[srcEnd - srcBegin + offset];  in stringGetCharsRange() local
  440  src.getChars(srcBegin, srcEnd, dst, offset);  in stringGetCharsRange()
  441  return new String(dst, offset, srcEnd - srcBegin);  in stringGetCharsRange()
|
/art/test/702-LargeBranchOffset/ |
D | generate |
  42   with open("src/Main.java", "wt") as dst:
  44   dst.write(TEMPLATE.strip().replace("BODY", body))
|
/art/libdexfile/dex/ |
D | base64_test_util.h |
  92   std::unique_ptr<uint8_t[]> dst(new uint8_t[tmp.size()]);  in DecodeBase64()
  94   std::copy(tmp.begin(), tmp.end(), dst.get());  in DecodeBase64()
  95   return dst.release();  in DecodeBase64()
|
/art/tools/ahat/src/main/com/android/ahat/dominators/ |
D | Dominators.java |
  256  public final Node dst;  field in Dominators.Link
  259  public Link(NodeS srcS, Node dst) {  in Link() argument
  261  this.dst = dst;  in Link()
  268  this.dst = null;  in Link()
  314  if (link.dst == null) {  in computeDominators()
  320  NodeS dstS = (NodeS)graph.getDominatorsComputationState(link.dst);  in computeDominators()
  325  graph.setDominatorsComputationState(link.dst, dstS);  in computeDominators()
  327  dstS.node = link.dst;  in computeDominators()
  336  for (Node child : graph.getReferencesForDominators(link.dst)) {  in computeDominators()
|
/art/test/646-checker-arraycopy-large-cst-pos/src/ |
D | Main.java |
  30   Object[] dst = new Object[2048];  in test() local
  34   System.arraycopy(src, 0, dst, 1024, 64);  in test()
|