/external/openssl/crypto/bn/asm/

x86_64-mont5.pl
     36  $bp="%rdx"; # const BN_ULONG *bp,
    147  mov %rdx,$hi0
    156  adc \$0,%rdx
    157  mov %rdx,$hi1
    166  adc \$0,%rdx
    169  adc \$0,%rdx
    171  mov %rdx,$hi1
    177  adc \$0,%rdx
    179  mov %rdx,$lo0
    189  adc \$0,%rdx
    [all …]

x86_64-mont.pl
     48  $bp="%rdx"; # const BN_ULONG *bp,
    110  mov %rdx,$hi0
    115  adc \$0,%rdx
    116  mov %rdx,$hi1
    125  adc \$0,%rdx
    128  adc \$0,%rdx
    130  mov %rdx,$hi1
    136  adc \$0,%rdx
    138  mov %rdx,$lo0
    146  adc \$0,%rdx
    [all …]

modexp512-x86_64.pl
     93  adc \$0, %rdx
     98  mov %rdx, $TMP
    104  adc \$0, %rdx
    106  adc \$0, %rdx
    110  mov %rdx, $X[0]
    125  adc \$0, %rdx
    130  mov %rdx, $TMP
    135  adc \$0, %rdx
    137  adc \$0, %rdx
    141  mov %rdx, $X[0]
    [all …]

/external/v8/test/cctest/

test-disasm-x64.cc
     75  __ addq(rdx, rbx); in TEST()
     76  __ addq(rdx, Operand(rbx, 0)); in TEST()
     77  __ addq(rdx, Operand(rbx, 16)); in TEST()
     78  __ addq(rdx, Operand(rbx, 1999)); in TEST()
     79  __ addq(rdx, Operand(rsp, 0)); in TEST()
     80  __ addq(rdx, Operand(rsp, 16)); in TEST()
     81  __ addq(rdx, Operand(rsp, 1999)); in TEST()
     91  __ and_(rdx, Immediate(3)); in TEST()
     92  __ and_(rdx, Operand(rsp, 4)); in TEST()
     93  __ cmpq(rdx, Immediate(3)); in TEST()
    [all …]

test-macro-assembler-x64.cc
     77  using v8::internal::rdx;
    107  __ Move(rdx, Smi::FromInt(1)); in ExitCode()
    108  __ cmpq(rdx, v8::internal::kSmiConstantRegister); in ExitCode()
    109  __ movq(rdx, Immediate(-1)); in ExitCode()
    110  __ cmovq(not_equal, rax, rdx); in ExitCode()
    145  __ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(0))); in TestMoveSmi()
    146  __ cmpq(rcx, rdx); in TestMoveSmi()
    198  __ Move(rdx, Smi::FromInt(y)); in TestSmiCompare()
    199  __ movq(r9, rdx); in TestSmiCompare()
    200  __ SmiCompare(rcx, rdx); in TestSmiCompare()
    [all …]

/external/valgrind/main/none/tests/amd64/

cmpxchg.c
     14  ULong rdx; variable
     24  rdx = 0x11111111; rax = 0x22222222; in main()
     40  "\tmov " VG_SYM(rdx) ",%rdx\n" in main()
     45  "\tmov " VG_SYM(rdx) "(%rip),%rdx\n" in main()
     68  rdx = 0x99999999; rax = 0x77777777; in main()
     84  "\tmov " VG_SYM(rdx) ",%rdx\n" in main()
     89  "\tmov " VG_SYM(rdx) "(%rip),%rdx\n" in main()
    112  rdx = 0x11111111; rax = 0x22222222; in main()
    128  "\tmov " VG_SYM(rdx) ",%rdx\n" in main()
    133  "\tmov " VG_SYM(rdx) "(%rip),%rdx\n" in main()
    [all …]

/external/zlib/contrib/amd64/

amd64-match.S
    269  mov $(-MAX_MATCH_8), %rdx
    275  prefetcht1 (%windowbestlen, %rdx)
    276  prefetcht1 (%prev, %rdx)
    293  movdqu (%windowbestlen, %rdx), %xmm1
    294  movdqu (%prev, %rdx), %xmm2
    296  movdqu 16(%windowbestlen, %rdx), %xmm3
    297  movdqu 16(%prev, %rdx), %xmm4
    299  movdqu 32(%windowbestlen, %rdx), %xmm5
    300  movdqu 32(%prev, %rdx), %xmm6
    302  movdqu 48(%windowbestlen, %rdx), %xmm7
    [all …]

/external/libvpx/vp8/common/x86/

subpixel_ssse3.asm
     47  movsxd rdx, DWORD PTR arg(5) ;table index
     49  shl rdx, 4
     54  add rax, rdx
     68  movsxd rdx, dword ptr arg(3) ;output_pitch
     70  sub rdi, rdx
     88  lea rdi, [rdi + rdx]
    126  movsxd rdx, dword ptr arg(3) ;output_pitch
    128  sub rdi, rdx
    143  lea rdi, [rdi + rdx]
    188  movsxd rdx, DWORD PTR arg(5) ;table index
    [all …]

subpixel_sse2.asm
     46  mov rdx, arg(6) ;vp8_filter
     76  pmullw xmm3, XMMWORD PTR [rdx] ; x[-2] * H[-2]; Tap 1
     80  pmullw xmm4, XMMWORD PTR [rdx+16] ; x[-1] * H[-1]; Tap 2
     86  pmullw xmm5, [rdx+32] ; x[ 0] * H[ 0]; Tap 3
     91  pmullw xmm6, [rdx+48] ; x[ 1] * h[ 1] ; Tap 4
     97  pmullw xmm7, [rdx+64] ; x[ 2] * h[ 2] ; Tap 5
    100  pmullw xmm1, [rdx+80] ; x[ 3] * h[ 3] ; Tap 6
    166  mov rdx, arg(6) ;vp8_filter
    201  pmullw xmm3, XMMWORD PTR [rdx] ; x[-2] * H[-2]; Tap 1
    205  pmullw xmm4, XMMWORD PTR [rdx+16] ; x[-1] * H[-1]; Tap 2
    [all …]

idctllm_mmx.asm
     45  mov rdx, arg(1) ;output
    172  movq [rdx], mm0
    174  movq [rdx+rax], mm1
    175  movq [rdx+rax*2], mm2
    177  add rdx, rax
    178  movq [rdx+rax*2], mm5
    200  mov rdx, arg(1) ;output
    208  movq [rdx], mm0
    209  movq [rdx+rax], mm0
    211  movq [rdx+rax*2], mm0
    [all …]

subpixel_mmx.asm
     40  mov rdx, arg(6) ;vp8_filter
     42  movq mm1, [rdx + 16] ; do both the negative taps first!!!
     43  movq mm2, [rdx + 32] ;
     44  movq mm6, [rdx + 48] ;
     45  movq mm7, [rdx + 64] ;
     80  pmullw mm4, [rdx+80] ; mm5 *= kernel 0 modifiers
     84  pmullw mm5, [rdx] ; mm5 *= kernel 5 modifiers
    145  movsxd rdx, dword ptr arg(3) ;pixels_per_line
    148  sub rsi, rdx
    149  sub rsi, rdx
    [all …]

loopfilter_sse2.asm
    113  mov rdx, arg(2) ; get flimit
    126  movdqa xmm4, XMMWORD PTR [rdx] ; flimit
    131  mov rdx, arg(4) ; hev get thresh
    151  movdqa xmm2, XMMWORD PTR [rdx] ; hev
    177  lea rdx, srct
    179  movdqa xmm2, [rdx] ; p1
    180  movdqa xmm7, [rdx+48] ; q1
    181  movdqa xmm6, [rdx+16] ; p0
    182  movdqa xmm0, [rdx+32] ; q0
    507  movdqa xmm5, XMMWORD PTR [rdx+96] ; q2
    [all …]

/external/v8/src/x64/

ic-x64.cc
    236  StubCompiler::GenerateLoadArrayLength(masm, rax, rdx, &miss); in GenerateArrayLength()
    250  StubCompiler::GenerateLoadStringLength(masm, rax, rdx, rbx, &miss, in GenerateStringLength()
    265  StubCompiler::GenerateLoadFunctionPrototype(masm, rax, rdx, rbx, &miss); in GenerateFunctionPrototype()
    405  masm, rdx, rcx, Map::kHasIndexedInterceptor, &slow); in GenerateGeneric()
    411  rdx, in GenerateGeneric()
    424  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset)); in GenerateGeneric()
    448  masm, rdx, rcx, Map::kHasNamedInterceptor, &slow); in GenerateGeneric()
    452  __ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset)); in GenerateGeneric()
    459  __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset)); in GenerateGeneric()
    519  __ movq(rax, FieldOperand(rdx, rcx, times_pointer_size, 0)); in GenerateGeneric()
    [all …]

stub-cache-x64.cc
    482  Register arguments_arg = rdx; in GenerateFastApiCall()
   1015  Register name_arg = rdx; in GenerateLoadCallback()
   1234  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); in GenerateGlobalReceiverCheck()
   1238  __ JumpIfSmi(rdx, miss); in GenerateGlobalReceiverCheck()
   1239  CheckPrototypes(object, rdx, holder, rbx, rax, rdi, name, miss); in GenerateGlobalReceiverCheck()
   1300  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); in CompileCallField()
   1303  __ JumpIfSmi(rdx, &miss); in CompileCallField()
   1306  Register reg = CheckPrototypes(object, rdx, holder, rbx, rax, rdi, in CompileCallField()
   1319  __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset)); in CompileCallField()
   1320  __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx); in CompileCallField()
    [all …]

code-stubs-x64.cc
     69  __ movq(rdx, Operand(rsp, 1 * kPointerSize)); in Generate()
     90  __ movq(FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset), rdx); in Generate()
     97  __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset)); in Generate()
     98  __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize)); in Generate()
     99  __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx); in Generate()
    108  __ pop(rdx); in Generate()
    110  __ push(rdx); in Generate()
    243  __ AllocateInNewSpace(size, rax, rbx, rdx, fail, TAG_OBJECT); in GenerateFastCloneShallowArrayCommon()
    257  __ lea(rdx, Operand(rax, JSArray::kSize)); in GenerateFastCloneShallowArrayCommon()
    258  __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx); in GenerateFastCloneShallowArrayCommon()
    [all …]

builtins-x64.cc
    176  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); in Generate_JSConstructStubHelper()
    188  __ InitializeFieldsWithFiller(rcx, rsi, rdx); in Generate_JSConstructStubHelper()
    189  __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex); in Generate_JSConstructStubHelper()
    191  __ InitializeFieldsWithFiller(rcx, rdi, rdx); in Generate_JSConstructStubHelper()
    208  __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset)); in Generate_JSConstructStubHelper()
    211  __ addq(rdx, rcx); in Generate_JSConstructStubHelper()
    214  __ subq(rdx, rcx); in Generate_JSConstructStubHelper()
    226  rdx, in Generate_JSConstructStubHelper()
    240  __ Integer32ToSmi(rdx, rdx); in Generate_JSConstructStubHelper()
    241  __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length in Generate_JSConstructStubHelper()
    [all …]

/external/valgrind/main/VEX/orig_amd64/

test1.sorted
      1  0384941C040000 addl 1052(%rsp,%rdx,4), %eax
      9  034495FC addl -4(%rbp,%rdx,4), %eax
     12  038AB8C90000 addl 51640(%rdx), %ecx
     14  03B2BCC90000 addl 51644(%rdx), %esi
     16  03BAC0C90000 addl 51648(%rdx), %edi
     39  4801C2 addq %rax, %rdx
     41  4801EA addq %rbp, %rdx
     44  4801D0 addq %rdx, %rax
     45  4801D2 addq %rdx, %rdx
     46  4801F2 addq %rsi, %rdx
    [all …]

test2.sorted
      1  40027A08 addb 8(%rdx), %dil
     86  037A08 addl 8(%rdx), %edi
    146  48035110 addq 16(%rcx), %rdx
    147  48035610 addq 16(%rsi), %rdx
    170  48031507000000 addq 7(%rip), %rdx
    192  664101445616 addw %ax, 22(%r14,%rdx,2)
    193  6601445316 addw %ax, 22(%rbx,%rdx,2)
    194  40227A08 andb 8(%rdx), %dil
    284  237A08 andl 8(%rdx), %edi
    370  480F44D0 cmove %rax, %rdx
    [all …]

/external/elfutils/tests/

testfile45.expect.bz2
    … : 44 00 02 add %r8b,(%rdx)
     25  34: 44 00 03 add %r8b …

/external/libvpx/vp8/encoder/x86/

subtract_mmx.asm
     30  movsxd rdx, dword ptr arg(1);src_stride;
     42  movd mm0, [rsi+rdx]
     50  movd mm0, [rsi+rdx*2]
     57  lea rsi, [rsi+rdx*2]
     62  movd mm0, [rsi+rdx]
     91  movsxd rdx, dword ptr arg(3) ;stride
    139  lea rsi, [rsi+rdx]
    176  movsxd rdx, dword ptr arg(4) ;stride;
    193  movq mm0, [rsi+rdx]
    206  movq mm0, [rsi+rdx*2]
    [all …]

sad_sse2.asm
     32  movsxd rdx, dword ptr arg(3) ;ref_stride
     48  movq xmm5, QWORD PTR [rdi+rdx]
     57  movq xmm3, QWORD PTR [rdi+rdx+8]
     60  lea rdi, [rdi+rdx*2]
    105  movsxd rdx, dword ptr arg(3) ;ref_stride
    122  movq mm3, QWORD PTR [rdi+rdx]
    128  lea rdi, [rdi+rdx*2]
    168  movsxd rdx, dword ptr arg(3) ;ref_stride
    185  add rdi, rdx
    220  movsxd rdx, dword ptr arg(3) ;ref_stride
    [all …]

/external/zlib/contrib/gcc_gvmat64/

gvmat64.S
    419  mov rdx, 0xfffffffffffffef8 //; -(MAX_MATCH_8)
    423  prefetcht1 [rsi+rdx]
    424  prefetcht1 [rdi+rdx]
    443  mov rax, [rsi + rdx]
    444  xor rax, [rdi + rdx]
    447  mov rax, [rsi + rdx + 8]
    448  xor rax, [rdi + rdx + 8]
    452  mov rax, [rsi + rdx + 8+8]
    453  xor rax, [rdi + rdx + 8+8]
    456  add rdx,8+8+8
    [all …]

/external/libvpx/vp8/decoder/x86/

dequantize_mmx.asm
     65  mov rdx, arg(1) ;dq
     69  pmullw mm0, [rdx]
     72  pmullw mm1, [rdx +8]
     75  pmullw mm2, [rdx+16]
     78  pmullw mm3, [rdx+24]
     80  mov rdx, arg(3) ;dest
    218  movd [rdx], mm0
    224  movd [rdx+rdi], mm1
    230  movd [rdx+rdi*2], mm2
    232  add rdx, rdi
    [all …]

/external/openssl/crypto/

x86_64cpuid.pl
     16  ($arg1,$arg2,$arg3,$arg4)=$win64?("%rcx","%rdx","%r8", "%r9") : # Win64 order
     17  ("%rdi","%rsi","%rdx","%rcx"); # Unix order
     49  shl \$32,%rdx
     50  or %rdx,%rax
    233  xorq %rdx,%rdx
    256  xorq %rdx,%rdx

/external/valgrind/main/exp-bbv/tests/amd64-linux/

ll.S
     89  mov text_buf(%rdx), %al # load byte from text_buf[]
    127  mov $strcat,%edx # use rdx as call pointer (smaller op)
    142  call *%rdx # call strcat
    145  call *%rdx # call strcat
    149  call *%rdx # call strcat
    153  call *%rdx # call strcat
    157  call *%rdx # call strcat
    164  call *%rdx # call strcat
    176  push %rdx # save call pointer
    270  call *%rdx # call find string
    [all …]