/third_party/openssl/crypto/whrlpool/asm/x86_64/ |
D | wp-x86_64.s |
      12  pushq %rbp
      13  .cfi_offset %rbp,-24
      35  leaq .Ltable(%rip),%rbp
      77  movq 4096(%rbp,%rsi,8),%r8
      87  xorq 0(%rbp,%rsi,8),%r8
      88  movq 7(%rbp,%rdi,8),%r9
      94  movq 6(%rbp,%rsi,8),%r10
      95  movq 5(%rbp,%rdi,8),%r11
     101  movq 4(%rbp,%rsi,8),%r12
     102  movq 3(%rbp,%rdi,8),%r13
    [all …]
|
/third_party/openssl/crypto/bn/asm/x86_64/ |
D | x86_64-gf2m.s |
      19  andq %rbp,%rax
      23  andq %rbp,%rsi
      27  andq %rbp,%rdi
      68  andq %rbp,%rsi
      70  shrq $4,%rbp
      73  andq %rbp,%rdi
      74  shrq $4,%rbp
      78  andq %rbp,%rsi
      79  shrq $4,%rbp
      84  andq %rbp,%rdi
    [all …]
|
D | x86_64-mont5.s |
      23  pushq %rbp
      24  .cfi_offset %rbp,-24
     205  movq %r8,%rbp
     210  imulq %r10,%rbp
     213  mulq %rbp
     241  mulq %rbp
     325  movq %r8,%rbp
     333  imulq %r10,%rbp
     336  mulq %rbp
     367  mulq %rbp
    [all …]
|
D | x86_64-mont.s |
      28  pushq %rbp
      29  .cfi_offset %rbp,-24
      80  movq %r8,%rbp
      85  imulq %r10,%rbp
      88  mulq %rbp
     116  mulq %rbp
     141  movq %r8,%rbp
     148  imulq %r10,%rbp
     151  mulq %rbp
     182  mulq %rbp
    [all …]
|
D | rsaz-x86_64.s |
      13  pushq %rbp
      15  .cfi_offset %rbp,-24
      47  movq %rax,%rbp
     102  mulq %rbp
     108  mulq %rbp
     116  mulq %rbp
     124  mulq %rbp
     132  mulq %rbp
     140  mulq %rbp
     142  movq %rbp,%rax
    [all …]
|
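
The x86_64-mont.s / x86_64-mont5.s matches above use %rbp to hold the per-iteration Montgomery multiplier: "movq %r8,%rbp" followed by "imulq %r10,%rbp" computes m = t[0]*n0' mod 2^64 (with n0' = -n^-1 mod 2^64), and the subsequent "mulq %rbp" instructions multiply that m against the modulus words. Below is a minimal C sketch of that one step, assuming little-endian limb arrays and a precomputed n0'; the names are illustrative, not OpenSSL's bn_mul_mont API.

    #include <stdint.h>

    /* One word of Montgomery reduction: pick m so that (t + m*n) has a zero
     * low limb, add m*n limb by limb, and return the carry out of the top.
     * This is the work the "imulq %r10,%rbp" / "mulq %rbp" pairs above do. */
    uint64_t mont_reduce_word(uint64_t *t, const uint64_t *n, int num,
                              uint64_t n0_inv /* -n^-1 mod 2^64 */)
    {
        uint64_t m = t[0] * n0_inv;              /* m = t[0]*n0' mod 2^64    */
        unsigned __int128 acc = 0;

        for (int i = 0; i < num; i++) {          /* t += m * n               */
            acc += (unsigned __int128)m * n[i] + t[i];
            t[i] = (uint64_t)acc;                /* t[0] becomes 0           */
            acc >>= 64;
        }
        return (uint64_t)acc;                    /* carry for the caller     */
    }

After such a step the caller shifts the accumulator down one limb; repeating this once per limb and finishing with a conditional subtraction of n is, broadly, what the rest of these routines do.
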
/third_party/openssl/crypto/ec/asm/ |
D | x25519-x86_64.pl |
     106  push %rbp
     107  .cfi_push %rbp
     126  mov 8*3(%rdx),%rbp
     148  mulq %rbp # f[0]*g[3]
     170  imulq \$19,%rbp,%rdi # g[3]*19
     174  mulq %rbp # f[1]*g[3]
     175  mov 8*2(%rsp),%rbp # g[2]
     189  imulq \$19,%rbp,%rdi # g[2]*19
     193  mulq %rbp # f[1]*g[2]
     197  mulq %rbp # f[2]*g[2]
    [all …]
|
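
The "imulq \$19,%rbp,%rdi # g[3]*19" lines above are the radix-2^64 form of the usual curve25519 reduction identity: with p = 2^255 - 19, anything at or above bit 255 can be folded back in after multiplying by 19. A minimal sketch of that fold on a 4-limb value follows (illustrative only; in the real code the fold is interleaved with the multiplication, with high-order partial products generated already scaled by 19 so they can be accumulated straight into the low limbs).

    #include <stdint.h>

    /* Fold the bits at and above 2^255 of a 256-bit value back into the low
     * 255 bits, using 2^255 = 19 (mod 2^255 - 19).  h[] is little-endian,
     * four 64-bit limbs. */
    void fold_mod_25519(uint64_t h[4])
    {
        uint64_t top = h[3] >> 63;                 /* the bit at 2^255       */
        h[3] &= 0x7fffffffffffffffULL;             /* keep the low 255 bits  */

        unsigned __int128 acc = (unsigned __int128)h[0] + 19 * top;
        h[0] = (uint64_t)acc;
        for (int i = 1; i < 4; i++) {              /* propagate the carry    */
            acc = (unsigned __int128)h[i] + (uint64_t)(acc >> 64);
            h[i] = (uint64_t)acc;
        }
        /* The result can still be >= p; the real code finishes with a
         * constant-time conditional subtraction of p. */
    }
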
/third_party/openssl/crypto/bn/asm/ |
D | rsaz-x86_64.pl |
      89  ($out, $inp, $mod) = ("%rdi", "%rsi", "%rbp"); # common internal API
     105  push %rbp
     106  .cfi_push %rbp
     138  mov %rax, %rbp # 8($inp)
     193  mulq %rbp
     199  mulq %rbp
     207  mulq %rbp
     215  mulq %rbp
     223  mulq %rbp
     231  mulq %rbp
    [all …]
|
D | x86_64-mont.pl |
      96  $m1="%rbp";
     130  push %rbp
     131  .cfi_push %rbp
     353  mov -16(%rsi),%rbp
     354  .cfi_restore %rbp
     385  push %rbp
     386  .cfi_push %rbp
     806  mov -16(%rsi),%rbp
     807  .cfi_restore %rbp
     828  my ($i,$j,$tptr)=("%rbp","%rcx",$rptr);
    [all …]
|
D | x86_64-mont5.pl |
      85  $m1="%rbp";
     114  push %rbp
     115  .cfi_push %rbp
     455  mov -16(%rsi),%rbp
     456  .cfi_restore %rbp
     487  push %rbp
     488  .cfi_push %rbp
     514  mov %rsp,%rbp
     519  sub %r11,%rbp # align with $rp
     520  lea -320(%rbp,$num,2),%rbp # future alloca(frame+2*num*8+256)
    [all …]
|
D | x86_64-gf2m.pl |
      47  ($b,$mask)=("%rbp","%r8");
     233  mov %rbp,8*13(%rsp)
     234  .cfi_rel_offset %rbp,8*13
     269  mov 32(%rsp),%rbp
     274  mov @r[0],0(%rbp)
     276  mov @r[3],24(%rbp)
     280  mov $hi,16(%rbp)
     281  mov $lo,8(%rbp)
     289  mov 8*13(%rsp),%rbp
     290  .cfi_restore %rbp
    [all …]
|
/third_party/mindspore/mindspore/ccsrc/backend/kernel_compiler/cpu/nnacl/assembly/avx/ |
D | MatmulAvx.S |
      55  pushq %rbp
      78  movq 56(%rsp), %rbp // row
      79  movq %rbp, 8(%rsp)
      80  movq 64(%rsp), %rbp // col
      81  movq %rbp, 16(%rsp)
      82  movq 72(%rsp), %rbp // stride
      83  movq %rbp, 24(%rsp)
      84  movq 80(%rsp), %rbp // weiteMode
      85  movq %rbp, 32(%rsp)
      87  movq 8(%rsp), %rbp
    [all …]
|
D | ConvDwFp32Avx3x3.S |
      50  pushq %rbp
      71  movq 56(%rsp), %rbp // input_stride
      72  movq %rbp, 8(%rsp)
      73  movq 64(%rsp), %rbp // relu
      74  movq %rbp, 16(%rsp)
      75  movq 72(%rsp), %rbp // relu6
      76  movq %rbp, 24(%rsp)
      96  movq 56(%rsi), %rbp
     140  vmovups (%rbp), %ymm7
     141  addq $32, %rbp
    [all …]
|
/third_party/openssl/crypto/ec/asm/x86_64/ |
D | ecp_nistz256-x86_64.s |
    2795  pushq %rbp
    2797  .cfi_offset %rbp,-16
    2850  movq %r8,%rbp
    2863  movq %rbp,%rax
    2865  movq %rbp,%rdx
    2872  sbbq %rdx,%rbp
    2875  adcq %rbp,%r12
    2884  movq %rdx,%rbp
    2887  addq %rbp,%r10
    2892  movq %rdx,%rbp
    [all …]
|
D | x25519-x86_64.s |
       8  pushq %rbp
      10  .cfi_offset %rbp,-16
      34  movq 24(%rdx),%rbp
      56  mulq %rbp
      78  imulq $19,%rbp,%rdi
      82  mulq %rbp
      83  movq 16(%rsp),%rbp
      97  imulq $19,%rbp,%rdi
     101  mulq %rbp
     105  mulq %rbp
    [all …]
|
/third_party/openssl/crypto/aes/asm/x86_64/ |
D | aesni-mb-x86_64.s |
      22  pushq %rbp
      23  .cfi_offset %rbp,-24
     112  leaq 16(%rsp),%rbp
     114  subq %rbx,%rbp
     129  cmovgeq %rbp,%r8
     130  cmovgq %rbp,%r12
     137  cmovgeq %rbp,%r9
     138  cmovgq %rbp,%r13
     145  cmovgeq %rbp,%r10
     146  cmovgq %rbp,%r14
    [all …]
|
/third_party/openssl/engines/asm/ |
D | e_padlock-x86_64.pl |
     290  push %rbp
     316  lea (%rsp),%rbp
     323  lea (%rax,%rbp),%rsp
     344  cmp %rsp,%rbp
     426  cmp %rsp,%rbp
     440  cmp %rsp,%rbp
     457  cmp %rbp,%rsp
     465  cmp %rax,%rbp
     469  lea (%rbp),%rsp
     514  lea ($inp,$len),%rbp
    [all …]
|
/third_party/openssl/crypto/whrlpool/asm/ |
D | wp-x86_64.pl |
      74  push %rbp
      75  .cfi_push %rbp
      97  lea $table(%rip),%rbp
     113  mov 4096(%rbp,%rsi,8),@mm[0] # rc[r]
     127  xor 0(%rbp,%rsi,8),@mm[0]
     128  $func 7(%rbp,%rdi,8),@mm[1]
     134  $func 6(%rbp,%rsi,8),@mm[2]
     135  $func 5(%rbp,%rdi,8),@mm[3]
     141  $func 4(%rbp,%rsi,8),@mm[4]
     142  $func 3(%rbp,%rdi,8),@mm[5]
    [all …]
|
/third_party/openssl/crypto/md5/asm/ |
D | md5-x86_64.pl |
     144  push %rbp
     145  .cfi_push %rbp
     159  mov %rdi, %rbp # rbp = ctx
     162  mov 0*4(%rbp), %eax # eax = ctx->A
     163  mov 1*4(%rbp), %ebx # ebx = ctx->B
     164  mov 2*4(%rbp), %ecx # ecx = ctx->C
     165  mov 3*4(%rbp), %edx # edx = ctx->D
     264  mov %eax, 0*4(%rbp) # ctx->A = A
     265  mov %ebx, 1*4(%rbp) # ctx->B = B
     266  mov %ecx, 2*4(%rbp) # ctx->C = C
    [all …]
|
/third_party/openssl/crypto/sha/asm/x86_64/ |
D | keccak1600-x86_64.s |
      11  movq 92(%rdi),%rbp
      27  movq %rbp,%r12
      28  xorq -68(%rdi),%rbp
      34  xorq -28(%rdi),%rbp
      40  xorq 12(%rdi),%rbp
      53  xorq 52(%rdi),%rbp
      56  xorq %rbp,%rbx
      58  rolq $1,%rbp
      59  xorq %r13,%rbp
      63  xorq %rbp,%r11
    [all …]
|
/third_party/libunwind/src/x86_64/ |
D | Gtrace.c |
     216  unw_word_t rbp, in trace_init_addr() argument
     247  && likely(dwarf_put (d, d->loc[UNW_X86_64_RBP], rbp) >= 0) in trace_init_addr()
     278  unw_word_t rbp, in trace_lookup() argument
     331  return trace_init_addr (frame, cursor, cfa, rip, rbp, rsp); in trace_lookup()
     403  unw_word_t rbp, rsp, rip, cfa; in tdep_trace() local
     422  ACCESS_MEM_FAST(ret, 0, d, DWARF_GET_LOC(d->loc[UNW_X86_64_RBP]), rbp); in tdep_trace()
     442  depth, cfa, rip, rsp, rbp); in tdep_trace()
     449  unw_tdep_frame_t *f = trace_lookup (cursor, cache, cfa, rip, rbp, rsp); in tdep_trace()
     483  cfa = (f->cfa_reg_rsp ? rsp : rbp) + f->cfa_reg_offset; in tdep_trace()
     488  ACCESS_MEM_FAST(ret, validate, d, cfa + f->rbp_cfa_offset, rbp); in tdep_trace()
    [all …]
|
D | Gstep.c |
     151  unw_word_t rbp; in unw_step() local
     153  ret = dwarf_get (&c->dwarf, c->dwarf.loc[RBP], &rbp); in unw_step()
     164  if (!rbp && invalid_prev_rip == 0) in unw_step()
     207  rbp_loc = DWARF_LOC (rbp, 0); in unw_step()
     225  rbp_loc = DWARF_LOC(rbp, 0); in unw_step()
     226  rsp_loc = DWARF_VAL_LOC(c, rbp + 16); in unw_step()
     227  rip_loc = DWARF_LOC (rbp + 8, 0); in unw_step()
     231  rbp, c->dwarf.cfa, rbp1); in unw_step()
     239  || rbp < c->dwarf.cfa in unw_step()
     240  || (rbp - c->dwarf.cfa) > 0x4000) in unw_step()
|
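
The Gstep.c lines above are libunwind's frame-pointer fallback: when no DWARF CFI is available it treats %rbp as a frame chain, taking the caller's rbp from [rbp], the return address from [rbp+8], the caller's rsp as rbp+16, and rejecting rbp values that do not lie a bounded distance above the current CFA. A self-contained sketch of the same walk (not libunwind's API; the names here are made up), meaningful only for code built with -fno-omit-frame-pointer:

    #include <stdint.h>
    #include <stdio.h>

    /* Walk the classic rbp chain: each frame stores the caller's rbp at
     * [rbp] and the return address at [rbp+8]. */
    static void walk_frame_chain(uintptr_t rbp, int max_depth)
    {
        for (int depth = 0; depth < max_depth && rbp; ++depth) {
            const uintptr_t *frame = (const uintptr_t *)rbp;
            uintptr_t saved_rbp = frame[0];   /* caller's frame pointer   */
            uintptr_t return_ip = frame[1];   /* return address at rbp+8  */

            printf("#%d ip=%#lx rbp=%#lx\n", depth,
                   (unsigned long)return_ip, (unsigned long)rbp);

            /* Sanity check in the spirit of Gstep.c: the chain must move
             * strictly upward and by a bounded amount. */
            if (saved_rbp <= rbp || saved_rbp - rbp > 0x4000)
                break;
            rbp = saved_rbp;
        }
    }

    int main(void)
    {
        walk_frame_chain((uintptr_t)__builtin_frame_address(0), 8);
        return 0;
    }
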
/third_party/openssl/crypto/camellia/asm/ |
D | cmll-x86_64.pl |
      62  $Tbl="%rbp"; # size optimization
     145  push %rbp
     146  .cfi_push %rbp
     189  mov 24(%rsp),%rbp
     190  .cfi_restore %rbp
     272  push %rbp
     273  .cfi_push %rbp
     316  mov 24(%rsp),%rbp
     317  .cfi_restore %rbp
     449  push %rbp
    [all …]
|
/third_party/libffi/src/x86/ |
D | win64.S |
      53  movq %rbp, (arg1)
      55  movq arg1, %rbp
      56  cfi_def_cfa(%rbp, 16)
      57  cfi_rel_offset(%rbp, 0)
      58  SEH(.seh_pushreg %rbp)
      59  SEH(.seh_setframe %rbp, 0)
      75  call *16(%rbp)
      77  movl 24(%rbp), %ecx
      78  movq 32(%rbp), %r8
      91  cfi_restore(%rbp)
|
/third_party/openssl/crypto/modes/asm/x86_64/ |
D | ghash-x86_64.s |
      12  pushq %rbp
      14  .cfi_offset %rbp,-24
     123  pushq %rbp
     125  .cfi_offset %rbp,-24
     144  leaq 16+128(%rsp),%rbp
     162  movq %r8,0(%rbp)
     165  movq %rax,0-128(%rbp)
     174  movq %r9,8(%rbp)
     177  movq %rbx,8-128(%rbp)
     186  movq %r8,16(%rbp)
    [all …]
|
/third_party/openssl/crypto/poly1305/asm/x86_64/ |
D | poly1305-x86_64.s |
      60  pushq %rbp
      62  .cfi_offset %rbp,-24
      84  movq 16(%rdi),%rbp
      97  adcq %rcx,%rbp
     114  movq %rbp,%rbx
     123  imulq %r11,%rbp
     126  adcq %rbp,%r10
     129  movq %r10,%rbp
     131  andq $3,%rbp
     135  adcq $0,%rbp
    [all …]
|
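
The "andq $3,%rbp" in the poly1305-x86_64.s matches above is the partial reduction modulo p = 2^130 - 5: only bits 128-129 of the top limb stay in the accumulator, and everything at or above bit 130 is folded back in multiplied by 5, since 2^130 ≡ 5 (mod p). A minimal sketch of that fold over three 64-bit limbs (illustrative; not OpenSSL's poly1305 code, which typically gets the ×5 via shift-and-add rather than a multiply):

    #include <stdint.h>

    /* Partial reduction of a poly1305 accumulator
     * d = d[2]*2^128 + d[1]*2^64 + d[0] modulo 2^130 - 5:
     * keep the low two bits of d[2], fold the rest back in as 5*c. */
    void poly1305_partial_reduce(uint64_t d[3])
    {
        uint64_t c = d[2] >> 2;          /* bits at and above 2^130          */
        d[2] &= 3;                       /* accumulator keeps 130 bits       */

        unsigned __int128 acc =
            (unsigned __int128)d[0] + (unsigned __int128)c * 5;
        d[0] = (uint64_t)acc;
        acc = (unsigned __int128)d[1] + (uint64_t)(acc >> 64);
        d[1] = (uint64_t)acc;
        d[2] += (uint64_t)(acc >> 64);   /* carry back into bits 128+        */
    }
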