/external/llvm/test/MC/AArch64/ |
D | basic-a64-instructions.s |
      103  adds xzr, x25, w20, sxtb #3
      105  adds xzr, x2, w3, sxtw
      138  subs xzr, x25, w20, sxtb #3
      140  subs xzr, x2, w3, sxtw
      310  adds xzr, x3, #0x1, lsl #12 // FIXME: canonically should be cmn
      317  subs xzr, sp, #20, lsl #12 // FIXME: canonically should be cmp
      318  subs xzr, x30, #4095, lsl #0 // FIXME: canonically should be cmp
      392  add xzr, x3, x5
      393  add x20, xzr, x4
      394  add x4, x6, xzr
      [all …]
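The FIXME notes point at the standard alias rule: a flag-setting add/sub that writes xzr exists only for its effect on NZCV, so the architecture names cmn/cmp as the preferred form. A minimal sketch of the equivalence, assuming LLVM syntax (both spellings assemble to the same encoding; llvm-mc -show-encoding confirms it):

      adds xzr, x3, #0x1, lsl #12    // same encoding as: cmn x3, #0x1, lsl #12
      subs xzr, sp, #20, lsl #12     // same encoding as: cmp sp, #20, lsl #12
      subs xzr, x30, #4095           // same encoding as: cmp x30, #4095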
|
D | arm64-bitfield-encoding.s |
      15  sbfiz xzr, x0, #31, #1
      17  ubfiz xzr, x0, #31, #1
      26  ; CHECK: sbfiz xzr, x0, #31, #1 ; encoding: [0x1f,0x00,0x61,0x93]
      28  ; CHECK: ubfiz xzr, x0, #31, #1 ; encoding: [0x1f,0x00,0x61,0xd3]
|
D | basic-a64-diagnostics.s |
      37  add xzr, x3, x5, uxtx
      38  sub x3, xzr, w9, sxth #1
      52  adds x3, xzr, x9, uxtx
      152  subs x5, xzr, #0x456, lsl #12
      403  cmn x19, xzr, asr #-1
      404  cmn xzr, xzr, asr #64
      452  cmp x19, xzr, asr #-1
      453  cmp xzr, xzr, asr #64
      501  neg x19, xzr, asr #-1
      502  neg xzr, xzr, asr #64
      [all …]
|
D | arm64-leaf-compact-unwind.s |
      61  mov x9, xzr
      70  mov x9, xzr
      179  mov x9, xzr
      190  mov x9, xzr
|
/external/llvm/test/CodeGen/AArch64/ |
D | arm64-movi.ll |
      10  ; CHECK: orr x0, xzr, #0x700000007
      17  ; CHECK: orr x0, xzr, #0xc0000003c0000003
      24  ; CHECK: orr x0, xzr, #0xeeeeeeeeeeeeeeee
      99  ; CHECK: orr x0, xzr, #0xffff0000ffff0
      106  ; CHECK: orr x0, xzr, #0xffff0000ffff0
      113  ; CHECK: orr x0, xzr, #0xffff0000ffff0
      120  ; CHECK: orr x0, xzr, #0xffff0000ffff0
      128  ; CHECK: orr x0, xzr, #0xff00ff00ff00ff00
      135  ; CHECK: orr x0, xzr, #0xff00ff00ff00ff00
      143  ; CHECK: orr x0, xzr, #0xff00ff00ff00ff00
      [all …]
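These checks exercise constant materialisation: when a 64-bit value happens to be a valid logical (bitmask) immediate, a single ORR reading xzr produces it in one instruction, whereas other values need a movz/movk sequence. An illustrative sketch (values chosen for the example, not taken from the test):

      orr  x0, xzr, #0xff00ff00ff00ff00   // bitmask immediate: one instruction
      movz x0, #0x1234                    // non-bitmask value: built in pieces
      movk x0, #0x5678, lsl #16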
|
D | arm64-addrmode.ll |
      8  ; CHECK: ldr xzr, [x{{[0-9]+}}, #8]
      19  ; CHECK: ldr xzr, [
      30  ; CHECK: ldr xzr, [x{{[0-9]+}}, #32760]
      41  ; CHECK: ldr xzr, [
      52  ; CHECK: ldr xzr, [x{{[0-9]+}}, x{{[0-9]+}}, lsl #3]
      64  ; CHECK: ldr xzr, [
|
D | arm64-memset-inline.ll |
      7  ; CHECK: str xzr, [x0]
      16  ; CHECK: stp xzr, xzr, [sp, #16]
      17  ; CHECK: str xzr, [sp, #8]
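The inlined memset checks rely on the fact that a store can name xzr as its data register, so memory is zeroed without first materialising 0, and an stp of two xzr slots clears 16 bytes at a time. A small sketch of the idiom (hypothetical 24-byte buffer at x0):

      stp xzr, xzr, [x0]        // bytes 0-15
      str xzr, [x0, #16]        // bytes 16-23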
|
D | zero-reg.ll |
      12  ; CHECK: str xzr, [{{x[0-9]+}}, {{#?}}:lo12:var64]
      21  ; Important correctness point here is that LLVM doesn't try to use xzr
      22  ; as an addressing register: "str w0, [xzr]" is not a valid A64
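The comment is about the register-number-31 ambiguity: xzr and sp share encoding 31, and in the base-register slot of a load/store that encoding means sp, so an address of the form [xzr] simply does not exist. A short sketch of the distinction (assumed syntax):

      str xzr, [x0]      // fine: xzr as the data register stores zero
      // str w0, [xzr]   // rejected: register 31 in the base slot names sp, not xzr
      str w0, [sp]       // what encoding 31 in the base slot actually means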
|
D | movw-consts.ll |
      6  ; CHECK: mov x0, xzr
      36  ; CHECK: orr x0, xzr, #0x100000000
      42  ; CHECK: orr x0, xzr, #0xffff00000000
      48  ; CHECK: orr x0, xzr, #0x1000000000000
      122  ; CHECK: orr x0, xzr, #0xfffffffffffffffd
|
D | arm64-complex-ret.ll | 5 ; CHECK: stp xzr, xzr, [x8]
|
D | arm64-early-ifcvt.ll |
      59  ; CHECK: {{subs.*xzr,|cmp}} x2, #1
      95  ; CHECK: {{subs.*xzr,|cmp}} x2, #1
      131  ; CHECK: {{subs.*xzr,|cmp}} x2, #1
      167  ; CHECK: {{subs.*xzr,|cmp}} x2, #1
      203  ; CHECK: {{subs.*xzr,|cmp}} x2, #1
      239  ; CHECK: {{subs.*xzr,|cmp}} x2, #1
      274  ; CHECK: {{subs.*xzr,|cmp}} x2, #0
      308  ; CHECK: {{subs.*xzr,|cmp}} x2, #0
      325  ; CHECK: {{ands.*xzr,|tst}} w2, #0x80
      343  ; CHECK: {{ands.*xzr,|tst}} x2, #0x8000000000000000
      [all …]
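The {{...|...}} alternations accept either spelling of the same comparison: early if-conversion may leave the raw flag-setting instruction with an xzr destination, which the printer can render as the cmp/tst alias. For the check at 343, either of these would match (one encoding, alias preferred):

      ands xzr, x2, #0x8000000000000000   // raw form, result discarded
      tst  x2, #0x8000000000000000        // preferred alias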
|
D | arm64-dead-register-def-bug.ll | 13 ; CHECK-NOT: orr xzr, xzr, #0x2
|
D | arm64-atomic-128.ll |
      198  ; CHECK: ldaxp xzr, xzr, [x2]
      210  ; CHECK: ldxp xzr, xzr, [x2]
      222  ; CHECK: ldxp xzr, xzr, [x2]
|
D | arm64-inline-asm.ll |
      156  ; CHECK: mov xzr, {{x[0-9]+}}
      214  ; CHECK: USE(xzr)
      224  ; CHECK: USE(xzr), USE(xzr)
      227  ; CHECK: USE(xzr), USE(wzr)
|
/external/llvm/test/MC/Disassembler/AArch64/ |
D | ldp-preind.predictable.txt |
      16  # xzr != sp so "stp xzr, xzr, [sp, #8]!" is fine.
      18  # CHECK: stp xzr, xzr, [sp, #8]!
|
D | ldp-postind.predictable.txt |
      16  # xzr != sp so "stp xzr, xzr, [sp], #8" is fine.
      18  # CHECK: stp xzr, xzr, [sp], #8
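Both disassembler tests probe the writeback-hazard rule: a pre- or post-indexed load/store whose base register also appears as a transfer register is flagged as unpredictable, but register number 31 means xzr in the transfer slots and sp in the base slot, so the two can never alias and no warning should be emitted. A sketch of the contrast (paraphrasing the rule rather than quoting the ARM ARM):

      stp xzr, xzr, [sp, #8]!   // fine: xzr (31) can never be the base register sp (31)
      ldp x2, x3, [x2], #8      // flagged: the base x2 is also a load destination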
|
D | basic-a64-instructions.txt |
      109  # CHECK: add xzr, x3, x5
      110  # CHECK: add x20, xzr, x4
      111  # CHECK: add x4, x6, xzr
      113  # CHECK: add x9, x3, xzr, lsl #10
      164  # CHECK: adds x20, xzr, x4
      165  # CHECK: adds x4, x6, xzr
      167  # CHECK: adds x9, x3, xzr, lsl #10
      217  # CHECK: sub xzr, x3, x5
      218  # CHECK: {{sub x20, xzr, x4|neg x20, x4}}
      219  # CHECK: sub x4, x6, xzr
      [all …]
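The alternation on line 218 covers the neg alias: a sub whose first source operand is xzr is just a negation, and the disassembler is allowed to print either form. For instance (one encoding, two spellings):

      sub x20, xzr, x4    // subtract from zero ...
      neg x20, x4         // ... printed as the preferred neg alias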
|
/external/chromium_org/v8/src/arm64/ |
D | code-stubs-arm64.h |
      125  ASSERT(instr2->IsPCRelAddressing() && (instr2->Rd() == xzr.code())); in GetMode()
      129  ASSERT(instr1->IsPCRelAddressing() && (instr1->Rd() == xzr.code())); in GetMode()
      164  patcher.adr(xzr, offset_to_incremental_noncompacting); in Patch()
      165  patcher.adr(xzr, offset_to_incremental_compacting); in Patch()
      170  patcher.adr(xzr, offset_to_incremental_compacting); in Patch()
      174  patcher.adr(xzr, offset_to_incremental_noncompacting); in Patch()
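As far as these hits show, the stub patcher uses adr with an xzr destination as a self-describing placeholder: the instruction computes a PC-relative address and discards it, so it acts as a no-op while still carrying the offset that GetMode() recognises via IsPCRelAddressing() and Rd() == xzr.code(). Roughly (hypothetical label, not v8's actual code layout):

      adr xzr, incremental_entry   // no architectural effect; the offset survives in the encoding
      // Patch() can later rewrite this slot when a different mode is selected.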
|
/external/chromium_org/v8/test/cctest/ |
D | test-disasm-arm64.cc |
      148  COMPARE(Mov(x0, xzr), "mov x0, xzr"); in TEST_()
      152  COMPARE(mov(x0, xzr), "mov x0, xzr"); in TEST_()
      334  COMPARE(add(x4, xzr, Operand(x5, LSL, 1)), "add x4, xzr, x5, lsl #1"); in TEST_()
      336  COMPARE(adds(xzr, csp, Operand(x8, LSL, 4)), "cmn csp, x8, lsl #4"); in TEST_()
      337  COMPARE(adds(xzr, xzr, Operand(x8, LSL, 5)), "cmn xzr, x8, lsl #5"); in TEST_()
      364  COMPARE(sub(x4, xzr, Operand(x5, LSL, 1)), "neg x4, x5, lsl #1"); in TEST_()
      366  COMPARE(subs(xzr, csp, Operand(x8, LSL, 4)), "cmp csp, x8, lsl #4"); in TEST_()
      367  COMPARE(subs(xzr, xzr, Operand(x8, LSL, 5)), "cmp xzr, x8, lsl #5"); in TEST_()
      392  COMPARE(cmn(csp, Operand(xzr, UXTX, 3)), "cmn csp, xzr, lsl #3"); in TEST_()
      393  COMPARE(cmn(csp, Operand(xzr, LSL, 4)), "cmn csp, xzr, lsl #4"); in TEST_()
      [all …]
|
D | test-assembler-arm64.cc |
      181  __ Msr(NZCV, xzr); \
      182  __ Msr(FPCR, xzr);
      262  __ Orr(csp, xzr, Operand(0x1fff)); in TEST()
      270  __ Orr(csp, xzr, Operand(0xfffffff8L)); in TEST()
      3569  __ Sub(x2, x2, xzr); in TEST()
      3604  __ Claim(xzr, 8); in TEST()
      3605  __ Drop(xzr, 8); in TEST()
      3606  __ Claim(xzr, 0); in TEST()
      3607  __ Drop(xzr, 0); in TEST()
      3610  __ ClaimBySMI(xzr, 8); in TEST()
      [all …]
|
D | test-hashing.cc |
      104  __ Push(root, xzr); in generate()
      114  __ Pop(xzr, root); in generate()
      166  __ Push(root, xzr); in generate()
      170  __ Pop(xzr, root); in generate()
|
/external/vixl/test/ |
D | test-disasm-a64.cc |
      296  COMPARE(add(x4, xzr, Operand(x5, LSL, 1)), "add x4, xzr, x5, lsl #1"); in TEST()
      298  COMPARE(adds(xzr, sp, Operand(x8, LSL, 4)), "cmn sp, x8, lsl #4"); in TEST()
      299  COMPARE(adds(xzr, xzr, Operand(x8, LSL, 5)), "cmn xzr, x8, lsl #5"); in TEST()
      326  COMPARE(sub(x4, xzr, Operand(x5, LSL, 1)), "neg x4, x5, lsl #1"); in TEST()
      328  COMPARE(subs(xzr, sp, Operand(x8, LSL, 4)), "cmp sp, x8, lsl #4"); in TEST()
      329  COMPARE(subs(xzr, xzr, Operand(x8, LSL, 5)), "cmp xzr, x8, lsl #5"); in TEST()
      354  COMPARE(cmn(sp, Operand(xzr, UXTX, 3)), "cmn sp, xzr, lsl #3"); in TEST()
      355  COMPARE(cmn(sp, Operand(xzr, LSL, 4)), "cmn sp, xzr, lsl #4"); in TEST()
      380  COMPARE(cmp(sp, Operand(xzr, UXTX, 3)), "cmp sp, xzr, lsl #3"); in TEST()
      381  COMPARE(cmp(sp, Operand(xzr, LSL, 4)), "cmp sp, xzr, lsl #4"); in TEST()
      [all …]
|
D | test-assembler-a64.cc |
      228  __ Orr(sp, xzr, 0x1fff); in TEST()
      236  __ Orr(sp, xzr, 0xfffffff8); in TEST()
      3090  __ Sub(x2, x2, xzr); in TEST()
      3124  __ Claim(Operand(xzr)); in TEST()
      3125  __ Drop(Operand(xzr)); in TEST()
      4012  __ lslv(x0, x0, xzr); in TEST()
      4064  __ lsrv(x0, x0, xzr); in TEST()
      4118  __ asrv(x0, x0, xzr); in TEST()
      4172  __ rorv(x0, x0, xzr); in TEST()
      7311  __ add(xzr, x0, x1); in TEST()
      [all …]
|
/external/valgrind/main/coregrind/m_dispatch/ |
D | dispatch-arm64-linux.S |
      74  stp x0, xzr, [sp, #-16]!
      119  ldp x0, xzr, [sp], #16
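Pairing the one live register with xzr lets the dispatcher push and pop 16 bytes at a time, keeping sp at the 16-byte alignment AArch64 expects when sp is used as a base register; the padding slot is written as zero and the matching load into xzr is simply discarded. A minimal sketch of the same idiom:

      stp x0, xzr, [sp, #-16]!   // spill x0, pad with zero, sp stays 16-byte aligned
      // ... code that may clobber x0 ...
      ldp x0, xzr, [sp], #16     // restore x0; the value loaded into xzr vanishes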
|
/external/valgrind/main/coregrind/m_syswrap/ |
D | syscall-arm64-linux.S |
      129  ldp xzr, x1, [sp], #16
      142  ldp xzr, x1, [sp], #16
|