/external/boringssl/linux-aarch64/crypto/fipsmodule/ |
D | armv8-mont.S |
     50  subs xzr,x6,#1 // (*)
     52  adc x13,x13,xzr
     59  adc x7,x11,xzr
     64  adc x13,x17,xzr
     69  adc x13,x13,xzr
     77  adc x7,x11,xzr
     81  adc x13,x17,xzr
     87  adc x19,xzr,xzr // upmost overflow bit
    103  adc x7,x7,xzr
    112  subs xzr,x6,#1 // (*)
         [all …]
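The armv8-mont.S hits above show the two main zero-register idioms in this Montgomery-multiplication code: "subs xzr, Xn, #imm" is a compare whose result is discarded by writing the zero register (only the flags survive), and "adc Xd, Xn, xzr" adds nothing but the carry flag, which is how the carry is walked up the limb chain. A minimal sketch of the idiom, not the BoringSSL routine itself (register choices are illustrative):

    // Fold a carry through a small accumulator, then capture the top bit.
    adds    x10, x10, x3        // limb 0 += addend, sets the carry flag
    adcs    x11, x11, xzr       // limb 1 += carry, carry out kept in the flags
    adcs    x12, x12, xzr       // limb 2 += carry, carry out kept in the flags
    adc     x19, xzr, xzr       // x19 = carry out of the whole chain (0 or 1)
    subs    xzr, x6, #1         // flags-only compare: result thrown away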
|
/external/boringssl/ios-aarch64/crypto/fipsmodule/ |
D | armv8-mont.S |
     49  subs xzr,x6,#1 // (*)
     51  adc x13,x13,xzr
     58  adc x7,x11,xzr
     63  adc x13,x17,xzr
     68  adc x13,x13,xzr
     76  adc x7,x11,xzr
     80  adc x13,x17,xzr
     86  adc x19,xzr,xzr // upmost overflow bit
    102  adc x7,x7,xzr
    111  subs xzr,x6,#1 // (*)
         [all …]
|
/external/capstone/suite/MC/AArch64/ |
D | basic-a64-instructions.s.cs |
     42  0x3f,0x8f,0x34,0xab = adds xzr, x25, w20, sxtb #3
     44  0x5f,0xc0,0x23,0xab = adds xzr, x2, w3, sxtw
     58  0x3f,0x8f,0x34,0xeb = subs xzr, x25, w20, sxtb #3
     60  0x5f,0xc0,0x23,0xeb = subs xzr, x2, w3, sxtw
    130  0x7f,0x04,0x40,0xb1 = adds xzr, x3, #1, lsl #12
    131  0xff,0x53,0x40,0xf1 = subs xzr, sp, #20, lsl #12
    132  0xdf,0xff,0x3f,0xf1 = subs xzr, x30, #4095
    158  0x7f,0x00,0x05,0x8b = add xzr, x3, x5
    159  0xf4,0x03,0x04,0x8b = add x20, xzr, x4
    160  0xc4,0x00,0x1f,0x8b = add x4, x6, xzr
         [all …]
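In these Capstone fixtures each little-endian byte group is the encoding of the instruction on the right, and the zero register is simply general-purpose register number 31 appearing in a data slot. Hand-decoding the first add hit as a sanity check (field layout per ADD (shifted register), 64-bit form):

    // bytes 0x7f,0x00,0x05,0x8b  ->  word 0x8b05007f
    //   Rm = bits [20:16] = 0b00101 -> x5
    //   Rn = bits [9:5]   = 0b00011 -> x3
    //   Rd = bits [4:0]   = 0b11111 -> xzr (the result is discarded)
    add     xzr, x3, x5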
|
/external/llvm/test/MC/AArch64/ |
D | basic-a64-instructions.s |
    103  adds xzr, x25, w20, sxtb #3
    105  adds xzr, x2, w3, sxtw
    138  subs xzr, x25, w20, sxtb #3
    140  subs xzr, x2, w3, sxtw
    310  adds xzr, x3, #0x1, lsl #12 // FIXME: canonically should be cmn
    317  subs xzr, sp, #20, lsl #12 // FIXME: canonically should be cmp
    318  subs xzr, x30, #4095, lsl #0 // FIXME: canonically should be cmp
    398  add xzr, x3, x5
    399  add x20, xzr, x4
    400  add x4, x6, xzr
         [all …]
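The FIXME comments refer to the canonical aliases: a flag-setting add or sub whose destination is the zero register is normally spelled cmn or cmp, and the assembler treats the two spellings as the same instruction. For example:

    subs    xzr, x30, #4095         // preferred alias: cmp x30, #4095
    adds    xzr, x3, #0x1, lsl #12  // preferred alias: cmn x3, #0x1, lsl #12
    subs    xzr, sp, #20, lsl #12   // preferred alias: cmp sp, #20, lsl #12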
|
D | arm64-aliases.s |
     27  orr x2, xzr, x9
     44  ands xzr, x1, x2, lsl #3
    190  orr x20, xzr, #0xaaaaaaaaaaaaaaaa
    198  orr x3, xzr, #0x1
    200  orr x3, xzr, #0x10000
    202  orr x3, xzr, #0x700000000
    203  orr x3, xzr, #0x3000000000000
    204  ; CHECK: orr x3, xzr, #0x1
    206  ; CHECK: orr x3, xzr, #0x10000
    208  ; CHECK: orr x3, xzr, #0x700000000
         [all …]
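The same aliasing underlies this test for moves and tests: a register-to-register mov is ORR with xzr as the first source, and tst is ANDS with xzr as the destination, so the first two hits above are just the expanded spellings:

    orr     x2, xzr, x9             // what "mov x2, x9" assembles to
    ands    xzr, x1, x2, lsl #3     // what "tst x1, x2, lsl #3" assembles to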
|
D | arm64-bitfield-encoding.s |
     15  sbfiz xzr, x0, #31, #1
     17  ubfiz xzr, x0, #31, #1
     26  ; CHECK: sbfiz xzr, x0, #31, #1 ; encoding: [0x1f,0x00,0x61,0x93]
     28  ; CHECK: ubfiz xzr, x0, #31, #1 ; encoding: [0x1f,0x00,0x61,0xd3]
|
D | basic-a64-diagnostics.s |
     37  add xzr, x3, x5, uxtx
     38  sub x3, xzr, w9, sxth #1
     52  adds x3, xzr, x9, uxtx
    152  subs x5, xzr, #0x456, lsl #12
    403  cmn x19, xzr, asr #-1
    404  cmn xzr, xzr, asr #64
    452  cmp x19, xzr, asr #-1
    453  cmp xzr, xzr, asr #64
    501  neg x19, xzr, asr #-1
    502  neg xzr, xzr, asr #64
         [all …]
|
D | arm64-leaf-compact-unwind.s |
     86  mov x9, xzr
     95  mov x9, xzr
    204  mov x9, xzr
    215  mov x9, xzr
|
/external/llvm/test/CodeGen/AArch64/ |
D | arm64-addrmode.ll |
      8  ; CHECK: ldr xzr, [x{{[0-9]+}}, #8]
     19  ; CHECK: ldr xzr, [
     30  ; CHECK: ldr xzr, [x{{[0-9]+}}, #32760]
     41  ; CHECK: ldr xzr, [x{{[0-9]+}}, x[[NUM]]]
     51  ; CHECK: ldr xzr, [x{{[0-9]+}}, x{{[0-9]+}}, lsl #3]
     63  ; CHECK: ldr xzr, [x{{[0-9]+}}, x[[NUM]]]
     76  ; CHECK-NEXT: ldr xzr, [x0, x[[NUM]]]
     86  ; CHECK-NEXT: ldr xzr, [x0, [[REG]]]
     96  ; CHECK-NEXT: ldr xzr, [x0, [[REG]]]
    106  ; CHECK-NEXT: ldr xzr, [x0, [[REG]]]
         [all …]
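These CodeGen checks load into xzr precisely so the loaded value is thrown away and only the address computation is under test. A sketch of the addressing modes the checks exercise, with x0/x1 standing in for the FileCheck captures:

    ldr     xzr, [x0, #8]           // base + scaled unsigned immediate offset
    ldr     xzr, [x0, #32760]       // largest in-range scaled offset for a 64-bit load (4095 * 8)
    ldr     xzr, [x0, x1]           // base + register offset
    ldr     xzr, [x0, x1, lsl #3]   // register offset scaled by the access size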
|
D | arm64-memset-inline.ll |
      7  ; CHECK: str xzr, [x0]
     16  ; CHECK: stp xzr, xzr, [sp, #16]
     17  ; CHECK: str xzr, [sp, #8]
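Storing from the zero register is how small memsets are inlined without materializing a zero in a scratch register: str xzr clears 8 bytes and stp xzr, xzr clears 16 per instruction. A minimal sketch of zeroing a 24-byte region at a hypothetical base in x0:

    stp     xzr, xzr, [x0, #8]      // bytes 8..23 <- 0
    str     xzr, [x0]               // bytes 0..7  <- 0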
|
D | arm64-long-shift.ll |
      9  ; CHECK: csel [[LO_FOR_HI:x[0-9]+]], xzr, [[LO_FOR_HI_NORMAL]], eq
     17  ; CHECK: csel x0, xzr, [[SMALLSHIFT_LO]], ge
     30  ; CHECK: csel [[HI_FOR_LO:x[0-9]+]], xzr, [[HI_FOR_LO_NORMAL]], eq
     52  ; CHECK: csel [[HI_FOR_LO:x[0-9]+]], xzr, [[HI_FOR_LO_NORMAL]], eq
     60  ; CHECK: csel x1, xzr, [[SMALLSHIFT_HI]], ge
|
D | movw-consts.ll |
      6  ; CHECK: mov x0, xzr
     36  ; CHECK: orr x0, xzr, #0x100000000
     42  ; CHECK: orr x0, xzr, #0xffff00000000
     48  ; CHECK: orr x0, xzr, #0x1000000000000
    122  ; CHECK: orr x0, xzr, #0xfffffffffffffffd
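Zero itself cannot be encoded as an ORR bitmask immediate, so it is produced by copying xzr, while constants that form a contiguous (possibly rotated) run of ones can be built with a single orr against xzr. Reusing the constants from the checks above:

    mov     x0, xzr                         // 0: copy of the zero register
    orr     x0, xzr, #0x100000000           // a single set bit is a valid bitmask immediate
    orr     x0, xzr, #0xffff00000000        // a contiguous run of ones
    orr     x0, xzr, #0xfffffffffffffffd    // all ones except one bit: still a rotated run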
|
D | zero-reg.ll |
     12  ; CHECK: str xzr, [{{x[0-9]+}}, {{#?}}:lo12:var64]
     21  ; Important correctness point here is that LLVM doesn't try to use xzr
     22  ; as an addressing register: "str w0, [xzr]" is not a valid A64
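The test's own comment is the key restriction: in a load/store base-register slot, register number 31 is interpreted as sp, so there is no [xzr] addressing form; xzr is only usable as the data operand. The valid pattern, with var64 as in the test and x8 as an arbitrary scratch register:

    adrp    x8, var64                   // page address of var64
    str     xzr, [x8, :lo12:var64]      // zero register as the *data* operand is fine
    // "str w0, [xzr]" cannot be written: a base encoding of 31 would mean sp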
|
D | arm64-early-ifcvt.ll |
     59  ; CHECK: {{subs.*xzr,|cmp}} x2, #1
     95  ; CHECK: {{subs.*xzr,|cmp}} x2, #1
    131  ; CHECK: {{subs.*xzr,|cmp}} x2, #1
    167  ; CHECK: {{subs.*xzr,|cmp}} x2, #1
    203  ; CHECK: {{subs.*xzr,|cmp}} x2, #1
    239  ; CHECK: {{subs.*xzr,|cmp}} x2, #1
    274  ; CHECK: {{subs.*xzr,|cmp}} x2, #0
    308  ; CHECK: {{subs.*xzr,|cmp}} x2, #0
    325  ; CHECK: {{ands.*xzr,|tst}} w2, #0x80
    343  ; CHECK: {{ands.*xzr,|tst}} x2, #0x8000000000000000
         [all …]
|
D | arm64-complex-ret.ll |
      5  ; CHECK: stp xzr, xzr, [x8]
|
D | machine_cse_impdef_killflags.ll |
     10  ; CHECK-DAG: orr [[REG2:x[0-9]+]], xzr, #0x2
     11  ; CHECK-DAG: orr [[REG3:x[0-9]+]], xzr, #0x3
|
D | arm64-dead-register-def-bug.ll |
     13  ; CHECK-NOT: orr xzr, xzr, #0x2
|
/external/llvm/test/MC/Disassembler/AArch64/ |
D | ldp-preind.predictable.txt |
     16  # xzr != sp so "stp xzr, xzr, [sp, #8]!" is fine.
     18  # CHECK: stp xzr, xzr, [sp, #8]!
|
D | ldp-postind.predictable.txt |
     16  # xzr != sp so "stp xzr, xzr, [sp], #8" is fine.
     18  # CHECK: stp xzr, xzr, [sp], #8
|
D | armv8.2a-uao.txt |
      9  # CHECK: msr S0_0_C4_C2_3, xzr
     10  # NO_V82A: msr S0_0_C4_C0_3, xzr
     11  # NO_V82A: msr S0_0_C4_C1_3, xzr
     12  # NO_V82A: msr S0_0_C4_C2_3, xzr
|
D | basic-a64-instructions.txt |
    110  # CHECK: add xzr, x3, x5
    111  # CHECK: add x20, xzr, x4
    112  # CHECK: add x4, x6, xzr
    114  # CHECK: add x9, x3, xzr, lsl #10
    165  # CHECK: adds x20, xzr, x4
    166  # CHECK: adds x4, x6, xzr
    168  # CHECK: adds x9, x3, xzr, lsl #10
    218  # CHECK: sub xzr, x3, x5
    219  # CHECK: {{sub x20, xzr, x4|neg x20, x4}}
    220  # CHECK: sub x4, x6, xzr
         [all …]
|
/external/vixl/test/aarch64/ |
D | test-api-aarch64.cc |
     54  VIXL_CHECK(xzr.GetBit() == (UINT64_C(1) << kZeroRegCode)); in TEST()
     58  VIXL_CHECK(sp.GetBit() != xzr.GetBit()); in TEST()
     64  VIXL_CHECK(xzr.GetBit() == wzr.GetBit()); in TEST()
    102  VIXL_CHECK(xzr.IsValid()); in TEST()
    115  VIXL_CHECK(xzr.IsValidRegister()); in TEST()
    121  VIXL_CHECK(!xzr.IsValidFPRegister()); in TEST()
    137  VIXL_CHECK(static_cast<CPURegister>(xzr).IsValid()); in TEST()
    150  VIXL_CHECK(static_cast<CPURegister>(xzr).IsValidRegister()); in TEST()
    156  VIXL_CHECK(!static_cast<CPURegister>(xzr).IsValidFPRegister()); in TEST()
|
/external/v8/src/ic/arm64/ |
D | ic-arm64.cc |
     83  DCHECK(to_patch->Rt() == xzr.code()); in PatchInlinedSmiCode()
     87  DCHECK(to_patch->Rt() != xzr.code()); in PatchInlinedSmiCode()
     88  smi_reg = xzr; in PatchInlinedSmiCode()
|
/external/v8/src/arm64/ |
D | code-stubs-arm64.h |
    109  DCHECK(instr2->IsPCRelAddressing() && (instr2->Rd() == xzr.code())); in GetMode()
    113  DCHECK(instr1->IsPCRelAddressing() && (instr1->Rd() == xzr.code())); in GetMode()
    151  patcher.adr(xzr, offset_to_incremental_noncompacting); in Patch()
    152  patcher.adr(xzr, offset_to_incremental_compacting); in Patch()
    157  patcher.adr(xzr, offset_to_incremental_compacting); in Patch()
    161  patcher.adr(xzr, offset_to_incremental_noncompacting); in Patch()
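V8's patcher leans on the fact that adr with xzr as the destination computes a PC-relative value and immediately discards it: architecturally it behaves as a no-op, but its immediate field stays in the instruction stream, so the stub code can read the offset back later (the DCHECKs above confirm Rd is xzr before trusting it). A sketch of the idiom with an arbitrary, purely illustrative offset:

    adr     xzr, #16        // no architectural effect; the PC-relative immediate
                            // (#16 here, chosen only for illustration) acts as an inline tag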
|
/external/v8/src/crankshaft/arm64/ |
D | delayed-masm-arm64.cc |
     78  __ Stp(xzr, xzr, dst); in StoreConstant()
    169  __ Str(xzr, pending_address_dst_); in EmitPending()
|