; RUN: llc -verify-machineinstrs %s -o - -mtriple=aarch64-linux-gnu -aarch64-atomic-cfg-tidy=0 | FileCheck %s

; Check that zero/sign extensions of an i8/i16/i32 right-hand operand,
; optionally left-shifted by 0-4, are folded into the AArch64 add/cmp
; "extended register" forms (uxtb/uxth/uxtw, sxtb/sxth/sxtw).

@var8 = global i8 0
@var16 = global i16 0
@var32 = global i32 0
@var64 = global i64 0

define void @addsub_i8rhs() minsize {
; CHECK-LABEL: addsub_i8rhs:
  %val8_tmp = load i8, i8* @var8
  %lhs32 = load i32, i32* @var32
  %lhs64 = load i64, i64* @var64

  ; Need this to prevent extension upon load and give a vanilla i8 operand.
  %val8 = add i8 %val8_tmp, 123


; Zero-extending to 32-bits
  %rhs32_zext = zext i8 %val8 to i32
  %res32_zext = add i32 %lhs32, %rhs32_zext
  store volatile i32 %res32_zext, i32* @var32
; CHECK: add {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}, uxtb

  %rhs32_zext_shift = shl i32 %rhs32_zext, 3
  %res32_zext_shift = add i32 %lhs32, %rhs32_zext_shift
  store volatile i32 %res32_zext_shift, i32* @var32
; CHECK: add {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}, uxtb #3


; Zero-extending to 64-bits
  %rhs64_zext = zext i8 %val8 to i64
  %res64_zext = add i64 %lhs64, %rhs64_zext
  store volatile i64 %res64_zext, i64* @var64
; CHECK: add {{x[0-9]+}}, {{x[0-9]+}}, {{w[0-9]+}}, uxtb

  %rhs64_zext_shift = shl i64 %rhs64_zext, 1
  %res64_zext_shift = add i64 %lhs64, %rhs64_zext_shift
  store volatile i64 %res64_zext_shift, i64* @var64
; CHECK: add {{x[0-9]+}}, {{x[0-9]+}}, {{w[0-9]+}}, uxtb #1

; Sign-extending to 32-bits
  %rhs32_sext = sext i8 %val8 to i32
  %res32_sext = add i32 %lhs32, %rhs32_sext
  store volatile i32 %res32_sext, i32* @var32
; CHECK: add {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}, sxtb

  %rhs32_sext_shift = shl i32 %rhs32_sext, 1
  %res32_sext_shift = add i32 %lhs32, %rhs32_sext_shift
  store volatile i32 %res32_sext_shift, i32* @var32
; CHECK: add {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}, sxtb #1

; Sign-extending to 64-bits
  %rhs64_sext = sext i8 %val8 to i64
  %res64_sext = add i64 %lhs64, %rhs64_sext
  store volatile i64 %res64_sext, i64* @var64
; CHECK: add {{x[0-9]+}}, {{x[0-9]+}}, {{w[0-9]+}}, sxtb

  %rhs64_sext_shift = shl i64 %rhs64_sext, 4
  %res64_sext_shift = add i64 %lhs64, %rhs64_sext_shift
  store volatile i64 %res64_sext_shift, i64* @var64
; CHECK: add {{x[0-9]+}}, {{x[0-9]+}}, {{w[0-9]+}}, sxtb #4


; CMP variants
  %tst = icmp slt i32 %lhs32, %rhs32_zext
  br i1 %tst, label %end, label %test2
; CHECK: cmp {{w[0-9]+}}, {{w[0-9]+}}, uxtb

test2:
  %cmp_sext = sext i8 %val8 to i64
  %tst2 = icmp eq i64 %lhs64, %cmp_sext
  br i1 %tst2, label %other, label %end
; CHECK: cmp {{x[0-9]+}}, {{w[0-9]+}}, sxtb

other:
  store volatile i32 %lhs32, i32* @var32
  ret void

end:
  ret void
}
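; For reference, the A64 "add/sub (extended register)" syntax is
;   add|sub Wd|Xd, Wn|Xn, Wm, {u|s}xt{b|h|w} {#amount}
; where the left-shift amount is limited to 0-4, which is why every shl
; above stays within that range. A hedged sketch of a case that cannot
; fold (illustrative names, intentionally not part of the checked test):
;   %big_shift = shl i32 %rhs32_zext, 5    ; 5 exceeds the 0-4 extended range
;   %no_fold = add i32 %lhs32, %big_shift  ; needs a separate extend, then a
;                                          ; shifted-register add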
define void @addsub_i16rhs() minsize {
; CHECK-LABEL: addsub_i16rhs:
  %val16_tmp = load i16, i16* @var16
  %lhs32 = load i32, i32* @var32
  %lhs64 = load i64, i64* @var64

  ; Need this to prevent extension upon load and give a vanilla i16 operand.
  %val16 = add i16 %val16_tmp, 123


; Zero-extending to 32-bits
  %rhs32_zext = zext i16 %val16 to i32
  %res32_zext = add i32 %lhs32, %rhs32_zext
  store volatile i32 %res32_zext, i32* @var32
; CHECK: add {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}, uxth

  %rhs32_zext_shift = shl i32 %rhs32_zext, 3
  %res32_zext_shift = add i32 %lhs32, %rhs32_zext_shift
  store volatile i32 %res32_zext_shift, i32* @var32
; CHECK: add {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}, uxth #3


; Zero-extending to 64-bits
  %rhs64_zext = zext i16 %val16 to i64
  %res64_zext = add i64 %lhs64, %rhs64_zext
  store volatile i64 %res64_zext, i64* @var64
; CHECK: add {{x[0-9]+}}, {{x[0-9]+}}, {{w[0-9]+}}, uxth

  %rhs64_zext_shift = shl i64 %rhs64_zext, 1
  %res64_zext_shift = add i64 %lhs64, %rhs64_zext_shift
  store volatile i64 %res64_zext_shift, i64* @var64
; CHECK: add {{x[0-9]+}}, {{x[0-9]+}}, {{w[0-9]+}}, uxth #1

; Sign-extending to 32-bits
  %rhs32_sext = sext i16 %val16 to i32
  %res32_sext = add i32 %lhs32, %rhs32_sext
  store volatile i32 %res32_sext, i32* @var32
; CHECK: add {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}, sxth

  %rhs32_sext_shift = shl i32 %rhs32_sext, 1
  %res32_sext_shift = add i32 %lhs32, %rhs32_sext_shift
  store volatile i32 %res32_sext_shift, i32* @var32
; CHECK: add {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}, sxth #1

; Sign-extending to 64-bits
  %rhs64_sext = sext i16 %val16 to i64
  %res64_sext = add i64 %lhs64, %rhs64_sext
  store volatile i64 %res64_sext, i64* @var64
; CHECK: add {{x[0-9]+}}, {{x[0-9]+}}, {{w[0-9]+}}, sxth

  %rhs64_sext_shift = shl i64 %rhs64_sext, 4
  %res64_sext_shift = add i64 %lhs64, %rhs64_sext_shift
  store volatile i64 %res64_sext_shift, i64* @var64
; CHECK: add {{x[0-9]+}}, {{x[0-9]+}}, {{w[0-9]+}}, sxth #4


; CMP variants
  %tst = icmp slt i32 %lhs32, %rhs32_zext
  br i1 %tst, label %end, label %test2
; CHECK: cmp {{w[0-9]+}}, {{w[0-9]+}}, uxth

test2:
  %cmp_sext = sext i16 %val16 to i64
  %tst2 = icmp eq i64 %lhs64, %cmp_sext
  br i1 %tst2, label %other, label %end
; CHECK: cmp {{x[0-9]+}}, {{w[0-9]+}}, sxth

other:
  store volatile i32 %lhs32, i32* @var32
  ret void

end:
  ret void
}

; N.b. we could probably check more here ("add w2, w3, w1, uxtw" for
; example), but the remaining instructions are probably not idiomatic
; in the face of "add/sub (shifted register)", so I don't intend to.
; A hedged sketch of one case that would still be idiomatic follows.
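; Sketch of analogous "sub (extended register)" coverage. It is left
; commented out because the expected line has not been verified against
; llc output; %res32_sub is an illustrative name, and %lhs32/%rhs32_zext
; refer to the values defined in @addsub_i16rhs above:
;   %res32_sub = sub i32 %lhs32, %rhs32_zext
;   store volatile i32 %res32_sub, i32* @var32
;   ; expected: sub {{w[0-9]+}}, {{w[0-9]+}}, {{w[0-9]+}}, uxth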
define void @addsub_i32rhs() minsize {
; CHECK-LABEL: addsub_i32rhs:
  %val32_tmp = load i32, i32* @var32
  %lhs64 = load i64, i64* @var64

  ; Need this to prevent extension upon load and give a vanilla i32 operand.
  %val32 = add i32 %val32_tmp, 123

  %rhs64_zext = zext i32 %val32 to i64
  %res64_zext = add i64 %lhs64, %rhs64_zext
  store volatile i64 %res64_zext, i64* @var64
; CHECK: add {{x[0-9]+}}, {{x[0-9]+}}, {{w[0-9]+}}, uxtw

  %rhs64_zext_shift = shl i64 %rhs64_zext, 2
  %res64_zext_shift = add i64 %lhs64, %rhs64_zext_shift
  store volatile i64 %res64_zext_shift, i64* @var64
; CHECK: add {{x[0-9]+}}, {{x[0-9]+}}, {{w[0-9]+}}, uxtw #2

  %rhs64_sext = sext i32 %val32 to i64
  %res64_sext = add i64 %lhs64, %rhs64_sext
  store volatile i64 %res64_sext, i64* @var64
; CHECK: add {{x[0-9]+}}, {{x[0-9]+}}, {{w[0-9]+}}, sxtw

  %rhs64_sext_shift = shl i64 %rhs64_sext, 2
  %res64_sext_shift = add i64 %lhs64, %rhs64_sext_shift
  store volatile i64 %res64_sext_shift, i64* @var64
; CHECK: add {{x[0-9]+}}, {{x[0-9]+}}, {{w[0-9]+}}, sxtw #2

  ret void
}
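; To exercise this file by hand, mirror the RUN line at the top (the
; file name below is illustrative):
;   llc -verify-machineinstrs addsub_ext.ll -o - \
;     -mtriple=aarch64-linux-gnu -aarch64-atomic-cfg-tidy=0 \
;     | FileCheck addsub_ext.ll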