; RUN: llvm-as < %s | llvm-dis | FileCheck %s

@addr = external global i64

define i64 @add_unsigned(i64 %x, i64 %y) {
; CHECK: %z = add nuw i64 %x, %y
  %z = add nuw i64 %x, %y
  ret i64 %z
}

define i64 @sub_unsigned(i64 %x, i64 %y) {
; CHECK: %z = sub nuw i64 %x, %y
  %z = sub nuw i64 %x, %y
  ret i64 %z
}

define i64 @mul_unsigned(i64 %x, i64 %y) {
; CHECK: %z = mul nuw i64 %x, %y
  %z = mul nuw i64 %x, %y
  ret i64 %z
}

define i64 @add_signed(i64 %x, i64 %y) {
; CHECK: %z = add nsw i64 %x, %y
  %z = add nsw i64 %x, %y
  ret i64 %z
}

define i64 @sub_signed(i64 %x, i64 %y) {
; CHECK: %z = sub nsw i64 %x, %y
  %z = sub nsw i64 %x, %y
  ret i64 %z
}

define i64 @mul_signed(i64 %x, i64 %y) {
; CHECK: %z = mul nsw i64 %x, %y
  %z = mul nsw i64 %x, %y
  ret i64 %z
}

define i64 @add_plain(i64 %x, i64 %y) {
; CHECK: %z = add i64 %x, %y
  %z = add i64 %x, %y
  ret i64 %z
}

define i64 @sub_plain(i64 %x, i64 %y) {
; CHECK: %z = sub i64 %x, %y
  %z = sub i64 %x, %y
  ret i64 %z
}

define i64 @mul_plain(i64 %x, i64 %y) {
; CHECK: %z = mul i64 %x, %y
  %z = mul i64 %x, %y
  ret i64 %z
}

define i64 @add_both(i64 %x, i64 %y) {
; CHECK: %z = add nuw nsw i64 %x, %y
  %z = add nuw nsw i64 %x, %y
  ret i64 %z
}

define i64 @sub_both(i64 %x, i64 %y) {
; CHECK: %z = sub nuw nsw i64 %x, %y
  %z = sub nuw nsw i64 %x, %y
  ret i64 %z
}

define i64 @mul_both(i64 %x, i64 %y) {
; CHECK: %z = mul nuw nsw i64 %x, %y
  %z = mul nuw nsw i64 %x, %y
  ret i64 %z
}

define i64 @add_both_reversed(i64 %x, i64 %y) {
; CHECK: %z = add nuw nsw i64 %x, %y
  %z = add nsw nuw i64 %x, %y
  ret i64 %z
}

define i64 @sub_both_reversed(i64 %x, i64 %y) {
; CHECK: %z = sub nuw nsw i64 %x, %y
  %z = sub nsw nuw i64 %x, %y
  ret i64 %z
}

define i64 @mul_both_reversed(i64 %x, i64 %y) {
; CHECK: %z = mul nuw nsw i64 %x, %y
  %z = mul nsw nuw i64 %x, %y
  ret i64 %z
}

define i64 @shl_both(i64 %x, i64 %y) {
; CHECK: %z = shl nuw nsw i64 %x, %y
  %z = shl nuw nsw i64 %x, %y
  ret i64 %z
}

define i64 @sdiv_exact(i64 %x, i64 %y) {
; CHECK: %z = sdiv exact i64 %x, %y
  %z = sdiv exact i64 %x, %y
  ret i64 %z
}

define i64 @sdiv_plain(i64 %x, i64 %y) {
; CHECK: %z = sdiv i64 %x, %y
  %z = sdiv i64 %x, %y
  ret i64 %z
}

define i64 @udiv_exact(i64 %x, i64 %y) {
; CHECK: %z = udiv exact i64 %x, %y
  %z = udiv exact i64 %x, %y
  ret i64 %z
}

define i64 @udiv_plain(i64 %x, i64 %y) {
; CHECK: %z = udiv i64 %x, %y
  %z = udiv i64 %x, %y
  ret i64 %z
}

define i64 @ashr_plain(i64 %x, i64 %y) {
; CHECK: %z = ashr i64 %x, %y
  %z = ashr i64 %x, %y
  ret i64 %z
}

define i64 @ashr_exact(i64 %x, i64 %y) {
; CHECK: %z = ashr exact i64 %x, %y
  %z = ashr exact i64 %x, %y
  ret i64 %z
}

define i64 @lshr_plain(i64 %x, i64 %y) {
; CHECK: %z = lshr i64 %x, %y
  %z = lshr i64 %x, %y
  ret i64 %z
}

define i64 @lshr_exact(i64 %x, i64 %y) {
; CHECK: %z = lshr exact i64 %x, %y
  %z = lshr exact i64 %x, %y
  ret i64 %z
}

define i64* @gep_nw(i64* %p, i64 %x) {
; CHECK: %z = getelementptr inbounds i64* %p, i64 %x
  %z = getelementptr inbounds i64* %p, i64 %x
  ret i64* %z
}

define i64* @gep_plain(i64* %p, i64 %x) {
; CHECK: %z = getelementptr i64* %p, i64 %x
  %z = getelementptr i64* %p, i64 %x
  ret i64* %z
}

define i64 @add_both_ce() {
; CHECK: ret i64 add nuw nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 add nsw nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @sub_both_ce() {
; CHECK: ret i64 sub nuw nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 sub nsw nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @mul_both_ce() {
; CHECK: ret i64 mul nuw nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 mul nuw nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @sdiv_exact_ce() {
; CHECK: ret i64 sdiv exact (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 sdiv exact (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @udiv_exact_ce() {
; CHECK: ret i64 udiv exact (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 udiv exact (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @ashr_exact_ce() {
; CHECK: ret i64 ashr exact (i64 ptrtoint (i64* @addr to i64), i64 9)
  ret i64 ashr exact (i64 ptrtoint (i64* @addr to i64), i64 9)
}

define i64 @lshr_exact_ce() {
; CHECK: ret i64 lshr exact (i64 ptrtoint (i64* @addr to i64), i64 9)
  ret i64 lshr exact (i64 ptrtoint (i64* @addr to i64), i64 9)
}

define i64* @gep_nw_ce() {
; CHECK: ret i64* getelementptr inbounds (i64* @addr, i64 171)
  ret i64* getelementptr inbounds (i64* @addr, i64 171)
}

define i64 @add_plain_ce() {
; CHECK: ret i64 add (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 add (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @sub_plain_ce() {
; CHECK: ret i64 sub (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 sub (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @mul_plain_ce() {
; CHECK: ret i64 mul (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 mul (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @sdiv_plain_ce() {
; CHECK: ret i64 sdiv (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 sdiv (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64* @gep_plain_ce() {
; CHECK: ret i64* getelementptr (i64* @addr, i64 171)
  ret i64* getelementptr (i64* @addr, i64 171)
}

define i64 @add_both_reversed_ce() {
; CHECK: ret i64 add nuw nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 add nsw nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @sub_both_reversed_ce() {
; CHECK: ret i64 sub nuw nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 sub nsw nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @mul_both_reversed_ce() {
; CHECK: ret i64 mul nuw nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 mul nsw nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @add_signed_ce() {
; CHECK: ret i64 add nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 add nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @sub_signed_ce() {
; CHECK: ret i64 sub nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 sub nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @mul_signed_ce() {
; CHECK: ret i64 mul nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 mul nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @shl_signed_ce() {
; CHECK: ret i64 shl nsw (i64 ptrtoint (i64* @addr to i64), i64 17)
  ret i64 shl nsw (i64 ptrtoint (i64* @addr to i64), i64 17)
}

define i64 @add_unsigned_ce() {
; CHECK: ret i64 add nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 add nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @sub_unsigned_ce() {
; CHECK: ret i64 sub nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 sub nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @mul_unsigned_ce() {
; CHECK: ret i64 mul nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 mul nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
}