# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -global-isel-abort=0 -march=amdgcn -mcpu=tahiti -run-pass=legalizer %s -o - | FileCheck -check-prefix=GFX6 %s
# RUN: llc -global-isel-abort=0 -march=amdgcn -mcpu=fiji -run-pass=legalizer %s -o - | FileCheck -check-prefix=GFX8 %s
# RUN: llc -global-isel-abort=0 -march=amdgcn -mcpu=gfx900 -run-pass=legalizer %s -o - | FileCheck -check-prefix=GFX9 %s

---
name: uaddsat_s7
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; GFX6-LABEL: name: uaddsat_s7
    ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX6: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; GFX6: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 25
    ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY2]], [[C]](s32)
    ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY3]], [[C]](s32)
    ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; GFX6: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[SHL]], [[C1]]
    ; GFX6: [[UMIN:%[0-9]+]]:_(s32) = G_UMIN [[XOR]], [[SHL1]]
    ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[SHL]], [[UMIN]]
    ; GFX6: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[ADD]], [[C]](s32)
    ; GFX6: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
    ; GFX6: $vgpr0 = COPY [[COPY4]](s32)
    ; GFX8-LABEL: name: uaddsat_s7
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX8: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 9
    ; GFX8: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C]](s16)
    ; GFX8: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[TRUNC1]], [[C]](s16)
    ; GFX8: [[UADDSAT:%[0-9]+]]:_(s16) = G_UADDSAT [[SHL]], [[SHL1]]
    ; GFX8: [[LSHR:%[0-9]+]]:_(s16) = G_LSHR [[UADDSAT]], [[C]](s16)
    ; GFX8: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR]](s16)
    ; GFX8: $vgpr0 = COPY [[ANYEXT]](s32)
    ; GFX9-LABEL: name: uaddsat_s7
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX9: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 9
    ; GFX9: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C]](s16)
    ; GFX9: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[TRUNC1]], [[C]](s16)
    ; GFX9: [[UADDSAT:%[0-9]+]]:_(s16) = G_UADDSAT [[SHL]], [[SHL1]]
    ; GFX9: [[LSHR:%[0-9]+]]:_(s16) = G_LSHR [[UADDSAT]], [[C]](s16)
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR]](s16)
    ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s7) = G_TRUNC %0
    %3:_(s7) = G_TRUNC %1
    %4:_(s7) = G_UADDSAT %2, %3
    %5:_(s32) = G_ANYEXT %4
    $vgpr0 = COPY %5
...
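# Note: as with s7 above, the s8 case below is widened by shifting the inputs
# into the high bits of the working type (s32 with a shift of 24 on GFX6,
# s16 with a shift of 8 on GFX8/GFX9) before the saturating add.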
59 60--- 61name: uaddsat_s8 62body: | 63 bb.0: 64 liveins: $vgpr0, $vgpr1 65 66 ; GFX6-LABEL: name: uaddsat_s8 67 ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0 68 ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1 69 ; GFX6: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32) 70 ; GFX6: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32) 71 ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 24 72 ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY2]], [[C]](s32) 73 ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY3]], [[C]](s32) 74 ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1 75 ; GFX6: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[SHL]], [[C1]] 76 ; GFX6: [[UMIN:%[0-9]+]]:_(s32) = G_UMIN [[XOR]], [[SHL1]] 77 ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[SHL]], [[UMIN]] 78 ; GFX6: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[ADD]], [[C]](s32) 79 ; GFX6: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32) 80 ; GFX6: $vgpr0 = COPY [[COPY4]](s32) 81 ; GFX8-LABEL: name: uaddsat_s8 82 ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0 83 ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1 84 ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32) 85 ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32) 86 ; GFX8: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 8 87 ; GFX8: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C]](s16) 88 ; GFX8: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[TRUNC1]], [[C]](s16) 89 ; GFX8: [[UADDSAT:%[0-9]+]]:_(s16) = G_UADDSAT [[SHL]], [[SHL1]] 90 ; GFX8: [[LSHR:%[0-9]+]]:_(s16) = G_LSHR [[UADDSAT]], [[C]](s16) 91 ; GFX8: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR]](s16) 92 ; GFX8: $vgpr0 = COPY [[ANYEXT]](s32) 93 ; GFX9-LABEL: name: uaddsat_s8 94 ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0 95 ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1 96 ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32) 97 ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32) 98 ; GFX9: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 8 99 ; GFX9: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C]](s16) 100 ; GFX9: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[TRUNC1]], [[C]](s16) 101 ; GFX9: [[UADDSAT:%[0-9]+]]:_(s16) = G_UADDSAT [[SHL]], [[SHL1]] 102 ; GFX9: [[LSHR:%[0-9]+]]:_(s16) = G_LSHR [[UADDSAT]], [[C]](s16) 103 ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR]](s16) 104 ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32) 105 %0:_(s32) = COPY $vgpr0 106 %1:_(s32) = COPY $vgpr1 107 %2:_(s8) = G_TRUNC %0 108 %3:_(s8) = G_TRUNC %1 109 %4:_(s8) = G_UADDSAT %2, %3 110 %5:_(s32) = G_ANYEXT %4 111 $vgpr0 = COPY %5 112... 
113 114--- 115name: uaddsat_v2s8 116body: | 117 bb.0: 118 liveins: $vgpr0, $vgpr1 119 120 ; GFX6-LABEL: name: uaddsat_v2s8 121 ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0 122 ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1 123 ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8 124 ; GFX6: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32) 125 ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16 126 ; GFX6: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C1]](s32) 127 ; GFX6: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24 128 ; GFX6: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C2]](s32) 129 ; GFX6: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C]](s32) 130 ; GFX6: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C1]](s32) 131 ; GFX6: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C2]](s32) 132 ; GFX6: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32) 133 ; GFX6: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32) 134 ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY2]], [[C2]](s32) 135 ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY3]], [[C2]](s32) 136 ; GFX6: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1 137 ; GFX6: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[SHL]], [[C3]] 138 ; GFX6: [[UMIN:%[0-9]+]]:_(s32) = G_UMIN [[XOR]], [[SHL1]] 139 ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[SHL]], [[UMIN]] 140 ; GFX6: [[LSHR6:%[0-9]+]]:_(s32) = G_LSHR [[ADD]], [[C2]](s32) 141 ; GFX6: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32) 142 ; GFX6: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32) 143 ; GFX6: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[COPY4]], [[C2]](s32) 144 ; GFX6: [[SHL3:%[0-9]+]]:_(s32) = G_SHL [[COPY5]], [[C2]](s32) 145 ; GFX6: [[XOR1:%[0-9]+]]:_(s32) = G_XOR [[SHL2]], [[C3]] 146 ; GFX6: [[UMIN1:%[0-9]+]]:_(s32) = G_UMIN [[XOR1]], [[SHL3]] 147 ; GFX6: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[SHL2]], [[UMIN1]] 148 ; GFX6: [[LSHR7:%[0-9]+]]:_(s32) = G_LSHR [[ADD1]], [[C2]](s32) 149 ; GFX6: [[C4:%[0-9]+]]:_(s16) = G_CONSTANT i16 255 150 ; GFX6: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR6]](s32) 151 ; GFX6: [[AND:%[0-9]+]]:_(s16) = G_AND [[TRUNC]], [[C4]] 152 ; GFX6: [[COPY6:%[0-9]+]]:_(s32) = COPY [[C]](s32) 153 ; GFX6: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 255 154 ; GFX6: [[COPY7:%[0-9]+]]:_(s32) = COPY [[LSHR7]](s32) 155 ; GFX6: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY7]], [[C5]] 156 ; GFX6: [[SHL4:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[COPY6]](s32) 157 ; GFX6: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[SHL4]](s32) 158 ; GFX6: [[OR:%[0-9]+]]:_(s16) = G_OR [[AND]], [[TRUNC1]] 159 ; GFX6: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[OR]](s16) 160 ; GFX6: $vgpr0 = COPY [[ANYEXT]](s32) 161 ; GFX8-LABEL: name: uaddsat_v2s8 162 ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0 163 ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1 164 ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8 165 ; GFX8: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32) 166 ; GFX8: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16 167 ; GFX8: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C1]](s32) 168 ; GFX8: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24 169 ; GFX8: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C2]](s32) 170 ; GFX8: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C]](s32) 171 ; GFX8: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C1]](s32) 172 ; GFX8: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C2]](s32) 173 ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32) 174 ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32) 175 ; GFX8: [[C3:%[0-9]+]]:_(s16) = G_CONSTANT i16 8 176 ; GFX8: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C3]](s16) 177 ; GFX8: [[SHL1:%[0-9]+]]:_(s16) = G_SHL 
[[TRUNC1]], [[C3]](s16) 178 ; GFX8: [[UADDSAT:%[0-9]+]]:_(s16) = G_UADDSAT [[SHL]], [[SHL1]] 179 ; GFX8: [[LSHR6:%[0-9]+]]:_(s16) = G_LSHR [[UADDSAT]], [[C3]](s16) 180 ; GFX8: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32) 181 ; GFX8: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR3]](s32) 182 ; GFX8: [[SHL2:%[0-9]+]]:_(s16) = G_SHL [[TRUNC2]], [[C3]](s16) 183 ; GFX8: [[SHL3:%[0-9]+]]:_(s16) = G_SHL [[TRUNC3]], [[C3]](s16) 184 ; GFX8: [[UADDSAT1:%[0-9]+]]:_(s16) = G_UADDSAT [[SHL2]], [[SHL3]] 185 ; GFX8: [[LSHR7:%[0-9]+]]:_(s16) = G_LSHR [[UADDSAT1]], [[C3]](s16) 186 ; GFX8: [[C4:%[0-9]+]]:_(s16) = G_CONSTANT i16 255 187 ; GFX8: [[COPY2:%[0-9]+]]:_(s16) = COPY [[LSHR6]](s16) 188 ; GFX8: [[AND:%[0-9]+]]:_(s16) = G_AND [[COPY2]], [[C4]] 189 ; GFX8: [[COPY3:%[0-9]+]]:_(s16) = COPY [[LSHR7]](s16) 190 ; GFX8: [[AND1:%[0-9]+]]:_(s16) = G_AND [[COPY3]], [[C4]] 191 ; GFX8: [[SHL4:%[0-9]+]]:_(s16) = G_SHL [[AND1]], [[C3]](s16) 192 ; GFX8: [[OR:%[0-9]+]]:_(s16) = G_OR [[AND]], [[SHL4]] 193 ; GFX8: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[OR]](s16) 194 ; GFX8: $vgpr0 = COPY [[ANYEXT]](s32) 195 ; GFX9-LABEL: name: uaddsat_v2s8 196 ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0 197 ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1 198 ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8 199 ; GFX9: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32) 200 ; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16 201 ; GFX9: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C1]](s32) 202 ; GFX9: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24 203 ; GFX9: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C2]](s32) 204 ; GFX9: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C]](s32) 205 ; GFX9: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C1]](s32) 206 ; GFX9: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C2]](s32) 207 ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32) 208 ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32) 209 ; GFX9: [[BUILD_VECTOR_TRUNC:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY2]](s32), [[COPY3]](s32) 210 ; GFX9: [[COPY4:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32) 211 ; GFX9: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32) 212 ; GFX9: [[BUILD_VECTOR_TRUNC1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY4]](s32), [[COPY5]](s32) 213 ; GFX9: [[C3:%[0-9]+]]:_(s16) = G_CONSTANT i16 8 214 ; GFX9: [[COPY6:%[0-9]+]]:_(s32) = COPY [[C]](s32) 215 ; GFX9: [[BUILD_VECTOR_TRUNC2:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY6]](s32), [[COPY6]](s32) 216 ; GFX9: [[SHL:%[0-9]+]]:_(<2 x s16>) = G_SHL [[BUILD_VECTOR_TRUNC]], [[BUILD_VECTOR_TRUNC2]](<2 x s16>) 217 ; GFX9: [[SHL1:%[0-9]+]]:_(<2 x s16>) = G_SHL [[BUILD_VECTOR_TRUNC1]], [[BUILD_VECTOR_TRUNC2]](<2 x s16>) 218 ; GFX9: [[UADDSAT:%[0-9]+]]:_(<2 x s16>) = G_UADDSAT [[SHL]], [[SHL1]] 219 ; GFX9: [[LSHR6:%[0-9]+]]:_(<2 x s16>) = G_LSHR [[UADDSAT]], [[BUILD_VECTOR_TRUNC2]](<2 x s16>) 220 ; GFX9: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[LSHR6]](<2 x s16>) 221 ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32) 222 ; GFX9: [[LSHR7:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C1]](s32) 223 ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR7]](s32) 224 ; GFX9: [[C4:%[0-9]+]]:_(s16) = G_CONSTANT i16 255 225 ; GFX9: [[COPY7:%[0-9]+]]:_(s16) = COPY [[TRUNC]](s16) 226 ; GFX9: [[AND:%[0-9]+]]:_(s16) = G_AND [[COPY7]], [[C4]] 227 ; GFX9: [[COPY8:%[0-9]+]]:_(s16) = COPY [[TRUNC1]](s16) 228 ; GFX9: [[AND1:%[0-9]+]]:_(s16) = G_AND [[COPY8]], [[C4]] 229 ; GFX9: [[SHL2:%[0-9]+]]:_(s16) = G_SHL [[AND1]], [[C3]](s16) 230 ; GFX9: [[OR:%[0-9]+]]:_(s16) = G_OR [[AND]], 
[[SHL2]] 231 ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[OR]](s16) 232 ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32) 233 %0:_(s32) = COPY $vgpr0 234 %1:_(s32) = COPY $vgpr1 235 %2:_(s16) = G_TRUNC %0 236 %3:_(s16) = G_TRUNC %1 237 %4:_(<2 x s8>) = G_BITCAST %2 238 %5:_(<2 x s8>) = G_BITCAST %3 239 %6:_(<2 x s8>) = G_UADDSAT %4, %5 240 %7:_(s16) = G_BITCAST %6 241 %8:_(s32) = G_ANYEXT %7 242 $vgpr0 = COPY %8 243... 244 245--- 246name: uaddsat_s16 247body: | 248 bb.0: 249 liveins: $vgpr0, $vgpr1 250 251 ; GFX6-LABEL: name: uaddsat_s16 252 ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0 253 ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1 254 ; GFX6: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32) 255 ; GFX6: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32) 256 ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16 257 ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY2]], [[C]](s32) 258 ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY3]], [[C]](s32) 259 ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1 260 ; GFX6: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[SHL]], [[C1]] 261 ; GFX6: [[UMIN:%[0-9]+]]:_(s32) = G_UMIN [[XOR]], [[SHL1]] 262 ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[SHL]], [[UMIN]] 263 ; GFX6: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[ADD]], [[C]](s32) 264 ; GFX6: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32) 265 ; GFX6: $vgpr0 = COPY [[COPY4]](s32) 266 ; GFX8-LABEL: name: uaddsat_s16 267 ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0 268 ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1 269 ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32) 270 ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32) 271 ; GFX8: [[UADDSAT:%[0-9]+]]:_(s16) = G_UADDSAT [[TRUNC]], [[TRUNC1]] 272 ; GFX8: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[UADDSAT]](s16) 273 ; GFX8: $vgpr0 = COPY [[ANYEXT]](s32) 274 ; GFX9-LABEL: name: uaddsat_s16 275 ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0 276 ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1 277 ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32) 278 ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32) 279 ; GFX9: [[UADDSAT:%[0-9]+]]:_(s16) = G_UADDSAT [[TRUNC]], [[TRUNC1]] 280 ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[UADDSAT]](s16) 281 ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32) 282 %0:_(s32) = COPY $vgpr0 283 %1:_(s32) = COPY $vgpr1 284 %2:_(s16) = G_TRUNC %0 285 %3:_(s16) = G_TRUNC %1 286 %4:_(s16) = G_UADDSAT %2, %3 287 %5:_(s32) = G_ANYEXT %4 288 $vgpr0 = COPY %5 289... 
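# Note: for <2 x s16> the checks below show GFX6 expanding each half with the
# shift + xor/umin/add pattern, GFX8 using two scalar s16 G_UADDSAT, and GFX9
# keeping a single native <2 x s16> G_UADDSAT.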
290 291--- 292name: uaddsat_v2s16 293body: | 294 bb.0: 295 liveins: $vgpr0, $vgpr1 296 297 ; GFX6-LABEL: name: uaddsat_v2s16 298 ; GFX6: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0 299 ; GFX6: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1 300 ; GFX6: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>) 301 ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16 302 ; GFX6: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32) 303 ; GFX6: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[COPY1]](<2 x s16>) 304 ; GFX6: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32) 305 ; GFX6: [[COPY2:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32) 306 ; GFX6: [[COPY3:%[0-9]+]]:_(s32) = COPY [[BITCAST1]](s32) 307 ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY2]], [[C]](s32) 308 ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY3]], [[C]](s32) 309 ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1 310 ; GFX6: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[SHL]], [[C1]] 311 ; GFX6: [[UMIN:%[0-9]+]]:_(s32) = G_UMIN [[XOR]], [[SHL1]] 312 ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[SHL]], [[UMIN]] 313 ; GFX6: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[ADD]], [[C]](s32) 314 ; GFX6: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32) 315 ; GFX6: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32) 316 ; GFX6: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[COPY4]], [[C]](s32) 317 ; GFX6: [[SHL3:%[0-9]+]]:_(s32) = G_SHL [[COPY5]], [[C]](s32) 318 ; GFX6: [[XOR1:%[0-9]+]]:_(s32) = G_XOR [[SHL2]], [[C1]] 319 ; GFX6: [[UMIN1:%[0-9]+]]:_(s32) = G_UMIN [[XOR1]], [[SHL3]] 320 ; GFX6: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[SHL2]], [[UMIN1]] 321 ; GFX6: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[ADD1]], [[C]](s32) 322 ; GFX6: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535 323 ; GFX6: [[COPY6:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32) 324 ; GFX6: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY6]], [[C2]] 325 ; GFX6: [[COPY7:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32) 326 ; GFX6: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY7]], [[C2]] 327 ; GFX6: [[SHL4:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32) 328 ; GFX6: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL4]] 329 ; GFX6: [[BITCAST2:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32) 330 ; GFX6: $vgpr0 = COPY [[BITCAST2]](<2 x s16>) 331 ; GFX8-LABEL: name: uaddsat_v2s16 332 ; GFX8: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0 333 ; GFX8: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1 334 ; GFX8: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>) 335 ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32) 336 ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16 337 ; GFX8: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32) 338 ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32) 339 ; GFX8: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[COPY1]](<2 x s16>) 340 ; GFX8: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST1]](s32) 341 ; GFX8: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32) 342 ; GFX8: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR1]](s32) 343 ; GFX8: [[UADDSAT:%[0-9]+]]:_(s16) = G_UADDSAT [[TRUNC]], [[TRUNC2]] 344 ; GFX8: [[UADDSAT1:%[0-9]+]]:_(s16) = G_UADDSAT [[TRUNC1]], [[TRUNC3]] 345 ; GFX8: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[UADDSAT]](s16) 346 ; GFX8: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[UADDSAT1]](s16) 347 ; GFX8: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C]](s32) 348 ; GFX8: [[OR:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL]] 349 ; GFX8: [[BITCAST2:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32) 350 ; GFX8: $vgpr0 = COPY [[BITCAST2]](<2 x s16>) 351 ; GFX9-LABEL: name: uaddsat_v2s16 352 ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0 353 ; GFX9: 
[[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1 354 ; GFX9: [[UADDSAT:%[0-9]+]]:_(<2 x s16>) = G_UADDSAT [[COPY]], [[COPY1]] 355 ; GFX9: $vgpr0 = COPY [[UADDSAT]](<2 x s16>) 356 %0:_(<2 x s16>) = COPY $vgpr0 357 %1:_(<2 x s16>) = COPY $vgpr1 358 %2:_(<2 x s16>) = G_UADDSAT %0, %1 359 $vgpr0 = COPY %2 360... 361 362--- 363name: uaddsat_v3s16 364body: | 365 bb.0: 366 liveins: $vgpr0_vgpr1_vgpr2 367 368 ; GFX6-LABEL: name: uaddsat_v3s16 369 ; GFX6: [[COPY:%[0-9]+]]:_(<6 x s16>) = COPY $vgpr0_vgpr1_vgpr2 370 ; GFX6: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>) 371 ; GFX6: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>) 372 ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16 373 ; GFX6: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32) 374 ; GFX6: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>) 375 ; GFX6: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32) 376 ; GFX6: [[UV3:%[0-9]+]]:_(<2 x s16>), [[UV4:%[0-9]+]]:_(<2 x s16>), [[UV5:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>) 377 ; GFX6: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV4]](<2 x s16>) 378 ; GFX6: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32) 379 ; GFX6: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV5]](<2 x s16>) 380 ; GFX6: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32) 381 ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32) 382 ; GFX6: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32) 383 ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY1]], [[C]](s32) 384 ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY2]], [[C]](s32) 385 ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1 386 ; GFX6: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[SHL]], [[C1]] 387 ; GFX6: [[UMIN:%[0-9]+]]:_(s32) = G_UMIN [[XOR]], [[SHL1]] 388 ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[SHL]], [[UMIN]] 389 ; GFX6: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[ADD]], [[C]](s32) 390 ; GFX6: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32) 391 ; GFX6: [[COPY4:%[0-9]+]]:_(s32) = COPY [[BITCAST3]](s32) 392 ; GFX6: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[COPY3]], [[C]](s32) 393 ; GFX6: [[SHL3:%[0-9]+]]:_(s32) = G_SHL [[COPY4]], [[C]](s32) 394 ; GFX6: [[XOR1:%[0-9]+]]:_(s32) = G_XOR [[SHL2]], [[C1]] 395 ; GFX6: [[UMIN1:%[0-9]+]]:_(s32) = G_UMIN [[XOR1]], [[SHL3]] 396 ; GFX6: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[SHL2]], [[UMIN1]] 397 ; GFX6: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[ADD1]], [[C]](s32) 398 ; GFX6: [[COPY5:%[0-9]+]]:_(s32) = COPY [[BITCAST1]](s32) 399 ; GFX6: [[COPY6:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32) 400 ; GFX6: [[SHL4:%[0-9]+]]:_(s32) = G_SHL [[COPY5]], [[C]](s32) 401 ; GFX6: [[SHL5:%[0-9]+]]:_(s32) = G_SHL [[COPY6]], [[C]](s32) 402 ; GFX6: [[XOR2:%[0-9]+]]:_(s32) = G_XOR [[SHL4]], [[C1]] 403 ; GFX6: [[UMIN2:%[0-9]+]]:_(s32) = G_UMIN [[XOR2]], [[SHL5]] 404 ; GFX6: [[ADD2:%[0-9]+]]:_(s32) = G_ADD [[SHL4]], [[UMIN2]] 405 ; GFX6: [[LSHR6:%[0-9]+]]:_(s32) = G_LSHR [[ADD2]], [[C]](s32) 406 ; GFX6: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF 407 ; GFX6: [[DEF1:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF 408 ; GFX6: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF 409 ; GFX6: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF 410 ; GFX6: [[UV6:%[0-9]+]]:_(<2 x s16>), [[UV7:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF2]](<4 x s16>) 411 ; GFX6: [[BITCAST4:%[0-9]+]]:_(s32) = G_BITCAST [[UV6]](<2 x s16>) 412 ; GFX6: [[LSHR7:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST4]], [[C]](s32) 413 ; GFX6: [[BITCAST5:%[0-9]+]]:_(s32) = G_BITCAST [[UV7]](<2 x s16>) 414 ; GFX6: 
[[LSHR8:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST5]], [[C]](s32) 415 ; GFX6: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535 416 ; GFX6: [[COPY7:%[0-9]+]]:_(s32) = COPY [[LSHR4]](s32) 417 ; GFX6: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY7]], [[C2]] 418 ; GFX6: [[COPY8:%[0-9]+]]:_(s32) = COPY [[LSHR5]](s32) 419 ; GFX6: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY8]], [[C2]] 420 ; GFX6: [[SHL6:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32) 421 ; GFX6: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL6]] 422 ; GFX6: [[BITCAST6:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32) 423 ; GFX6: [[COPY9:%[0-9]+]]:_(s32) = COPY [[LSHR6]](s32) 424 ; GFX6: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY9]], [[C2]] 425 ; GFX6: [[COPY10:%[0-9]+]]:_(s32) = COPY [[BITCAST4]](s32) 426 ; GFX6: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY10]], [[C2]] 427 ; GFX6: [[SHL7:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C]](s32) 428 ; GFX6: [[OR1:%[0-9]+]]:_(s32) = G_OR [[AND2]], [[SHL7]] 429 ; GFX6: [[BITCAST7:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32) 430 ; GFX6: [[COPY11:%[0-9]+]]:_(s32) = COPY [[LSHR7]](s32) 431 ; GFX6: [[AND4:%[0-9]+]]:_(s32) = G_AND [[COPY11]], [[C2]] 432 ; GFX6: [[COPY12:%[0-9]+]]:_(s32) = COPY [[BITCAST5]](s32) 433 ; GFX6: [[AND5:%[0-9]+]]:_(s32) = G_AND [[COPY12]], [[C2]] 434 ; GFX6: [[SHL8:%[0-9]+]]:_(s32) = G_SHL [[AND5]], [[C]](s32) 435 ; GFX6: [[OR2:%[0-9]+]]:_(s32) = G_OR [[AND4]], [[SHL8]] 436 ; GFX6: [[BITCAST8:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR2]](s32) 437 ; GFX6: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[BITCAST6]](<2 x s16>), [[BITCAST7]](<2 x s16>), [[BITCAST8]](<2 x s16>) 438 ; GFX6: $vgpr0_vgpr1_vgpr2 = COPY [[CONCAT_VECTORS]](<6 x s16>) 439 ; GFX8-LABEL: name: uaddsat_v3s16 440 ; GFX8: [[COPY:%[0-9]+]]:_(<6 x s16>) = COPY $vgpr0_vgpr1_vgpr2 441 ; GFX8: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>) 442 ; GFX8: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>) 443 ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32) 444 ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16 445 ; GFX8: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32) 446 ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32) 447 ; GFX8: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>) 448 ; GFX8: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST1]](s32) 449 ; GFX8: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32) 450 ; GFX8: [[UV3:%[0-9]+]]:_(<2 x s16>), [[UV4:%[0-9]+]]:_(<2 x s16>), [[UV5:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>) 451 ; GFX8: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV4]](<2 x s16>) 452 ; GFX8: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32) 453 ; GFX8: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR2]](s32) 454 ; GFX8: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV5]](<2 x s16>) 455 ; GFX8: [[TRUNC4:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST3]](s32) 456 ; GFX8: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32) 457 ; GFX8: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR3]](s32) 458 ; GFX8: [[UADDSAT:%[0-9]+]]:_(s16) = G_UADDSAT [[TRUNC]], [[TRUNC3]] 459 ; GFX8: [[UADDSAT1:%[0-9]+]]:_(s16) = G_UADDSAT [[TRUNC1]], [[TRUNC4]] 460 ; GFX8: [[UADDSAT2:%[0-9]+]]:_(s16) = G_UADDSAT [[TRUNC2]], [[TRUNC5]] 461 ; GFX8: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF 462 ; GFX8: [[DEF1:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF 463 ; GFX8: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF 464 ; GFX8: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF 465 ; GFX8: [[UV6:%[0-9]+]]:_(<2 x s16>), [[UV7:%[0-9]+]]:_(<2 
x s16>) = G_UNMERGE_VALUES [[DEF2]](<4 x s16>) 466 ; GFX8: [[BITCAST4:%[0-9]+]]:_(s32) = G_BITCAST [[UV6]](<2 x s16>) 467 ; GFX8: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST4]], [[C]](s32) 468 ; GFX8: [[BITCAST5:%[0-9]+]]:_(s32) = G_BITCAST [[UV7]](<2 x s16>) 469 ; GFX8: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST5]], [[C]](s32) 470 ; GFX8: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[UADDSAT]](s16) 471 ; GFX8: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[UADDSAT1]](s16) 472 ; GFX8: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C]](s32) 473 ; GFX8: [[OR:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL]] 474 ; GFX8: [[BITCAST6:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32) 475 ; GFX8: [[ZEXT2:%[0-9]+]]:_(s32) = G_ZEXT [[UADDSAT2]](s16) 476 ; GFX8: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535 477 ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY [[BITCAST4]](s32) 478 ; GFX8: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C1]] 479 ; GFX8: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND]], [[C]](s32) 480 ; GFX8: [[OR1:%[0-9]+]]:_(s32) = G_OR [[ZEXT2]], [[SHL1]] 481 ; GFX8: [[BITCAST7:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32) 482 ; GFX8: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR4]](s32) 483 ; GFX8: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C1]] 484 ; GFX8: [[COPY3:%[0-9]+]]:_(s32) = COPY [[BITCAST5]](s32) 485 ; GFX8: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C1]] 486 ; GFX8: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C]](s32) 487 ; GFX8: [[OR2:%[0-9]+]]:_(s32) = G_OR [[AND1]], [[SHL2]] 488 ; GFX8: [[BITCAST8:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR2]](s32) 489 ; GFX8: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[BITCAST6]](<2 x s16>), [[BITCAST7]](<2 x s16>), [[BITCAST8]](<2 x s16>) 490 ; GFX8: $vgpr0_vgpr1_vgpr2 = COPY [[CONCAT_VECTORS]](<6 x s16>) 491 ; GFX9-LABEL: name: uaddsat_v3s16 492 ; GFX9: [[COPY:%[0-9]+]]:_(<6 x s16>) = COPY $vgpr0_vgpr1_vgpr2 493 ; GFX9: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>) 494 ; GFX9: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>) 495 ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16 496 ; GFX9: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32) 497 ; GFX9: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>) 498 ; GFX9: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32) 499 ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32) 500 ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32) 501 ; GFX9: [[BUILD_VECTOR_TRUNC:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY1]](s32), [[COPY2]](s32) 502 ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY [[BITCAST1]](s32) 503 ; GFX9: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF 504 ; GFX9: [[BUILD_VECTOR_TRUNC1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY3]](s32), [[DEF]](s32) 505 ; GFX9: [[DEF1:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF 506 ; GFX9: [[UV3:%[0-9]+]]:_(<2 x s16>), [[UV4:%[0-9]+]]:_(<2 x s16>), [[UV5:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>) 507 ; GFX9: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV4]](<2 x s16>) 508 ; GFX9: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32) 509 ; GFX9: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV5]](<2 x s16>) 510 ; GFX9: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32) 511 ; GFX9: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32) 512 ; GFX9: [[COPY5:%[0-9]+]]:_(s32) = COPY [[BITCAST3]](s32) 513 ; GFX9: [[BUILD_VECTOR_TRUNC2:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY4]](s32), [[COPY5]](s32) 514 ; GFX9: [[COPY6:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32) 515 
    ; GFX9: [[BUILD_VECTOR_TRUNC3:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY6]](s32), [[DEF]](s32)
    ; GFX9: [[UADDSAT:%[0-9]+]]:_(<2 x s16>) = G_UADDSAT [[BUILD_VECTOR_TRUNC]], [[BUILD_VECTOR_TRUNC2]]
    ; GFX9: [[UADDSAT1:%[0-9]+]]:_(<2 x s16>) = G_UADDSAT [[BUILD_VECTOR_TRUNC1]], [[BUILD_VECTOR_TRUNC3]]
    ; GFX9: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX9: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX9: [[BITCAST4:%[0-9]+]]:_(s32) = G_BITCAST [[UADDSAT]](<2 x s16>)
    ; GFX9: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST4]], [[C]](s32)
    ; GFX9: [[BITCAST5:%[0-9]+]]:_(s32) = G_BITCAST [[UADDSAT1]](<2 x s16>)
    ; GFX9: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST5]], [[C]](s32)
    ; GFX9: [[UV6:%[0-9]+]]:_(<2 x s16>), [[UV7:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF2]](<4 x s16>)
    ; GFX9: [[BITCAST6:%[0-9]+]]:_(s32) = G_BITCAST [[UV6]](<2 x s16>)
    ; GFX9: [[LSHR6:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST6]], [[C]](s32)
    ; GFX9: [[BITCAST7:%[0-9]+]]:_(s32) = G_BITCAST [[UV7]](<2 x s16>)
    ; GFX9: [[LSHR7:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST7]], [[C]](s32)
    ; GFX9: [[COPY7:%[0-9]+]]:_(s32) = COPY [[BITCAST4]](s32)
    ; GFX9: [[COPY8:%[0-9]+]]:_(s32) = COPY [[LSHR4]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC4:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY7]](s32), [[COPY8]](s32)
    ; GFX9: [[COPY9:%[0-9]+]]:_(s32) = COPY [[BITCAST5]](s32)
    ; GFX9: [[COPY10:%[0-9]+]]:_(s32) = COPY [[BITCAST6]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC5:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY9]](s32), [[COPY10]](s32)
    ; GFX9: [[COPY11:%[0-9]+]]:_(s32) = COPY [[LSHR6]](s32)
    ; GFX9: [[COPY12:%[0-9]+]]:_(s32) = COPY [[BITCAST7]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC6:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY11]](s32), [[COPY12]](s32)
    ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[BUILD_VECTOR_TRUNC4]](<2 x s16>), [[BUILD_VECTOR_TRUNC5]](<2 x s16>), [[BUILD_VECTOR_TRUNC6]](<2 x s16>)
    ; GFX9: $vgpr0_vgpr1_vgpr2 = COPY [[CONCAT_VECTORS]](<6 x s16>)
    %0:_(<6 x s16>) = COPY $vgpr0_vgpr1_vgpr2
    %1:_(<3 x s16>), %2:_(<3 x s16>) = G_UNMERGE_VALUES %0
    %3:_(<3 x s16>) = G_UADDSAT %1, %2
    %4:_(<3 x s16>) = G_IMPLICIT_DEF
    %5:_(<6 x s16>) = G_CONCAT_VECTORS %3, %4
    $vgpr0_vgpr1_vgpr2 = COPY %5
...
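# Note: <4 x s16> below splits into <2 x s16> halves; GFX9 keeps two vector
# G_UADDSAT ops, while GFX6 and GFX8 scalarize each half as in the v2s16 case.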
547 548--- 549name: uaddsat_v4s16 550body: | 551 bb.0: 552 liveins: $vgpr0_vgpr1, $vgpr2_vgpr3 553 554 ; GFX6-LABEL: name: uaddsat_v4s16 555 ; GFX6: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr0_vgpr1 556 ; GFX6: [[COPY1:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr2_vgpr3 557 ; GFX6: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<4 x s16>) 558 ; GFX6: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>) 559 ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16 560 ; GFX6: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32) 561 ; GFX6: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>) 562 ; GFX6: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32) 563 ; GFX6: [[UV2:%[0-9]+]]:_(<2 x s16>), [[UV3:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY1]](<4 x s16>) 564 ; GFX6: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV2]](<2 x s16>) 565 ; GFX6: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32) 566 ; GFX6: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV3]](<2 x s16>) 567 ; GFX6: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32) 568 ; GFX6: [[COPY2:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32) 569 ; GFX6: [[COPY3:%[0-9]+]]:_(s32) = COPY [[BITCAST2]](s32) 570 ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY2]], [[C]](s32) 571 ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[COPY3]], [[C]](s32) 572 ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1 573 ; GFX6: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[SHL]], [[C1]] 574 ; GFX6: [[UMIN:%[0-9]+]]:_(s32) = G_UMIN [[XOR]], [[SHL1]] 575 ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[SHL]], [[UMIN]] 576 ; GFX6: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[ADD]], [[C]](s32) 577 ; GFX6: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32) 578 ; GFX6: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32) 579 ; GFX6: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[COPY4]], [[C]](s32) 580 ; GFX6: [[SHL3:%[0-9]+]]:_(s32) = G_SHL [[COPY5]], [[C]](s32) 581 ; GFX6: [[XOR1:%[0-9]+]]:_(s32) = G_XOR [[SHL2]], [[C1]] 582 ; GFX6: [[UMIN1:%[0-9]+]]:_(s32) = G_UMIN [[XOR1]], [[SHL3]] 583 ; GFX6: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[SHL2]], [[UMIN1]] 584 ; GFX6: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[ADD1]], [[C]](s32) 585 ; GFX6: [[COPY6:%[0-9]+]]:_(s32) = COPY [[BITCAST1]](s32) 586 ; GFX6: [[COPY7:%[0-9]+]]:_(s32) = COPY [[BITCAST3]](s32) 587 ; GFX6: [[SHL4:%[0-9]+]]:_(s32) = G_SHL [[COPY6]], [[C]](s32) 588 ; GFX6: [[SHL5:%[0-9]+]]:_(s32) = G_SHL [[COPY7]], [[C]](s32) 589 ; GFX6: [[XOR2:%[0-9]+]]:_(s32) = G_XOR [[SHL4]], [[C1]] 590 ; GFX6: [[UMIN2:%[0-9]+]]:_(s32) = G_UMIN [[XOR2]], [[SHL5]] 591 ; GFX6: [[ADD2:%[0-9]+]]:_(s32) = G_ADD [[SHL4]], [[UMIN2]] 592 ; GFX6: [[LSHR6:%[0-9]+]]:_(s32) = G_LSHR [[ADD2]], [[C]](s32) 593 ; GFX6: [[COPY8:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32) 594 ; GFX6: [[COPY9:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32) 595 ; GFX6: [[SHL6:%[0-9]+]]:_(s32) = G_SHL [[COPY8]], [[C]](s32) 596 ; GFX6: [[SHL7:%[0-9]+]]:_(s32) = G_SHL [[COPY9]], [[C]](s32) 597 ; GFX6: [[XOR3:%[0-9]+]]:_(s32) = G_XOR [[SHL6]], [[C1]] 598 ; GFX6: [[UMIN3:%[0-9]+]]:_(s32) = G_UMIN [[XOR3]], [[SHL7]] 599 ; GFX6: [[ADD3:%[0-9]+]]:_(s32) = G_ADD [[SHL6]], [[UMIN3]] 600 ; GFX6: [[LSHR7:%[0-9]+]]:_(s32) = G_LSHR [[ADD3]], [[C]](s32) 601 ; GFX6: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535 602 ; GFX6: [[COPY10:%[0-9]+]]:_(s32) = COPY [[LSHR4]](s32) 603 ; GFX6: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY10]], [[C2]] 604 ; GFX6: [[COPY11:%[0-9]+]]:_(s32) = COPY [[LSHR5]](s32) 605 ; GFX6: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY11]], [[C2]] 606 ; GFX6: [[SHL8:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32) 607 ; 
GFX6: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL8]] 608 ; GFX6: [[BITCAST4:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32) 609 ; GFX6: [[COPY12:%[0-9]+]]:_(s32) = COPY [[LSHR6]](s32) 610 ; GFX6: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY12]], [[C2]] 611 ; GFX6: [[COPY13:%[0-9]+]]:_(s32) = COPY [[LSHR7]](s32) 612 ; GFX6: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY13]], [[C2]] 613 ; GFX6: [[SHL9:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C]](s32) 614 ; GFX6: [[OR1:%[0-9]+]]:_(s32) = G_OR [[AND2]], [[SHL9]] 615 ; GFX6: [[BITCAST5:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32) 616 ; GFX6: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BITCAST4]](<2 x s16>), [[BITCAST5]](<2 x s16>) 617 ; GFX6: $vgpr0_vgpr1 = COPY [[CONCAT_VECTORS]](<4 x s16>) 618 ; GFX8-LABEL: name: uaddsat_v4s16 619 ; GFX8: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr0_vgpr1 620 ; GFX8: [[COPY1:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr2_vgpr3 621 ; GFX8: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<4 x s16>) 622 ; GFX8: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>) 623 ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32) 624 ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16 625 ; GFX8: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32) 626 ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32) 627 ; GFX8: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>) 628 ; GFX8: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST1]](s32) 629 ; GFX8: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32) 630 ; GFX8: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR1]](s32) 631 ; GFX8: [[UV2:%[0-9]+]]:_(<2 x s16>), [[UV3:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY1]](<4 x s16>) 632 ; GFX8: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV2]](<2 x s16>) 633 ; GFX8: [[TRUNC4:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST2]](s32) 634 ; GFX8: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32) 635 ; GFX8: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR2]](s32) 636 ; GFX8: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV3]](<2 x s16>) 637 ; GFX8: [[TRUNC6:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST3]](s32) 638 ; GFX8: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32) 639 ; GFX8: [[TRUNC7:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR3]](s32) 640 ; GFX8: [[UADDSAT:%[0-9]+]]:_(s16) = G_UADDSAT [[TRUNC]], [[TRUNC4]] 641 ; GFX8: [[UADDSAT1:%[0-9]+]]:_(s16) = G_UADDSAT [[TRUNC1]], [[TRUNC5]] 642 ; GFX8: [[UADDSAT2:%[0-9]+]]:_(s16) = G_UADDSAT [[TRUNC2]], [[TRUNC6]] 643 ; GFX8: [[UADDSAT3:%[0-9]+]]:_(s16) = G_UADDSAT [[TRUNC3]], [[TRUNC7]] 644 ; GFX8: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[UADDSAT]](s16) 645 ; GFX8: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[UADDSAT1]](s16) 646 ; GFX8: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C]](s32) 647 ; GFX8: [[OR:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL]] 648 ; GFX8: [[BITCAST4:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32) 649 ; GFX8: [[ZEXT2:%[0-9]+]]:_(s32) = G_ZEXT [[UADDSAT2]](s16) 650 ; GFX8: [[ZEXT3:%[0-9]+]]:_(s32) = G_ZEXT [[UADDSAT3]](s16) 651 ; GFX8: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[ZEXT3]], [[C]](s32) 652 ; GFX8: [[OR1:%[0-9]+]]:_(s32) = G_OR [[ZEXT2]], [[SHL1]] 653 ; GFX8: [[BITCAST5:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32) 654 ; GFX8: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BITCAST4]](<2 x s16>), [[BITCAST5]](<2 x s16>) 655 ; GFX8: $vgpr0_vgpr1 = COPY [[CONCAT_VECTORS]](<4 x s16>) 656 ; GFX9-LABEL: name: uaddsat_v4s16 657 ; GFX9: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr0_vgpr1 658 ; GFX9: [[COPY1:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr2_vgpr3 659 
    ; GFX9: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<4 x s16>)
    ; GFX9: [[UV2:%[0-9]+]]:_(<2 x s16>), [[UV3:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY1]](<4 x s16>)
    ; GFX9: [[UADDSAT:%[0-9]+]]:_(<2 x s16>) = G_UADDSAT [[UV]], [[UV2]]
    ; GFX9: [[UADDSAT1:%[0-9]+]]:_(<2 x s16>) = G_UADDSAT [[UV1]], [[UV3]]
    ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[UADDSAT]](<2 x s16>), [[UADDSAT1]](<2 x s16>)
    ; GFX9: $vgpr0_vgpr1 = COPY [[CONCAT_VECTORS]](<4 x s16>)
    %0:_(<4 x s16>) = COPY $vgpr0_vgpr1
    %1:_(<4 x s16>) = COPY $vgpr2_vgpr3
    %2:_(<4 x s16>) = G_UADDSAT %0, %1
    $vgpr0_vgpr1 = COPY %2
...

---
name: uaddsat_s32
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; GFX6-LABEL: name: uaddsat_s32
    ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; GFX6: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[COPY]], [[C]]
    ; GFX6: [[UMIN:%[0-9]+]]:_(s32) = G_UMIN [[XOR]], [[COPY1]]
    ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY]], [[UMIN]]
    ; GFX6: $vgpr0 = COPY [[ADD]](s32)
    ; GFX8-LABEL: name: uaddsat_s32
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX8: [[UADDSAT:%[0-9]+]]:_(s32) = G_UADDSAT [[COPY]], [[COPY1]]
    ; GFX8: $vgpr0 = COPY [[UADDSAT]](s32)
    ; GFX9-LABEL: name: uaddsat_s32
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[UADDSAT:%[0-9]+]]:_(s32) = G_UADDSAT [[COPY]], [[COPY1]]
    ; GFX9: $vgpr0 = COPY [[UADDSAT]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s32) = G_UADDSAT %0, %1
    $vgpr0 = COPY %2
...
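# Note: <2 x s32> below scalarizes to two s32 G_UADDSAT; GFX6 lowers each
# element to a + umin(~a, b) as in the s32 test above, while GFX8/GFX9 keep
# the scalar s32 G_UADDSAT as-is.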
700 701--- 702name: uaddsat_v2s32 703body: | 704 bb.0: 705 liveins: $vgpr0_vgpr1, $vgpr2_vgpr3 706 707 ; GFX6-LABEL: name: uaddsat_v2s32 708 ; GFX6: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1 709 ; GFX6: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr2_vgpr3 710 ; GFX6: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>) 711 ; GFX6: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>) 712 ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1 713 ; GFX6: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[UV]], [[C]] 714 ; GFX6: [[UMIN:%[0-9]+]]:_(s32) = G_UMIN [[XOR]], [[UV2]] 715 ; GFX6: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[UV]], [[UMIN]] 716 ; GFX6: [[XOR1:%[0-9]+]]:_(s32) = G_XOR [[UV1]], [[C]] 717 ; GFX6: [[UMIN1:%[0-9]+]]:_(s32) = G_UMIN [[XOR1]], [[UV3]] 718 ; GFX6: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[UV1]], [[UMIN1]] 719 ; GFX6: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[ADD]](s32), [[ADD1]](s32) 720 ; GFX6: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>) 721 ; GFX8-LABEL: name: uaddsat_v2s32 722 ; GFX8: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1 723 ; GFX8: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr2_vgpr3 724 ; GFX8: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>) 725 ; GFX8: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>) 726 ; GFX8: [[UADDSAT:%[0-9]+]]:_(s32) = G_UADDSAT [[UV]], [[UV2]] 727 ; GFX8: [[UADDSAT1:%[0-9]+]]:_(s32) = G_UADDSAT [[UV1]], [[UV3]] 728 ; GFX8: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[UADDSAT]](s32), [[UADDSAT1]](s32) 729 ; GFX8: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>) 730 ; GFX9-LABEL: name: uaddsat_v2s32 731 ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1 732 ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr2_vgpr3 733 ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>) 734 ; GFX9: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>) 735 ; GFX9: [[UADDSAT:%[0-9]+]]:_(s32) = G_UADDSAT [[UV]], [[UV2]] 736 ; GFX9: [[UADDSAT1:%[0-9]+]]:_(s32) = G_UADDSAT [[UV1]], [[UV3]] 737 ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[UADDSAT]](s32), [[UADDSAT1]](s32) 738 ; GFX9: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>) 739 %0:_(<2 x s32>) = COPY $vgpr0_vgpr1 740 %1:_(<2 x s32>) = COPY $vgpr2_vgpr3 741 %2:_(<2 x s32>) = G_UADDSAT %0, %1 742 $vgpr0_vgpr1 = COPY %2 743... 
744 745--- 746name: uaddsat_s64 747body: | 748 bb.0: 749 liveins: $vgpr0_vgpr1, $vgpr2_vgpr3 750 751 ; GFX6-LABEL: name: uaddsat_s64 752 ; GFX6: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1 753 ; GFX6: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3 754 ; GFX6: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](s64) 755 ; GFX6: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](s64) 756 ; GFX6: [[UADDO:%[0-9]+]]:_(s32), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[UV]], [[UV2]] 757 ; GFX6: [[UADDE:%[0-9]+]]:_(s32), [[UADDE1:%[0-9]+]]:_(s1) = G_UADDE [[UV1]], [[UV3]], [[UADDO1]] 758 ; GFX6: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO]](s32), [[UADDE]](s32) 759 ; GFX6: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[MV]](s64), [[COPY1]] 760 ; GFX6: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 -1 761 ; GFX6: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[C]], [[MV]] 762 ; GFX6: $vgpr0_vgpr1 = COPY [[SELECT]](s64) 763 ; GFX8-LABEL: name: uaddsat_s64 764 ; GFX8: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1 765 ; GFX8: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3 766 ; GFX8: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](s64) 767 ; GFX8: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](s64) 768 ; GFX8: [[UADDO:%[0-9]+]]:_(s32), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[UV]], [[UV2]] 769 ; GFX8: [[UADDE:%[0-9]+]]:_(s32), [[UADDE1:%[0-9]+]]:_(s1) = G_UADDE [[UV1]], [[UV3]], [[UADDO1]] 770 ; GFX8: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO]](s32), [[UADDE]](s32) 771 ; GFX8: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[MV]](s64), [[COPY1]] 772 ; GFX8: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 -1 773 ; GFX8: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[C]], [[MV]] 774 ; GFX8: $vgpr0_vgpr1 = COPY [[SELECT]](s64) 775 ; GFX9-LABEL: name: uaddsat_s64 776 ; GFX9: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1 777 ; GFX9: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3 778 ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](s64) 779 ; GFX9: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](s64) 780 ; GFX9: [[UADDO:%[0-9]+]]:_(s32), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[UV]], [[UV2]] 781 ; GFX9: [[UADDE:%[0-9]+]]:_(s32), [[UADDE1:%[0-9]+]]:_(s1) = G_UADDE [[UV1]], [[UV3]], [[UADDO1]] 782 ; GFX9: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO]](s32), [[UADDE]](s32) 783 ; GFX9: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[MV]](s64), [[COPY1]] 784 ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 -1 785 ; GFX9: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[C]], [[MV]] 786 ; GFX9: $vgpr0_vgpr1 = COPY [[SELECT]](s64) 787 %0:_(s64) = COPY $vgpr0_vgpr1 788 %1:_(s64) = COPY $vgpr2_vgpr3 789 %2:_(s64) = G_UADDSAT %0, %1 790 $vgpr0_vgpr1 = COPY %2 791... 
792 793--- 794name: uaddsat_v2s64 795body: | 796 bb.0: 797 liveins: $vgpr0_vgpr1_vgpr2_vgpr3, $vgpr4_vgpr5_vgpr6_vgpr7 798 799 ; GFX6-LABEL: name: uaddsat_v2s64 800 ; GFX6: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3 801 ; GFX6: [[COPY1:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr4_vgpr5_vgpr6_vgpr7 802 ; GFX6: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](<2 x s64>) 803 ; GFX6: [[UV2:%[0-9]+]]:_(s64), [[UV3:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY1]](<2 x s64>) 804 ; GFX6: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV]](s64) 805 ; GFX6: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV2]](s64) 806 ; GFX6: [[UADDO:%[0-9]+]]:_(s32), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[UV4]], [[UV6]] 807 ; GFX6: [[UADDE:%[0-9]+]]:_(s32), [[UADDE1:%[0-9]+]]:_(s1) = G_UADDE [[UV5]], [[UV7]], [[UADDO1]] 808 ; GFX6: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO]](s32), [[UADDE]](s32) 809 ; GFX6: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[MV]](s64), [[UV2]] 810 ; GFX6: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 -1 811 ; GFX6: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[C]], [[MV]] 812 ; GFX6: [[UV8:%[0-9]+]]:_(s32), [[UV9:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV1]](s64) 813 ; GFX6: [[UV10:%[0-9]+]]:_(s32), [[UV11:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV3]](s64) 814 ; GFX6: [[UADDO2:%[0-9]+]]:_(s32), [[UADDO3:%[0-9]+]]:_(s1) = G_UADDO [[UV8]], [[UV10]] 815 ; GFX6: [[UADDE2:%[0-9]+]]:_(s32), [[UADDE3:%[0-9]+]]:_(s1) = G_UADDE [[UV9]], [[UV11]], [[UADDO3]] 816 ; GFX6: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO2]](s32), [[UADDE2]](s32) 817 ; GFX6: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[MV1]](s64), [[UV3]] 818 ; GFX6: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[C]], [[MV1]] 819 ; GFX6: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[SELECT]](s64), [[SELECT1]](s64) 820 ; GFX6: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<2 x s64>) 821 ; GFX8-LABEL: name: uaddsat_v2s64 822 ; GFX8: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3 823 ; GFX8: [[COPY1:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr4_vgpr5_vgpr6_vgpr7 824 ; GFX8: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](<2 x s64>) 825 ; GFX8: [[UV2:%[0-9]+]]:_(s64), [[UV3:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY1]](<2 x s64>) 826 ; GFX8: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV]](s64) 827 ; GFX8: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV2]](s64) 828 ; GFX8: [[UADDO:%[0-9]+]]:_(s32), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[UV4]], [[UV6]] 829 ; GFX8: [[UADDE:%[0-9]+]]:_(s32), [[UADDE1:%[0-9]+]]:_(s1) = G_UADDE [[UV5]], [[UV7]], [[UADDO1]] 830 ; GFX8: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO]](s32), [[UADDE]](s32) 831 ; GFX8: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[MV]](s64), [[UV2]] 832 ; GFX8: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 -1 833 ; GFX8: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[C]], [[MV]] 834 ; GFX8: [[UV8:%[0-9]+]]:_(s32), [[UV9:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV1]](s64) 835 ; GFX8: [[UV10:%[0-9]+]]:_(s32), [[UV11:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV3]](s64) 836 ; GFX8: [[UADDO2:%[0-9]+]]:_(s32), [[UADDO3:%[0-9]+]]:_(s1) = G_UADDO [[UV8]], [[UV10]] 837 ; GFX8: [[UADDE2:%[0-9]+]]:_(s32), [[UADDE3:%[0-9]+]]:_(s1) = G_UADDE [[UV9]], [[UV11]], [[UADDO3]] 838 ; GFX8: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO2]](s32), [[UADDE2]](s32) 839 ; GFX8: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[MV1]](s64), 
[[UV3]] 840 ; GFX8: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[C]], [[MV1]] 841 ; GFX8: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[SELECT]](s64), [[SELECT1]](s64) 842 ; GFX8: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<2 x s64>) 843 ; GFX9-LABEL: name: uaddsat_v2s64 844 ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3 845 ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr4_vgpr5_vgpr6_vgpr7 846 ; GFX9: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY]](<2 x s64>) 847 ; GFX9: [[UV2:%[0-9]+]]:_(s64), [[UV3:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[COPY1]](<2 x s64>) 848 ; GFX9: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV]](s64) 849 ; GFX9: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV2]](s64) 850 ; GFX9: [[UADDO:%[0-9]+]]:_(s32), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[UV4]], [[UV6]] 851 ; GFX9: [[UADDE:%[0-9]+]]:_(s32), [[UADDE1:%[0-9]+]]:_(s1) = G_UADDE [[UV5]], [[UV7]], [[UADDO1]] 852 ; GFX9: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO]](s32), [[UADDE]](s32) 853 ; GFX9: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[MV]](s64), [[UV2]] 854 ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 -1 855 ; GFX9: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[C]], [[MV]] 856 ; GFX9: [[UV8:%[0-9]+]]:_(s32), [[UV9:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV1]](s64) 857 ; GFX9: [[UV10:%[0-9]+]]:_(s32), [[UV11:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV3]](s64) 858 ; GFX9: [[UADDO2:%[0-9]+]]:_(s32), [[UADDO3:%[0-9]+]]:_(s1) = G_UADDO [[UV8]], [[UV10]] 859 ; GFX9: [[UADDE2:%[0-9]+]]:_(s32), [[UADDE3:%[0-9]+]]:_(s1) = G_UADDE [[UV9]], [[UV11]], [[UADDO3]] 860 ; GFX9: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UADDO2]](s32), [[UADDE2]](s32) 861 ; GFX9: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(ult), [[MV1]](s64), [[UV3]] 862 ; GFX9: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP1]](s1), [[C]], [[MV1]] 863 ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[SELECT]](s64), [[SELECT1]](s64) 864 ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<2 x s64>) 865 %0:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3 866 %1:_(<2 x s64>) = COPY $vgpr4_vgpr5_vgpr6_vgpr7 867 %2:_(<2 x s64>) = G_UADDSAT %0, %1 868 $vgpr0_vgpr1_vgpr2_vgpr3 = COPY %2 869... 870