# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -O0 -mtriple=amdgcn-mesa-mesa3d -mcpu=gfx900 -run-pass=legalizer -global-isel-abort=0 %s -o - | FileCheck -check-prefix=GFX9 %s
# RUN: llc -O0 -mtriple=amdgcn-mesa-mesa3d -mcpu=fiji -run-pass=legalizer -global-isel-abort=0 %s -o - | FileCheck -check-prefix=GFX8 %s
# RUN: llc -O0 -mtriple=amdgcn-mesa-mesa3d -mcpu=tahiti -run-pass=legalizer -global-isel-abort=0 %s -o - | FileCheck -check-prefix=GFX6 %s

---
name: test_sext_inreg_s32_1
body: |
  bb.0:
    liveins: $vgpr0

    ; GFX9-LABEL: name: test_sext_inreg_s32_1
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 1
    ; GFX9: $vgpr0 = COPY [[SEXT_INREG]](s32)
    ; GFX8-LABEL: name: test_sext_inreg_s32_1
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 1
    ; GFX8: $vgpr0 = COPY [[SEXT_INREG]](s32)
    ; GFX6-LABEL: name: test_sext_inreg_s32_1
    ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 1
    ; GFX6: $vgpr0 = COPY [[SEXT_INREG]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = G_SEXT_INREG %0, 1
    $vgpr0 = COPY %1
...

---
name: test_sext_inreg_s32_2
body: |
  bb.0:
    liveins: $vgpr0

    ; GFX9-LABEL: name: test_sext_inreg_s32_2
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 2
    ; GFX9: $vgpr0 = COPY [[SEXT_INREG]](s32)
    ; GFX8-LABEL: name: test_sext_inreg_s32_2
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 2
    ; GFX8: $vgpr0 = COPY [[SEXT_INREG]](s32)
    ; GFX6-LABEL: name: test_sext_inreg_s32_2
    ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 2
    ; GFX6: $vgpr0 = COPY [[SEXT_INREG]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = G_SEXT_INREG %0, 2
    $vgpr0 = COPY %1
...

---
name: test_sext_inreg_s32_8
body: |
  bb.0:
    liveins: $vgpr0

    ; GFX9-LABEL: name: test_sext_inreg_s32_8
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 8
    ; GFX9: $vgpr0 = COPY [[SEXT_INREG]](s32)
    ; GFX8-LABEL: name: test_sext_inreg_s32_8
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 8
    ; GFX8: $vgpr0 = COPY [[SEXT_INREG]](s32)
    ; GFX6-LABEL: name: test_sext_inreg_s32_8
    ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 8
    ; GFX6: $vgpr0 = COPY [[SEXT_INREG]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = G_SEXT_INREG %0, 8
    $vgpr0 = COPY %1
...
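
# Summary note (added comment, not autogenerated): every width tested for an s32
# source is legal on gfx900, fiji and tahiti, so the checks above simply expect
# the single G_SEXT_INREG to survive the legalizer unchanged.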

---
name: test_sext_inreg_s32_16
body: |
  bb.0:
    liveins: $vgpr0

    ; GFX9-LABEL: name: test_sext_inreg_s32_16
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 16
    ; GFX9: $vgpr0 = COPY [[SEXT_INREG]](s32)
    ; GFX8-LABEL: name: test_sext_inreg_s32_16
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 16
    ; GFX8: $vgpr0 = COPY [[SEXT_INREG]](s32)
    ; GFX6-LABEL: name: test_sext_inreg_s32_16
    ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 16
    ; GFX6: $vgpr0 = COPY [[SEXT_INREG]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = G_SEXT_INREG %0, 16
    $vgpr0 = COPY %1
...

---
name: test_sext_inreg_s32_31
body: |
  bb.0:
    liveins: $vgpr0

    ; GFX9-LABEL: name: test_sext_inreg_s32_31
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 31
    ; GFX9: $vgpr0 = COPY [[SEXT_INREG]](s32)
    ; GFX8-LABEL: name: test_sext_inreg_s32_31
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 31
    ; GFX8: $vgpr0 = COPY [[SEXT_INREG]](s32)
    ; GFX6-LABEL: name: test_sext_inreg_s32_31
    ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 31
    ; GFX6: $vgpr0 = COPY [[SEXT_INREG]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = G_SEXT_INREG %0, 31
    $vgpr0 = COPY %1
...

---
name: test_sext_inreg_s64_1
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GFX9-LABEL: name: test_sext_inreg_s64_1
    ; GFX9: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 1
    ; GFX9: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    ; GFX8-LABEL: name: test_sext_inreg_s64_1
    ; GFX8: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 1
    ; GFX8: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    ; GFX6-LABEL: name: test_sext_inreg_s64_1
    ; GFX6: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 1
    ; GFX6: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    %0:_(s64) = COPY $vgpr0_vgpr1
    %1:_(s64) = G_SEXT_INREG %0, 1
    $vgpr0_vgpr1 = COPY %1
...

---
name: test_sext_inreg_s64_2
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GFX9-LABEL: name: test_sext_inreg_s64_2
    ; GFX9: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 2
    ; GFX9: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    ; GFX8-LABEL: name: test_sext_inreg_s64_2
    ; GFX8: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 2
    ; GFX8: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    ; GFX6-LABEL: name: test_sext_inreg_s64_2
    ; GFX6: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 2
    ; GFX6: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    %0:_(s64) = COPY $vgpr0_vgpr1
    %1:_(s64) = G_SEXT_INREG %0, 2
    $vgpr0_vgpr1 = COPY %1
...

---
name: test_sext_inreg_s64_8
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GFX9-LABEL: name: test_sext_inreg_s64_8
    ; GFX9: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 8
    ; GFX9: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    ; GFX8-LABEL: name: test_sext_inreg_s64_8
    ; GFX8: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 8
    ; GFX8: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    ; GFX6-LABEL: name: test_sext_inreg_s64_8
    ; GFX6: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 8
    ; GFX6: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    %0:_(s64) = COPY $vgpr0_vgpr1
    %1:_(s64) = G_SEXT_INREG %0, 8
    $vgpr0_vgpr1 = COPY %1
...

---
name: test_sext_inreg_s64_16
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GFX9-LABEL: name: test_sext_inreg_s64_16
    ; GFX9: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 16
    ; GFX9: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    ; GFX8-LABEL: name: test_sext_inreg_s64_16
    ; GFX8: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 16
    ; GFX8: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    ; GFX6-LABEL: name: test_sext_inreg_s64_16
    ; GFX6: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 16
    ; GFX6: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    %0:_(s64) = COPY $vgpr0_vgpr1
    %1:_(s64) = G_SEXT_INREG %0, 16
    $vgpr0_vgpr1 = COPY %1
...

---
name: test_sext_inreg_s64_31
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GFX9-LABEL: name: test_sext_inreg_s64_31
    ; GFX9: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 31
    ; GFX9: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    ; GFX8-LABEL: name: test_sext_inreg_s64_31
    ; GFX8: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 31
    ; GFX8: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    ; GFX6-LABEL: name: test_sext_inreg_s64_31
    ; GFX6: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 31
    ; GFX6: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    %0:_(s64) = COPY $vgpr0_vgpr1
    %1:_(s64) = G_SEXT_INREG %0, 31
    $vgpr0_vgpr1 = COPY %1
...

---
name: test_sext_inreg_s64_32
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GFX9-LABEL: name: test_sext_inreg_s64_32
    ; GFX9: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 32
    ; GFX9: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    ; GFX8-LABEL: name: test_sext_inreg_s64_32
    ; GFX8: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 32
    ; GFX8: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    ; GFX6-LABEL: name: test_sext_inreg_s64_32
    ; GFX6: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 32
    ; GFX6: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    %0:_(s64) = COPY $vgpr0_vgpr1
    %1:_(s64) = G_SEXT_INREG %0, 32
    $vgpr0_vgpr1 = COPY %1
...

---
name: test_sext_inreg_s64_33
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GFX9-LABEL: name: test_sext_inreg_s64_33
    ; GFX9: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 33
    ; GFX9: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    ; GFX8-LABEL: name: test_sext_inreg_s64_33
    ; GFX8: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 33
    ; GFX8: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    ; GFX6-LABEL: name: test_sext_inreg_s64_33
    ; GFX6: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 33
    ; GFX6: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    %0:_(s64) = COPY $vgpr0_vgpr1
    %1:_(s64) = G_SEXT_INREG %0, 33
    $vgpr0_vgpr1 = COPY %1
...

---
name: test_sext_inreg_s64_63
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GFX9-LABEL: name: test_sext_inreg_s64_63
    ; GFX9: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 63
    ; GFX9: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    ; GFX8-LABEL: name: test_sext_inreg_s64_63
    ; GFX8: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 63
    ; GFX8: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    ; GFX6-LABEL: name: test_sext_inreg_s64_63
    ; GFX6: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 63
    ; GFX6: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
    %0:_(s64) = COPY $vgpr0_vgpr1
    %1:_(s64) = G_SEXT_INREG %0, 63
    $vgpr0_vgpr1 = COPY %1
...

---
name: test_sext_inreg_s16_1
body: |
  bb.0:
    liveins: $vgpr0

    ; GFX9-LABEL: name: test_sext_inreg_s16_1
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY1]], 1
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[SEXT_INREG]](s32)
    ; GFX9: S_ENDPGM 0, implicit [[TRUNC]](s16)
    ; GFX8-LABEL: name: test_sext_inreg_s16_1
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX8: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 15
    ; GFX8: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C]](s16)
    ; GFX8: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[SHL]], [[C]](s16)
    ; GFX8: S_ENDPGM 0, implicit [[ASHR]](s16)
    ; GFX6-LABEL: name: test_sext_inreg_s16_1
    ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY1]], 1
    ; GFX6: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[SEXT_INREG]](s32)
    ; GFX6: S_ENDPGM 0, implicit [[TRUNC]](s16)
    %0:_(s32) = COPY $vgpr0
    %1:_(s16) = G_TRUNC %0
    %2:_(s16) = G_SEXT_INREG %1, 1
    S_ENDPGM 0, implicit %2

...
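
# Note (added comment, not autogenerated): the s16 cases show the two lowering
# strategies. GFX8 keeps 16-bit math and uses the shift pair
# sext_inreg(x, N) == ashr(shl(x, 16 - N), 16 - N) (shift amount 15 for N = 1
# above), while GFX6 and GFX9 widen to s32, sign-extend there and truncate back.
# As a worked example, sext_inreg(0x00ff, 8) == 0xffff and
# sext_inreg(0x007f, 8) == 0x007f.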

---
name: test_sext_inreg_s16_15
body: |
  bb.0:
    liveins: $vgpr0

    ; GFX9-LABEL: name: test_sext_inreg_s16_15
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY1]], 15
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[SEXT_INREG]](s32)
    ; GFX9: S_ENDPGM 0, implicit [[TRUNC]](s16)
    ; GFX8-LABEL: name: test_sext_inreg_s16_15
    ; GFX8: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX8: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 1
    ; GFX8: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C]](s16)
    ; GFX8: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[SHL]], [[C]](s16)
    ; GFX8: S_ENDPGM 0, implicit [[ASHR]](s16)
    ; GFX6-LABEL: name: test_sext_inreg_s16_15
    ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY1]], 15
    ; GFX6: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[SEXT_INREG]](s32)
    ; GFX6: S_ENDPGM 0, implicit [[TRUNC]](s16)
    %0:_(s32) = COPY $vgpr0
    %1:_(s16) = G_TRUNC %0
    %2:_(s16) = G_SEXT_INREG %1, 15
    S_ENDPGM 0, implicit %2

...

---
name: test_sext_inreg_s96_8
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2

    ; GFX9-LABEL: name: test_sext_inreg_s96_8
    ; GFX9: [[COPY:%[0-9]+]]:_(s96) = COPY $vgpr0_vgpr1_vgpr2
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s96)
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[SEXT_INREG]](s64)
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
    ; GFX9: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[UV1]], [[C]](s32)
    ; GFX9: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UV]](s32), [[UV1]](s32)
    ; GFX9: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[ASHR]](s32), [[ASHR]](s32)
    ; GFX9: [[MV2:%[0-9]+]]:_(s192) = G_MERGE_VALUES [[MV]](s64), [[MV1]](s64), [[MV1]](s64)
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s96) = G_TRUNC [[MV2]](s192)
    ; GFX9: $vgpr0_vgpr1_vgpr2 = COPY [[TRUNC1]](s96)
    ; GFX8-LABEL: name: test_sext_inreg_s96_8
    ; GFX8: [[COPY:%[0-9]+]]:_(s96) = COPY $vgpr0_vgpr1_vgpr2
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s96)
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX8: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[SEXT_INREG]](s64)
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
    ; GFX8: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[UV1]], [[C]](s32)
    ; GFX8: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UV]](s32), [[UV1]](s32)
    ; GFX8: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[ASHR]](s32), [[ASHR]](s32)
    ; GFX8: [[MV2:%[0-9]+]]:_(s192) = G_MERGE_VALUES [[MV]](s64), [[MV1]](s64), [[MV1]](s64)
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s96) = G_TRUNC [[MV2]](s192)
    ; GFX8: $vgpr0_vgpr1_vgpr2 = COPY [[TRUNC1]](s96)
    ; GFX6-LABEL: name: test_sext_inreg_s96_8
    ; GFX6: [[COPY:%[0-9]+]]:_(s96) = COPY $vgpr0_vgpr1_vgpr2
    ; GFX6: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s96)
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX6: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[SEXT_INREG]](s64)
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
    ; GFX6: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[UV1]], [[C]](s32)
    ; GFX6: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UV]](s32), [[UV1]](s32)
    ; GFX6: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[ASHR]](s32), [[ASHR]](s32)
    ; GFX6: [[MV2:%[0-9]+]]:_(s192) = G_MERGE_VALUES [[MV]](s64), [[MV1]](s64), [[MV1]](s64)
    ; GFX6: [[TRUNC1:%[0-9]+]]:_(s96) = G_TRUNC [[MV2]](s192)
    ; GFX6: $vgpr0_vgpr1_vgpr2 = COPY [[TRUNC1]](s96)
    %0:_(s96) = COPY $vgpr0_vgpr1_vgpr2
    %1:_(s96) = G_SEXT_INREG %0, 8
    $vgpr0_vgpr1_vgpr2 = COPY %1
...

---
name: test_sext_inreg_s128_8
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3

    ; GFX9-LABEL: name: test_sext_inreg_s128_8
    ; GFX9: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s128)
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG]], [[C]](s32)
    ; GFX9: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SEXT_INREG]](s64), [[ASHR]](s64)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; GFX8-LABEL: name: test_sext_inreg_s128_8
    ; GFX8: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s128)
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX8: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG]], [[C]](s32)
    ; GFX8: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SEXT_INREG]](s64), [[ASHR]](s64)
    ; GFX8: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    ; GFX6-LABEL: name: test_sext_inreg_s128_8
    ; GFX6: [[COPY:%[0-9]+]]:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX6: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s128)
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX6: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG]], [[C]](s32)
    ; GFX6: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SEXT_INREG]](s64), [[ASHR]](s64)
    ; GFX6: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[MV]](s128)
    %0:_(s128) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    %1:_(s128) = G_SEXT_INREG %0, 8
    $vgpr0_vgpr1_vgpr2_vgpr3 = COPY %1
...

---
name: test_sext_inreg_s160_8
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4

    ; GFX9-LABEL: name: test_sext_inreg_s160_8
    ; GFX9: [[COPY:%[0-9]+]]:_(s160) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s160)
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[SEXT_INREG]](s64)
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
    ; GFX9: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[UV1]], [[C]](s32)
    ; GFX9: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UV]](s32), [[UV1]](s32)
    ; GFX9: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[ASHR]](s32), [[ASHR]](s32)
    ; GFX9: [[MV2:%[0-9]+]]:_(s320) = G_MERGE_VALUES [[MV]](s64), [[MV1]](s64), [[MV1]](s64), [[MV1]](s64), [[MV1]](s64)
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s160) = G_TRUNC [[MV2]](s320)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4 = COPY [[TRUNC1]](s160)
    ; GFX8-LABEL: name: test_sext_inreg_s160_8
    ; GFX8: [[COPY:%[0-9]+]]:_(s160) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s160)
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX8: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[SEXT_INREG]](s64)
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
    ; GFX8: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[UV1]], [[C]](s32)
    ; GFX8: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UV]](s32), [[UV1]](s32)
    ; GFX8: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[ASHR]](s32), [[ASHR]](s32)
    ; GFX8: [[MV2:%[0-9]+]]:_(s320) = G_MERGE_VALUES [[MV]](s64), [[MV1]](s64), [[MV1]](s64), [[MV1]](s64), [[MV1]](s64)
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s160) = G_TRUNC [[MV2]](s320)
    ; GFX8: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4 = COPY [[TRUNC1]](s160)
    ; GFX6-LABEL: name: test_sext_inreg_s160_8
    ; GFX6: [[COPY:%[0-9]+]]:_(s160) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4
    ; GFX6: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s160)
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX6: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[SEXT_INREG]](s64)
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
    ; GFX6: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[UV1]], [[C]](s32)
    ; GFX6: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[UV]](s32), [[UV1]](s32)
    ; GFX6: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[ASHR]](s32), [[ASHR]](s32)
    ; GFX6: [[MV2:%[0-9]+]]:_(s320) = G_MERGE_VALUES [[MV]](s64), [[MV1]](s64), [[MV1]](s64), [[MV1]](s64), [[MV1]](s64)
    ; GFX6: [[TRUNC1:%[0-9]+]]:_(s160) = G_TRUNC [[MV2]](s320)
    ; GFX6: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4 = COPY [[TRUNC1]](s160)
    %0:_(s160) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4
    %1:_(s160) = G_SEXT_INREG %0, 8
    $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4 = COPY %1
...

---
name: test_sext_inreg_256_8
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7

    ; GFX9-LABEL: name: test_sext_inreg_256_8
    ; GFX9: [[COPY:%[0-9]+]]:_(s256) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s256)
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG]], [[C]](s32)
    ; GFX9: [[MV:%[0-9]+]]:_(s256) = G_MERGE_VALUES [[SEXT_INREG]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[MV]](s256)
    ; GFX8-LABEL: name: test_sext_inreg_256_8
    ; GFX8: [[COPY:%[0-9]+]]:_(s256) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s256)
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX8: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG]], [[C]](s32)
    ; GFX8: [[MV:%[0-9]+]]:_(s256) = G_MERGE_VALUES [[SEXT_INREG]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64)
    ; GFX8: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[MV]](s256)
    ; GFX6-LABEL: name: test_sext_inreg_256_8
    ; GFX6: [[COPY:%[0-9]+]]:_(s256) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
    ; GFX6: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s256)
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX6: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG]], [[C]](s32)
    ; GFX6: [[MV:%[0-9]+]]:_(s256) = G_MERGE_VALUES [[SEXT_INREG]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64)
    ; GFX6: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[MV]](s256)
    %0:_(s256) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
    %1:_(s256) = G_SEXT_INREG %0, 8
    $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY %1
...

---
name: test_sext_inreg_512_8
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15

    ; GFX9-LABEL: name: test_sext_inreg_512_8
    ; GFX9: [[COPY:%[0-9]+]]:_(s512) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s512)
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG]], [[C]](s32)
    ; GFX9: [[MV:%[0-9]+]]:_(s512) = G_MERGE_VALUES [[SEXT_INREG]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15 = COPY [[MV]](s512)
    ; GFX8-LABEL: name: test_sext_inreg_512_8
    ; GFX8: [[COPY:%[0-9]+]]:_(s512) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s512)
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX8: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG]], [[C]](s32)
    ; GFX8: [[MV:%[0-9]+]]:_(s512) = G_MERGE_VALUES [[SEXT_INREG]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64)
    ; GFX8: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15 = COPY [[MV]](s512)
    ; GFX6-LABEL: name: test_sext_inreg_512_8
    ; GFX6: [[COPY:%[0-9]+]]:_(s512) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15
    ; GFX6: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s512)
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX6: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG]], [[C]](s32)
    ; GFX6: [[MV:%[0-9]+]]:_(s512) = G_MERGE_VALUES [[SEXT_INREG]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64)
    ; GFX6: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15 = COPY [[MV]](s512)
    %0:_(s512) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15
    %1:_(s512) = G_SEXT_INREG %0, 8
    $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15 = COPY %1
...
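
# Note (added comment, not autogenerated): for scalars wider than 64 bits the
# expected pattern is the same throughout: sign-extend within the low 64-bit
# piece, then fill every remaining piece with the sign, i.e. an arithmetic shift
# right by 63 (or by 31 for the 32-bit pieces in the s96 and s160 cases above).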

---
name: test_sext_inreg_1024_8
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15_vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31

    ; GFX9-LABEL: name: test_sext_inreg_1024_8
    ; GFX9: [[COPY:%[0-9]+]]:_(s1024) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15_vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s1024)
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG]], [[C]](s32)
    ; GFX9: [[MV:%[0-9]+]]:_(s1024) = G_MERGE_VALUES [[SEXT_INREG]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15_vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31 = COPY [[MV]](s1024)
    ; GFX8-LABEL: name: test_sext_inreg_1024_8
    ; GFX8: [[COPY:%[0-9]+]]:_(s1024) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15_vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s1024)
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX8: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG]], [[C]](s32)
    ; GFX8: [[MV:%[0-9]+]]:_(s1024) = G_MERGE_VALUES [[SEXT_INREG]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64)
    ; GFX8: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15_vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31 = COPY [[MV]](s1024)
    ; GFX6-LABEL: name: test_sext_inreg_1024_8
    ; GFX6: [[COPY:%[0-9]+]]:_(s1024) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15_vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31
    ; GFX6: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[COPY]](s1024)
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 8
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
    ; GFX6: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG]], [[C]](s32)
    ; GFX6: [[MV:%[0-9]+]]:_(s1024) = G_MERGE_VALUES [[SEXT_INREG]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64), [[ASHR]](s64)
    ; GFX6: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15_vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31 = COPY [[MV]](s1024)
    %0:_(s1024) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15_vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31
    %1:_(s1024) = G_SEXT_INREG %0, 8
    $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15_vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31 = COPY %1
...

---
name: test_sext_inreg_v2s32_1
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GFX9-LABEL: name: test_sext_inreg_v2s32_1
    ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 1
    ; GFX9: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 1
    ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32)
    ; GFX9: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>)
    ; GFX8-LABEL: name: test_sext_inreg_v2s32_1
    ; GFX8: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; GFX8: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 1
    ; GFX8: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 1
    ; GFX8: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32)
    ; GFX8: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>)
    ; GFX6-LABEL: name: test_sext_inreg_v2s32_1
    ; GFX6: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; GFX6: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 1
    ; GFX6: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 1
    ; GFX6: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32)
    ; GFX6: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>)
    %0:_(<2 x s32>) = COPY $vgpr0_vgpr1
    %1:_(<2 x s32>) = G_SEXT_INREG %0, 1
    $vgpr0_vgpr1 = COPY %1
...

---
name: test_sext_inreg_v2s16_1
body: |
  bb.0:
    liveins: $vgpr0

    ; GFX9-LABEL: name: test_sext_inreg_v2s16_1
    ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 15
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY [[C]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY1]](s32), [[C]](s32)
    ; GFX9: [[SHL:%[0-9]+]]:_(<2 x s16>) = G_SHL [[COPY]], [[BUILD_VECTOR_TRUNC]](<2 x s16>)
    ; GFX9: [[ASHR:%[0-9]+]]:_(<2 x s16>) = G_ASHR [[SHL]], [[BUILD_VECTOR_TRUNC]](<2 x s16>)
    ; GFX9: $vgpr0 = COPY [[ASHR]](<2 x s16>)
    ; GFX8-LABEL: name: test_sext_inreg_v2s16_1
    ; GFX8: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
    ; GFX8: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>)
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX8: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; GFX8: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 15
    ; GFX8: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C1]](s16)
    ; GFX8: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[SHL]], [[C1]](s16)
    ; GFX8: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[TRUNC1]], [[C1]](s16)
    ; GFX8: [[ASHR1:%[0-9]+]]:_(s16) = G_ASHR [[SHL1]], [[C1]](s16)
    ; GFX8: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR]](s16)
    ; GFX8: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR1]](s16)
    ; GFX8: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C]](s32)
    ; GFX8: [[OR:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL2]]
    ; GFX8: [[BITCAST1:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; GFX8: $vgpr0 = COPY [[BITCAST1]](<2 x s16>)
    ; GFX6-LABEL: name: test_sext_inreg_v2s16_1
    ; GFX6: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
    ; GFX6: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>)
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX6: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32)
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY1]], 1
    ; GFX6: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
    ; GFX6: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY2]], 1
    ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; GFX6: [[COPY3:%[0-9]+]]:_(s32) = COPY [[SEXT_INREG]](s32)
    ; GFX6: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C1]]
    ; GFX6: [[COPY4:%[0-9]+]]:_(s32) = COPY [[SEXT_INREG1]](s32)
    ; GFX6: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C1]]
    ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32)
    ; GFX6: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
    ; GFX6: [[BITCAST1:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; GFX6: $vgpr0 = COPY [[BITCAST1]](<2 x s16>)
    %0:_(<2 x s16>) = COPY $vgpr0
    %1:_(<2 x s16>) = G_SEXT_INREG %0, 1
    $vgpr0 = COPY %1
...
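
# Note (added comment, not autogenerated): for <2 x s16> the three targets are
# expected to diverge. GFX9 uses packed 16-bit shifts on the whole vector, GFX8
# scalarizes to s16 shl/ashr pairs and repacks with zext/shl/or, and GFX6 widens
# each half to s32, applies G_SEXT_INREG there and repacks with and/shl/or.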

---
name: test_sext_inreg_v3s16_1
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2
    ; GFX9-LABEL: name: test_sext_inreg_v3s16_1
    ; GFX9: [[COPY:%[0-9]+]]:_(<6 x s16>) = COPY $vgpr0_vgpr1_vgpr2
    ; GFX9: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>)
    ; GFX9: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX9: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX9: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; GFX9: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32)
    ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY1]](s32), [[COPY2]](s32)
    ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY [[BITCAST1]](s32)
    ; GFX9: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; GFX9: [[BUILD_VECTOR_TRUNC1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY3]](s32), [[DEF]](s32)
    ; GFX9: [[DEF1:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF
    ; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 15
    ; GFX9: [[COPY4:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC2:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY4]](s32), [[C1]](s32)
    ; GFX9: [[SHL:%[0-9]+]]:_(<2 x s16>) = G_SHL [[BUILD_VECTOR_TRUNC]], [[BUILD_VECTOR_TRUNC2]](<2 x s16>)
    ; GFX9: [[ASHR:%[0-9]+]]:_(<2 x s16>) = G_ASHR [[SHL]], [[BUILD_VECTOR_TRUNC2]](<2 x s16>)
    ; GFX9: [[COPY5:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
    ; GFX9: [[COPY6:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC3:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY5]](s32), [[COPY6]](s32)
    ; GFX9: [[SHL1:%[0-9]+]]:_(<2 x s16>) = G_SHL [[BUILD_VECTOR_TRUNC1]], [[BUILD_VECTOR_TRUNC3]](<2 x s16>)
    ; GFX9: [[ASHR1:%[0-9]+]]:_(<2 x s16>) = G_ASHR [[SHL1]], [[BUILD_VECTOR_TRUNC3]](<2 x s16>)
    ; GFX9: [[COPY7:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
    ; GFX9: [[COPY8:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC4:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY7]](s32), [[COPY8]](s32)
    ; GFX9: [[SHL2:%[0-9]+]]:_(<2 x s16>) = G_SHL [[DEF1]], [[BUILD_VECTOR_TRUNC4]](<2 x s16>)
    ; GFX9: [[ASHR2:%[0-9]+]]:_(<2 x s16>) = G_ASHR [[SHL2]], [[BUILD_VECTOR_TRUNC4]](<2 x s16>)
    ; GFX9: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX9: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX9: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[ASHR]](<2 x s16>)
    ; GFX9: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; GFX9: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[ASHR1]](<2 x s16>)
    ; GFX9: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; GFX9: [[UV3:%[0-9]+]]:_(<2 x s16>), [[UV4:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF2]](<4 x s16>)
    ; GFX9: [[BITCAST4:%[0-9]+]]:_(s32) = G_BITCAST [[UV3]](<2 x s16>)
    ; GFX9: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST4]], [[C]](s32)
    ; GFX9: [[BITCAST5:%[0-9]+]]:_(s32) = G_BITCAST [[UV4]](<2 x s16>)
    ; GFX9: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST5]], [[C]](s32)
    ; GFX9: [[COPY9:%[0-9]+]]:_(s32) = COPY [[BITCAST2]](s32)
    ; GFX9: [[COPY10:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC5:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY9]](s32), [[COPY10]](s32)
    ; GFX9: [[COPY11:%[0-9]+]]:_(s32) = COPY [[BITCAST3]](s32)
    ; GFX9: [[COPY12:%[0-9]+]]:_(s32) = COPY [[BITCAST4]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC6:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY11]](s32), [[COPY12]](s32)
    ; GFX9: [[COPY13:%[0-9]+]]:_(s32) = COPY [[LSHR4]](s32)
    ; GFX9: [[COPY14:%[0-9]+]]:_(s32) = COPY [[BITCAST5]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC7:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY13]](s32), [[COPY14]](s32)
    ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[BUILD_VECTOR_TRUNC5]](<2 x s16>), [[BUILD_VECTOR_TRUNC6]](<2 x s16>), [[BUILD_VECTOR_TRUNC7]](<2 x s16>)
    ; GFX9: $vgpr0_vgpr1_vgpr2 = COPY [[CONCAT_VECTORS]](<6 x s16>)
    ; GFX8-LABEL: name: test_sext_inreg_v3s16_1
    ; GFX8: [[COPY:%[0-9]+]]:_(<6 x s16>) = COPY $vgpr0_vgpr1_vgpr2
    ; GFX8: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>)
    ; GFX8: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX8: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; GFX8: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; GFX8: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST1]](s32)
    ; GFX8: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; GFX8: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 15
    ; GFX8: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C1]](s16)
    ; GFX8: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[SHL]], [[C1]](s16)
    ; GFX8: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[TRUNC1]], [[C1]](s16)
    ; GFX8: [[ASHR1:%[0-9]+]]:_(s16) = G_ASHR [[SHL1]], [[C1]](s16)
    ; GFX8: [[SHL2:%[0-9]+]]:_(s16) = G_SHL [[TRUNC2]], [[C1]](s16)
    ; GFX8: [[ASHR2:%[0-9]+]]:_(s16) = G_ASHR [[SHL2]], [[C1]](s16)
    ; GFX8: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
    ; GFX8: [[DEF1:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF
    ; GFX8: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX8: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX8: [[UV3:%[0-9]+]]:_(<2 x s16>), [[UV4:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF2]](<4 x s16>)
    ; GFX8: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV3]](<2 x s16>)
    ; GFX8: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; GFX8: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV4]](<2 x s16>)
    ; GFX8: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; GFX8: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR]](s16)
    ; GFX8: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR1]](s16)
    ; GFX8: [[SHL3:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C]](s32)
    ; GFX8: [[OR:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL3]]
    ; GFX8: [[BITCAST4:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; GFX8: [[ZEXT2:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR2]](s16)
    ; GFX8: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY [[BITCAST2]](s32)
    ; GFX8: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C2]]
    ; GFX8: [[SHL4:%[0-9]+]]:_(s32) = G_SHL [[AND]], [[C]](s32)
    ; GFX8: [[OR1:%[0-9]+]]:_(s32) = G_OR [[ZEXT2]], [[SHL4]]
    ; GFX8: [[BITCAST5:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32)
    ; GFX8: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
    ; GFX8: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C2]]
    ; GFX8: [[COPY3:%[0-9]+]]:_(s32) = COPY [[BITCAST3]](s32)
    ; GFX8: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C2]]
    ; GFX8: [[SHL5:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C]](s32)
    ; GFX8: [[OR2:%[0-9]+]]:_(s32) = G_OR [[AND1]], [[SHL5]]
    ; GFX8: [[BITCAST6:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR2]](s32)
    ; GFX8: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[BITCAST4]](<2 x s16>), [[BITCAST5]](<2 x s16>), [[BITCAST6]](<2 x s16>)
    ; GFX8: $vgpr0_vgpr1_vgpr2 = COPY [[CONCAT_VECTORS]](<6 x s16>)
    ; GFX6-LABEL: name: test_sext_inreg_v3s16_1
    ; GFX6: [[COPY:%[0-9]+]]:_(<6 x s16>) = COPY $vgpr0_vgpr1_vgpr2
    ; GFX6: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<6 x s16>)
    ; GFX6: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX6: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX6: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; GFX6: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32)
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY1]], 1
    ; GFX6: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
    ; GFX6: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY2]], 1
    ; GFX6: [[COPY3:%[0-9]+]]:_(s32) = COPY [[BITCAST1]](s32)
    ; GFX6: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY3]], 1
    ; GFX6: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
    ; GFX6: [[DEF1:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF
    ; GFX6: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX6: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX6: [[UV3:%[0-9]+]]:_(<2 x s16>), [[UV4:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF2]](<4 x s16>)
    ; GFX6: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV3]](<2 x s16>)
    ; GFX6: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; GFX6: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV4]](<2 x s16>)
    ; GFX6: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; GFX6: [[COPY4:%[0-9]+]]:_(s32) = COPY [[SEXT_INREG]](s32)
    ; GFX6: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C1]]
    ; GFX6: [[COPY5:%[0-9]+]]:_(s32) = COPY [[SEXT_INREG1]](s32)
    ; GFX6: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY5]], [[C1]]
    ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32)
    ; GFX6: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
    ; GFX6: [[BITCAST4:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; GFX6: [[COPY6:%[0-9]+]]:_(s32) = COPY [[SEXT_INREG2]](s32)
    ; GFX6: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY6]], [[C1]]
    ; GFX6: [[COPY7:%[0-9]+]]:_(s32) = COPY [[BITCAST2]](s32)
    ; GFX6: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY7]], [[C1]]
    ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C]](s32)
    ; GFX6: [[OR1:%[0-9]+]]:_(s32) = G_OR [[AND2]], [[SHL1]]
    ; GFX6: [[BITCAST5:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32)
    ; GFX6: [[COPY8:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
    ; GFX6: [[AND4:%[0-9]+]]:_(s32) = G_AND [[COPY8]], [[C1]]
    ; GFX6: [[COPY9:%[0-9]+]]:_(s32) = COPY [[BITCAST3]](s32)
    ; GFX6: [[AND5:%[0-9]+]]:_(s32) = G_AND [[COPY9]], [[C1]]
    ; GFX6: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND5]], [[C]](s32)
    ; GFX6: [[OR2:%[0-9]+]]:_(s32) = G_OR [[AND4]], [[SHL2]]
    ; GFX6: [[BITCAST6:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR2]](s32)
    ; GFX6: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[BITCAST4]](<2 x s16>), [[BITCAST5]](<2 x s16>), [[BITCAST6]](<2 x s16>)
    ; GFX6: $vgpr0_vgpr1_vgpr2 = COPY [[CONCAT_VECTORS]](<6 x s16>)
    %0:_(<6 x s16>) = COPY $vgpr0_vgpr1_vgpr2
    %1:_(<3 x s16>), %2:_(<3 x s16>) = G_UNMERGE_VALUES %0
    %3:_(<3 x s16>) = G_SEXT_INREG %1, 1
    %4:_(<3 x s16>) = G_IMPLICIT_DEF
    %5:_(<6 x s16>) = G_CONCAT_VECTORS %3, %4
    $vgpr0_vgpr1_vgpr2 = COPY %5
...

---
name: test_sext_inreg_v3s32_1
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2

    ; GFX9-LABEL: name: test_sext_inreg_v3s32_1
    ; GFX9: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
    ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<3 x s32>)
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 1
    ; GFX9: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 1
    ; GFX9: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV2]], 1
    ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32), [[SEXT_INREG2]](s32)
    ; GFX9: $vgpr0_vgpr1_vgpr2 = COPY [[BUILD_VECTOR]](<3 x s32>)
    ; GFX8-LABEL: name: test_sext_inreg_v3s32_1
    ; GFX8: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
    ; GFX8: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<3 x s32>)
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 1
    ; GFX8: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 1
    ; GFX8: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV2]], 1
    ; GFX8: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32), [[SEXT_INREG2]](s32)
    ; GFX8: $vgpr0_vgpr1_vgpr2 = COPY [[BUILD_VECTOR]](<3 x s32>)
    ; GFX6-LABEL: name: test_sext_inreg_v3s32_1
    ; GFX6: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
    ; GFX6: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<3 x s32>)
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 1
    ; GFX6: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 1
    ; GFX6: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV2]], 1
    ; GFX6: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32), [[SEXT_INREG2]](s32)
    ; GFX6: $vgpr0_vgpr1_vgpr2 = COPY [[BUILD_VECTOR]](<3 x s32>)
    %0:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
    %1:_(<3 x s32>) = G_SEXT_INREG %0, 1
    $vgpr0_vgpr1_vgpr2 = COPY %1
...

---
name: test_sext_inreg_v4s32_1
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2_vgpr3

    ; GFX9-LABEL: name: test_sext_inreg_v4s32_1
    ; GFX9: [[COPY:%[0-9]+]]:_(<4 x s32>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<4 x s32>)
    ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 1
    ; GFX9: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 1
    ; GFX9: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV2]], 1
    ; GFX9: [[SEXT_INREG3:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV3]], 1
    ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32), [[SEXT_INREG2]](s32), [[SEXT_INREG3]](s32)
    ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<4 x s32>)
    ; GFX8-LABEL: name: test_sext_inreg_v4s32_1
    ; GFX8: [[COPY:%[0-9]+]]:_(<4 x s32>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX8: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<4 x s32>)
    ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 1
    ; GFX8: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 1
    ; GFX8: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV2]], 1
    ; GFX8: [[SEXT_INREG3:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV3]], 1
    ; GFX8: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32), [[SEXT_INREG2]](s32), [[SEXT_INREG3]](s32)
    ; GFX8: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<4 x s32>)
    ; GFX6-LABEL: name: test_sext_inreg_v4s32_1
    ; GFX6: [[COPY:%[0-9]+]]:_(<4 x s32>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    ; GFX6: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<4 x s32>)
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 1
    ; GFX6: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 1
    ; GFX6: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV2]], 1
    ; GFX6: [[SEXT_INREG3:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV3]], 1
    ; GFX6: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32), [[SEXT_INREG2]](s32), [[SEXT_INREG3]](s32)
    ; GFX6: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<4 x s32>)
    %0:_(<4 x s32>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
    %1:_(<4 x s32>) = G_SEXT_INREG %0, 1
    $vgpr0_vgpr1_vgpr2_vgpr3 = COPY %1
...
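
# Note (added comment, not autogenerated): vectors of s32 elements are simply
# scalarized, with one G_SEXT_INREG per element and the results rebuilt with
# G_BUILD_VECTOR.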

---
name: test_sext_inreg_v4s16_1
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GFX9-LABEL: name: test_sext_inreg_v4s16_1
    ; GFX9: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr0_vgpr1
    ; GFX9: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<4 x s16>)
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 15
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY [[C]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY1]](s32), [[C]](s32)
    ; GFX9: [[SHL:%[0-9]+]]:_(<2 x s16>) = G_SHL [[UV]], [[BUILD_VECTOR_TRUNC]](<2 x s16>)
    ; GFX9: [[ASHR:%[0-9]+]]:_(<2 x s16>) = G_ASHR [[SHL]], [[BUILD_VECTOR_TRUNC]](<2 x s16>)
    ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY [[C]](s32)
    ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY [[C]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY2]](s32), [[COPY3]](s32)
    ; GFX9: [[SHL1:%[0-9]+]]:_(<2 x s16>) = G_SHL [[UV1]], [[BUILD_VECTOR_TRUNC1]](<2 x s16>)
    ; GFX9: [[ASHR1:%[0-9]+]]:_(<2 x s16>) = G_ASHR [[SHL1]], [[BUILD_VECTOR_TRUNC1]](<2 x s16>)
    ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[ASHR]](<2 x s16>), [[ASHR1]](<2 x s16>)
    ; GFX9: $vgpr0_vgpr1 = COPY [[CONCAT_VECTORS]](<4 x s16>)
    ; GFX8-LABEL: name: test_sext_inreg_v4s16_1
    ; GFX8: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr0_vgpr1
    ; GFX8: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<4 x s16>)
    ; GFX8: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX8: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; GFX8: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; GFX8: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST1]](s32)
    ; GFX8: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; GFX8: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR1]](s32)
    ; GFX8: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 15
    ; GFX8: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C1]](s16)
    ; GFX8: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[SHL]], [[C1]](s16)
    ; GFX8: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[TRUNC1]], [[C1]](s16)
    ; GFX8: [[ASHR1:%[0-9]+]]:_(s16) = G_ASHR [[SHL1]], [[C1]](s16)
    ; GFX8: [[SHL2:%[0-9]+]]:_(s16) = G_SHL [[TRUNC2]], [[C1]](s16)
    ; GFX8: [[ASHR2:%[0-9]+]]:_(s16) = G_ASHR [[SHL2]], [[C1]](s16)
    ; GFX8: [[SHL3:%[0-9]+]]:_(s16) = G_SHL [[TRUNC3]], [[C1]](s16)
    ; GFX8: [[ASHR3:%[0-9]+]]:_(s16) = G_ASHR [[SHL3]], [[C1]](s16)
    ; GFX8: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR]](s16)
    ; GFX8: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR1]](s16)
    ; GFX8: [[SHL4:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C]](s32)
    ; GFX8: [[OR:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL4]]
    ; GFX8: [[BITCAST2:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; GFX8: [[ZEXT2:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR2]](s16)
    ; GFX8: [[ZEXT3:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR3]](s16)
    ; GFX8: [[SHL5:%[0-9]+]]:_(s32) = G_SHL [[ZEXT3]], [[C]](s32)
    ; GFX8: [[OR1:%[0-9]+]]:_(s32) = G_OR [[ZEXT2]], [[SHL5]]
    ; GFX8: [[BITCAST3:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32)
    ; GFX8: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BITCAST2]](<2 x s16>), [[BITCAST3]](<2 x s16>)
    ; GFX8: $vgpr0_vgpr1 = COPY [[CONCAT_VECTORS]](<4 x s16>)
    ; GFX6-LABEL: name: test_sext_inreg_v4s16_1
    ; GFX6: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr0_vgpr1
    ; GFX6: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<4 x s16>)
    ; GFX6: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX6: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX6: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; GFX6: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32)
    ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY1]], 1
    ; GFX6: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
    ; GFX6: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY2]], 1
    ; GFX6: [[COPY3:%[0-9]+]]:_(s32) = COPY [[BITCAST1]](s32)
    ; GFX6: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY3]], 1
    ; GFX6: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
    ; GFX6: [[SEXT_INREG3:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY4]], 1
    ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; GFX6: [[COPY5:%[0-9]+]]:_(s32) = COPY [[SEXT_INREG]](s32)
    ; GFX6: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY5]], [[C1]]
    ; GFX6: [[COPY6:%[0-9]+]]:_(s32) = COPY [[SEXT_INREG1]](s32)
    ; GFX6: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY6]], [[C1]]
    ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32)
    ; GFX6: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
    ; GFX6: [[BITCAST2:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; GFX6: [[COPY7:%[0-9]+]]:_(s32) = COPY [[SEXT_INREG2]](s32)
    ; GFX6: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY7]], [[C1]]
    ; GFX6: [[COPY8:%[0-9]+]]:_(s32) = COPY [[SEXT_INREG3]](s32)
    ; GFX6: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY8]], [[C1]]
    ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C]](s32)
    ; GFX6: [[OR1:%[0-9]+]]:_(s32) = G_OR [[AND2]], [[SHL1]]
    ; GFX6: [[BITCAST3:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32)
    ; GFX6: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BITCAST2]](<2 x s16>), [[BITCAST3]](<2 x s16>)
    ; GFX6: $vgpr0_vgpr1 = COPY [[CONCAT_VECTORS]](<4 x s16>)
    %0:_(<4 x s16>) = COPY $vgpr0_vgpr1
    %1:_(<4 x s16>) = G_SEXT_INREG %0, 1
    $vgpr0_vgpr1 = COPY %1
...

---
name: test_sext_inreg_v6s16_1
body: |
  bb.0:

    ; GFX9-LABEL: name: test_sext_inreg_v6s16_1
    ; GFX9: [[DEF:%[0-9]+]]:_(<6 x s16>) = G_IMPLICIT_DEF
    ; GFX9: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF]](<6 x s16>)
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 15
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY [[C]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY]](s32), [[C]](s32)
    ; GFX9: [[SHL:%[0-9]+]]:_(<2 x s16>) = G_SHL [[UV]], [[BUILD_VECTOR_TRUNC]](<2 x s16>)
    ; GFX9: [[ASHR:%[0-9]+]]:_(<2 x s16>) = G_ASHR [[SHL]], [[BUILD_VECTOR_TRUNC]](<2 x s16>)
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY [[C]](s32)
    ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY [[C]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY1]](s32), [[COPY2]](s32)
    ; GFX9: [[SHL1:%[0-9]+]]:_(<2 x s16>) = G_SHL [[UV1]], [[BUILD_VECTOR_TRUNC1]](<2 x s16>)
    ; GFX9: [[ASHR1:%[0-9]+]]:_(<2 x s16>) = G_ASHR [[SHL1]], [[BUILD_VECTOR_TRUNC1]](<2 x s16>)
    ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY [[C]](s32)
    ; GFX9: [[COPY4:%[0-9]+]]:_(s32) = COPY [[C]](s32)
    ; GFX9: [[BUILD_VECTOR_TRUNC2:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR_TRUNC [[COPY3]](s32), [[COPY4]](s32)
    ; GFX9: [[SHL2:%[0-9]+]]:_(<2 x s16>) = G_SHL [[UV2]], [[BUILD_VECTOR_TRUNC2]](<2 x s16>)
    ; GFX9: [[ASHR2:%[0-9]+]]:_(<2 x s16>) = G_ASHR [[SHL2]], [[BUILD_VECTOR_TRUNC2]](<2 x s16>)
    ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[ASHR]](<2 x s16>), [[ASHR1]](<2 x s16>), [[ASHR2]](<2 x s16>)
    ; GFX9: S_ENDPGM 0, implicit [[CONCAT_VECTORS]](<6 x s16>)
    ; GFX8-LABEL: name: test_sext_inreg_v6s16_1
    ; GFX8: [[DEF:%[0-9]+]]:_(<6 x s16>) = G_IMPLICIT_DEF
    ; GFX8: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF]](<6 x s16>)
    ; GFX8: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; GFX8: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX8: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX8: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; GFX8: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; GFX8: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST1]](s32)
    ; GFX8: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; GFX8: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR1]](s32)
    ; GFX8: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV2]](<2 x s16>)
    ; GFX8: [[TRUNC4:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST2]](s32)
    ; GFX8: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; GFX8: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR2]](s32)
    ; GFX8: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 15
    ; GFX8: [[SHL:%[0-9]+]]:_(s16) = G_SHL [[TRUNC]], [[C1]](s16)
    ; GFX8: [[ASHR:%[0-9]+]]:_(s16) = G_ASHR [[SHL]], [[C1]](s16)
    ; GFX8: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[TRUNC1]], [[C1]](s16)
    ; GFX8: [[ASHR1:%[0-9]+]]:_(s16) = G_ASHR [[SHL1]], [[C1]](s16)
    ; GFX8: [[SHL2:%[0-9]+]]:_(s16) = G_SHL [[TRUNC2]], [[C1]](s16)
    ; GFX8: [[ASHR2:%[0-9]+]]:_(s16) = G_ASHR [[SHL2]], [[C1]](s16)
    ; GFX8: [[SHL3:%[0-9]+]]:_(s16) = G_SHL [[TRUNC3]], [[C1]](s16)
    ; GFX8: [[ASHR3:%[0-9]+]]:_(s16) = G_ASHR [[SHL3]], [[C1]](s16)
    ; GFX8: [[SHL4:%[0-9]+]]:_(s16) = G_SHL [[TRUNC4]], [[C1]](s16)
; GFX8: [[ASHR4:%[0-9]+]]:_(s16) = G_ASHR [[SHL4]], [[C1]](s16) 1067 ; GFX8: [[SHL5:%[0-9]+]]:_(s16) = G_SHL [[TRUNC5]], [[C1]](s16) 1068 ; GFX8: [[ASHR5:%[0-9]+]]:_(s16) = G_ASHR [[SHL5]], [[C1]](s16) 1069 ; GFX8: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR]](s16) 1070 ; GFX8: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR1]](s16) 1071 ; GFX8: [[SHL6:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C]](s32) 1072 ; GFX8: [[OR:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL6]] 1073 ; GFX8: [[BITCAST3:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32) 1074 ; GFX8: [[ZEXT2:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR2]](s16) 1075 ; GFX8: [[ZEXT3:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR3]](s16) 1076 ; GFX8: [[SHL7:%[0-9]+]]:_(s32) = G_SHL [[ZEXT3]], [[C]](s32) 1077 ; GFX8: [[OR1:%[0-9]+]]:_(s32) = G_OR [[ZEXT2]], [[SHL7]] 1078 ; GFX8: [[BITCAST4:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32) 1079 ; GFX8: [[ZEXT4:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR4]](s16) 1080 ; GFX8: [[ZEXT5:%[0-9]+]]:_(s32) = G_ZEXT [[ASHR5]](s16) 1081 ; GFX8: [[SHL8:%[0-9]+]]:_(s32) = G_SHL [[ZEXT5]], [[C]](s32) 1082 ; GFX8: [[OR2:%[0-9]+]]:_(s32) = G_OR [[ZEXT4]], [[SHL8]] 1083 ; GFX8: [[BITCAST5:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR2]](s32) 1084 ; GFX8: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[BITCAST3]](<2 x s16>), [[BITCAST4]](<2 x s16>), [[BITCAST5]](<2 x s16>) 1085 ; GFX8: S_ENDPGM 0, implicit [[CONCAT_VECTORS]](<6 x s16>) 1086 ; GFX6-LABEL: name: test_sext_inreg_v6s16_1 1087 ; GFX6: [[DEF:%[0-9]+]]:_(<6 x s16>) = G_IMPLICIT_DEF 1088 ; GFX6: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF]](<6 x s16>) 1089 ; GFX6: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>) 1090 ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16 1091 ; GFX6: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32) 1092 ; GFX6: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>) 1093 ; GFX6: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32) 1094 ; GFX6: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV2]](<2 x s16>) 1095 ; GFX6: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32) 1096 ; GFX6: [[COPY:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32) 1097 ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY]], 1 1098 ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32) 1099 ; GFX6: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY1]], 1 1100 ; GFX6: [[COPY2:%[0-9]+]]:_(s32) = COPY [[BITCAST1]](s32) 1101 ; GFX6: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY2]], 1 1102 ; GFX6: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32) 1103 ; GFX6: [[SEXT_INREG3:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY3]], 1 1104 ; GFX6: [[COPY4:%[0-9]+]]:_(s32) = COPY [[BITCAST2]](s32) 1105 ; GFX6: [[SEXT_INREG4:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY4]], 1 1106 ; GFX6: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32) 1107 ; GFX6: [[SEXT_INREG5:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY5]], 1 1108 ; GFX6: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535 1109 ; GFX6: [[COPY6:%[0-9]+]]:_(s32) = COPY [[SEXT_INREG]](s32) 1110 ; GFX6: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY6]], [[C1]] 1111 ; GFX6: [[COPY7:%[0-9]+]]:_(s32) = COPY [[SEXT_INREG1]](s32) 1112 ; GFX6: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY7]], [[C1]] 1113 ; GFX6: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32) 1114 ; GFX6: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]] 1115 ; GFX6: [[BITCAST3:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32) 1116 ; GFX6: [[COPY8:%[0-9]+]]:_(s32) = COPY [[SEXT_INREG2]](s32) 1117 ; GFX6: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY8]], [[C1]] 1118 ; 
GFX6: [[COPY9:%[0-9]+]]:_(s32) = COPY [[SEXT_INREG3]](s32) 1119 ; GFX6: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY9]], [[C1]] 1120 ; GFX6: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C]](s32) 1121 ; GFX6: [[OR1:%[0-9]+]]:_(s32) = G_OR [[AND2]], [[SHL1]] 1122 ; GFX6: [[BITCAST4:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32) 1123 ; GFX6: [[COPY10:%[0-9]+]]:_(s32) = COPY [[SEXT_INREG4]](s32) 1124 ; GFX6: [[AND4:%[0-9]+]]:_(s32) = G_AND [[COPY10]], [[C1]] 1125 ; GFX6: [[COPY11:%[0-9]+]]:_(s32) = COPY [[SEXT_INREG5]](s32) 1126 ; GFX6: [[AND5:%[0-9]+]]:_(s32) = G_AND [[COPY11]], [[C1]] 1127 ; GFX6: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND5]], [[C]](s32) 1128 ; GFX6: [[OR2:%[0-9]+]]:_(s32) = G_OR [[AND4]], [[SHL2]] 1129 ; GFX6: [[BITCAST5:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR2]](s32) 1130 ; GFX6: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[BITCAST3]](<2 x s16>), [[BITCAST4]](<2 x s16>), [[BITCAST5]](<2 x s16>) 1131 ; GFX6: S_ENDPGM 0, implicit [[CONCAT_VECTORS]](<6 x s16>) 1132 %0:_(<6 x s16>) = G_IMPLICIT_DEF 1133 %1:_(<6 x s16>) = G_SEXT_INREG %0, 1 1134 S_ENDPGM 0, implicit %1 1135 1136... 1137 1138# FIXME: Should scalarize first 1139--- 1140name: test_sext_inreg_v2s128_1 1141body: | 1142 bb.0: 1143 liveins: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 1144 1145 ; GFX9-LABEL: name: test_sext_inreg_v2s128_1 1146 ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s128>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 1147 ; GFX9: [[UV:%[0-9]+]]:_(s128), [[UV1:%[0-9]+]]:_(s128) = G_UNMERGE_VALUES [[COPY]](<2 x s128>) 1148 ; GFX9: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[UV]](s128) 1149 ; GFX9: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 1 1150 ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63 1151 ; GFX9: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG]], [[C]](s32) 1152 ; GFX9: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SEXT_INREG]](s64), [[ASHR]](s64) 1153 ; GFX9: [[TRUNC1:%[0-9]+]]:_(s64) = G_TRUNC [[UV1]](s128) 1154 ; GFX9: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC1]], 1 1155 ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY [[C]](s32) 1156 ; GFX9: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG1]], [[COPY1]](s32) 1157 ; GFX9: [[MV1:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SEXT_INREG1]](s64), [[ASHR1]](s64) 1158 ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s128>) = G_BUILD_VECTOR [[MV]](s128), [[MV1]](s128) 1159 ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[BUILD_VECTOR]](<2 x s128>) 1160 ; GFX8-LABEL: name: test_sext_inreg_v2s128_1 1161 ; GFX8: [[COPY:%[0-9]+]]:_(<2 x s128>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 1162 ; GFX8: [[UV:%[0-9]+]]:_(s128), [[UV1:%[0-9]+]]:_(s128) = G_UNMERGE_VALUES [[COPY]](<2 x s128>) 1163 ; GFX8: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[UV]](s128) 1164 ; GFX8: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 1 1165 ; GFX8: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63 1166 ; GFX8: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG]], [[C]](s32) 1167 ; GFX8: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SEXT_INREG]](s64), [[ASHR]](s64) 1168 ; GFX8: [[TRUNC1:%[0-9]+]]:_(s64) = G_TRUNC [[UV1]](s128) 1169 ; GFX8: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC1]], 1 1170 ; GFX8: [[COPY1:%[0-9]+]]:_(s32) = COPY [[C]](s32) 1171 ; GFX8: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG1]], [[COPY1]](s32) 1172 ; GFX8: [[MV1:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SEXT_INREG1]](s64), [[ASHR1]](s64) 1173 ; GFX8: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s128>) = G_BUILD_VECTOR [[MV]](s128), [[MV1]](s128) 1174 ; GFX8: 
$vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[BUILD_VECTOR]](<2 x s128>) 1175 ; GFX6-LABEL: name: test_sext_inreg_v2s128_1 1176 ; GFX6: [[COPY:%[0-9]+]]:_(<2 x s128>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 1177 ; GFX6: [[UV:%[0-9]+]]:_(s128), [[UV1:%[0-9]+]]:_(s128) = G_UNMERGE_VALUES [[COPY]](<2 x s128>) 1178 ; GFX6: [[TRUNC:%[0-9]+]]:_(s64) = G_TRUNC [[UV]](s128) 1179 ; GFX6: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC]], 1 1180 ; GFX6: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 63 1181 ; GFX6: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG]], [[C]](s32) 1182 ; GFX6: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SEXT_INREG]](s64), [[ASHR]](s64) 1183 ; GFX6: [[TRUNC1:%[0-9]+]]:_(s64) = G_TRUNC [[UV1]](s128) 1184 ; GFX6: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[TRUNC1]], 1 1185 ; GFX6: [[COPY1:%[0-9]+]]:_(s32) = COPY [[C]](s32) 1186 ; GFX6: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SEXT_INREG1]], [[COPY1]](s32) 1187 ; GFX6: [[MV1:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[SEXT_INREG1]](s64), [[ASHR1]](s64) 1188 ; GFX6: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s128>) = G_BUILD_VECTOR [[MV]](s128), [[MV1]](s128) 1189 ; GFX6: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[BUILD_VECTOR]](<2 x s128>) 1190 %0:_(<2 x s128>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 1191 %1:_(<2 x s128>) = G_SEXT_INREG %0, 1 1192 $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY %1 1193... 1194