• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
2# RUN: llc -mtriple=amdgcn-mesa-mesa3d -mcpu=fiji -run-pass=legalizer -global-isel-abort=0 %s -o - | FileCheck %s
3
4---
5name: extract_vector_elt_0_v2i32
6
7body: |
8  bb.0:
9    liveins: $vgpr0_vgpr1
10    ; CHECK-LABEL: name: extract_vector_elt_0_v2i32
11    ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
12    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY]](<2 x s32>), 0
13    ; CHECK: $vgpr0 = COPY [[EXTRACT]](s32)
14    %0:_(<2 x s32>) = COPY $vgpr0_vgpr1
15    %1:_(s32) = G_CONSTANT i32 0
16    %2:_(s32) = G_EXTRACT_VECTOR_ELT %0, %1
17    $vgpr0 = COPY %2
18...
19---
20name: extract_vector_elt_1_v2i32
21
22body: |
23  bb.0:
24    liveins: $vgpr0_vgpr1
25    ; CHECK-LABEL: name: extract_vector_elt_1_v2i32
26    ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
27    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY]](<2 x s32>), 32
28    ; CHECK: $vgpr0 = COPY [[EXTRACT]](s32)
29    %0:_(<2 x s32>) = COPY $vgpr0_vgpr1
30    %1:_(s32) = G_CONSTANT i32 1
31    %2:_(s32) = G_EXTRACT_VECTOR_ELT %0, %1
32    $vgpr0 = COPY %2
33...
34---
35name: extract_vector_elt_2_v2i32
36
37body: |
38  bb.0:
39    liveins: $vgpr0_vgpr1
40    ; CHECK-LABEL: name: extract_vector_elt_2_v2i32
41    ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
42    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY]](<2 x s32>), 32
43    ; CHECK: $vgpr0 = COPY [[EXTRACT]](s32)
44    %0:_(<2 x s32>) = COPY $vgpr0_vgpr1
45    %1:_(s32) = G_CONSTANT i32 1
46    %2:_(s32) = G_EXTRACT_VECTOR_ELT %0, %1
47    $vgpr0 = COPY %2
48...
49---
50name: extract_vector_elt_0_v3i32
51
52body: |
53  bb.0:
54    liveins: $vgpr0_vgpr1_vgpr2
55    ; CHECK-LABEL: name: extract_vector_elt_0_v3i32
56    ; CHECK: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
57    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY]](<3 x s32>), 0
58    ; CHECK: $vgpr0 = COPY [[EXTRACT]](s32)
59    %0:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
60    %1:_(s32) = G_CONSTANT i32 0
61    %2:_(s32) = G_EXTRACT_VECTOR_ELT %0, %1
62    $vgpr0 = COPY %2
63...
64---
65name: extract_vector_elt_0_v4i32
66
67body: |
68  bb.0:
69    liveins: $vgpr0_vgpr1_vgpr2_vgpr3
70    ; CHECK-LABEL: name: extract_vector_elt_0_v4i32
71    ; CHECK: [[COPY:%[0-9]+]]:_(<4 x s32>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
72    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY]](<4 x s32>), 0
73    ; CHECK: $vgpr0 = COPY [[EXTRACT]](s32)
74    %0:_(<4 x s32>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
75    %1:_(s32) = G_CONSTANT i32 0
76    %2:_(s32) = G_EXTRACT_VECTOR_ELT %0, %1
77    $vgpr0 = COPY %2
78...
79
80---
81name: extract_vector_elt_0_v5i32
82
83body: |
84  bb.0:
85    liveins: $vgpr0
86    ; CHECK-LABEL: name: extract_vector_elt_0_v5i32
87    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
88    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
89    ; CHECK: $vgpr0 = COPY [[COPY1]](s32)
90    %0:_(s32) = COPY $vgpr0
91    %1:_(<5 x s32>) = G_BUILD_VECTOR %0, %0, %0, %0, %0
92    %2:_(s32) = G_CONSTANT i32 0
93    %3:_(s32) = G_EXTRACT_VECTOR_ELT %1, %2
94    $vgpr0 = COPY %3
95...
96
97---
98name: extract_vector_elt_0_v6i32
99
100body: |
101  bb.0:
102    liveins: $vgpr0
103    ; CHECK-LABEL: name: extract_vector_elt_0_v6i32
104    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
105    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
106    ; CHECK: $vgpr0 = COPY [[COPY1]](s32)
107    %0:_(s32) = COPY $vgpr0
108    %1:_(<6 x s32>) = G_BUILD_VECTOR %0, %0, %0, %0, %0, %0
109    %2:_(s32) = G_CONSTANT i32 0
110    %3:_(s32) = G_EXTRACT_VECTOR_ELT %1, %2
111    $vgpr0 = COPY %3
112...
113
114---
115name: extract_vector_elt_0_v7i32
116
117body: |
118  bb.0:
119    liveins: $vgpr0
120    ; CHECK-LABEL: name: extract_vector_elt_0_v7i32
121    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
122    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
123    ; CHECK: $vgpr0 = COPY [[COPY1]](s32)
124    %0:_(s32) = COPY $vgpr0
125    %1:_(<7 x s32>) = G_BUILD_VECTOR %0, %0, %0, %0, %0, %0, %0
126    %2:_(s32) = G_CONSTANT i32 0
127    %3:_(s32) = G_EXTRACT_VECTOR_ELT %1, %2
128    $vgpr0 = COPY %3
129...
130
131---
132name: extract_vector_elt_0_v8i32
133
134body: |
135  bb.0:
136    liveins: $vgpr0
137    ; CHECK-LABEL: name: extract_vector_elt_0_v8i32
138    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
139    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
140    ; CHECK: $vgpr0 = COPY [[COPY1]](s32)
141    %0:_(s32) = COPY $vgpr0
142    %1:_(<8 x s32>) = G_BUILD_VECTOR %0, %0, %0, %0, %0, %0, %0, %0
143    %2:_(s32) = G_CONSTANT i32 0
144    %3:_(s32) = G_EXTRACT_VECTOR_ELT %1, %2
145    $vgpr0 = COPY %3
146...
147
148---
149name: extract_vector_elt_0_v16i32
150
151body: |
152  bb.0:
153    liveins: $vgpr0
154    ; CHECK-LABEL: name: extract_vector_elt_0_v16i32
155    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
156    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
157    ; CHECK: $vgpr0 = COPY [[COPY1]](s32)
158    %0:_(s32) = COPY $vgpr0
159    %1:_(<16 x s32>) = G_BUILD_VECTOR %0, %0, %0, %0, %0, %0, %0, %0, %0, %0, %0, %0, %0, %0, %0, %0
160    %2:_(s32) = G_CONSTANT i32 0
161    %3:_(s32) = G_EXTRACT_VECTOR_ELT %1, %2
162    $vgpr0 = COPY %3
163...
164
165---
166name: extract_vector_elt_var_v2i32
167
168body: |
169  bb.0:
170    liveins: $vgpr0_vgpr1, $vgpr2
171    ; CHECK-LABEL: name: extract_vector_elt_var_v2i32
172    ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
173    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr2
174    ; CHECK: [[EVEC:%[0-9]+]]:_(s32) = G_EXTRACT_VECTOR_ELT [[COPY]](<2 x s32>), [[COPY1]](s32)
175    ; CHECK: $vgpr0 = COPY [[EVEC]](s32)
176    %0:_(<2 x s32>) = COPY $vgpr0_vgpr1
177    %1:_(s32) = COPY $vgpr2
178    %2:_(s32) = G_EXTRACT_VECTOR_ELT %0, %1
179    $vgpr0 = COPY %2
180...
181
182---
183name: extract_vector_elt_var_v8i32
184
185body: |
186  bb.0:
187    liveins: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
188    ; CHECK-LABEL: name: extract_vector_elt_var_v8i32
189    ; CHECK: [[COPY:%[0-9]+]]:_(<8 x s32>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
190    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr2
191    ; CHECK: [[EVEC:%[0-9]+]]:_(s32) = G_EXTRACT_VECTOR_ELT [[COPY]](<8 x s32>), [[COPY1]](s32)
192    ; CHECK: $vgpr0 = COPY [[EVEC]](s32)
193    %0:_(<8 x s32>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
194    %1:_(s32) = COPY $vgpr2
195    %2:_(s32) = G_EXTRACT_VECTOR_ELT %0, %1
196    $vgpr0 = COPY %2
197...
198
199
200---
201name: extract_vector_elt_0_v2i8_i32
202
203body: |
204  bb.0:
205
206    ; CHECK-LABEL: name: extract_vector_elt_0_v2i8_i32
207    ; CHECK: [[DEF:%[0-9]+]]:_(<2 x s32>) = G_IMPLICIT_DEF
208    ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY [[DEF]](<2 x s32>)
209    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
210    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 8
211    ; CHECK: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 8
212    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32)
213    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s32) = G_EXTRACT [[BUILD_VECTOR]](<2 x s32>), 0
214    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[EXTRACT]](s32)
215    ; CHECK: $vgpr0 = COPY [[COPY1]](s32)
216    %0:_(<2 x s8>) = G_IMPLICIT_DEF
217    %1:_(s32) = G_CONSTANT i32 0
218    %2:_(s8) = G_EXTRACT_VECTOR_ELT %0, %1
219    %3:_(s32) = G_ANYEXT %2
220    $vgpr0 = COPY %3
221...
222
223---
224name: extract_vector_elt_0_v2i16_i32
225
226body: |
227  bb.0:
228
229    ; CHECK-LABEL: name: extract_vector_elt_0_v2i16_i32
230    ; CHECK: [[DEF:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF
231    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
232    ; CHECK: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[DEF]](<2 x s16>)
233    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
234    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
235    ; CHECK: $vgpr0 = COPY [[COPY]](s32)
236    %0:_(<2 x s16>) = G_IMPLICIT_DEF
237    %1:_(s32) = G_CONSTANT i32 0
238    %2:_(s16) = G_EXTRACT_VECTOR_ELT %0, %1
239    %3:_(s32) = G_ANYEXT %2
240    $vgpr0 = COPY %3
241...
242
243---
244name: extract_vector_elt_0_v2i1_i32
245
246body: |
247  bb.0:
248
249    ; CHECK-LABEL: name: extract_vector_elt_0_v2i1_i32
250    ; CHECK: [[DEF:%[0-9]+]]:_(<2 x s32>) = G_IMPLICIT_DEF
251    ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY [[DEF]](<2 x s32>)
252    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
253    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 1
254    ; CHECK: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 1
255    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32)
256    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s32) = G_EXTRACT [[BUILD_VECTOR]](<2 x s32>), 0
257    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[EXTRACT]](s32)
258    ; CHECK: $vgpr0 = COPY [[COPY1]](s32)
259    %0:_(<2 x s1>) = G_IMPLICIT_DEF
260    %1:_(s32) = G_CONSTANT i32 0
261    %2:_(s1) = G_EXTRACT_VECTOR_ELT %0, %1
262    %3:_(s32) = G_ANYEXT %2
263    $vgpr0 = COPY %3
264...
265
266---
267name: extract_vector_elt_0_v2i1_i1
268
269body: |
270  bb.0:
271
272    ; CHECK-LABEL: name: extract_vector_elt_0_v2i1_i1
273    ; CHECK: [[DEF:%[0-9]+]]:_(<2 x s32>) = G_IMPLICIT_DEF
274    ; CHECK: [[C:%[0-9]+]]:_(s1) = G_CONSTANT i1 false
275    ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY [[DEF]](<2 x s32>)
276    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
277    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 1
278    ; CHECK: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 1
279    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32)
280    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s32) = G_EXTRACT [[BUILD_VECTOR]](<2 x s32>), 0
281    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[EXTRACT]](s32)
282    ; CHECK: $vgpr0 = COPY [[COPY1]](s32)
283    %0:_(<2 x s1>) = G_IMPLICIT_DEF
284    %1:_(s1) = G_CONSTANT i1 false
285    %2:_(s1) = G_EXTRACT_VECTOR_ELT %0, %1
286    %3:_(s32) = G_ANYEXT %2
287    $vgpr0 = COPY %3
288...
289
290---
291name: extract_vector_elt_v2s8_varidx_i32
292
293body: |
294  bb.0:
295    liveins: $vgpr0, $vgpr1
296
297    ; CHECK-LABEL: name: extract_vector_elt_v2s8_varidx_i32
298    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
299    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
300    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
301    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32)
302    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
303    ; CHECK: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C1]](s32)
304    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
305    ; CHECK: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C2]](s32)
306    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
307    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
308    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[COPY2]](s32)
309    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY4]], 8
310    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY [[COPY3]](s32)
311    ; CHECK: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY5]], 8
312    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32)
313    ; CHECK: [[EVEC:%[0-9]+]]:_(s32) = G_EXTRACT_VECTOR_ELT [[BUILD_VECTOR]](<2 x s32>), [[COPY1]](s32)
314    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY [[EVEC]](s32)
315    ; CHECK: $vgpr0 = COPY [[COPY6]](s32)
316    %0:_(s32) = COPY $vgpr0
317    %1:_(s32) = COPY $vgpr1
318    %2:_(s16) = G_TRUNC %0
319    %3:_(<2 x s8>) = G_BITCAST %2
320    %4:_(s8) = G_EXTRACT_VECTOR_ELT %3, %1
321    %5:_(s32) = G_ANYEXT %4
322    $vgpr0 = COPY %5
323...
324
325---
326name: extract_vector_elt_v2s8_constidx_0_i32
327
328body: |
329  bb.0:
330    liveins: $vgpr0
331
332    ; CHECK-LABEL: name: extract_vector_elt_v2s8_constidx_0_i32
333    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
334    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
335    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
336    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32)
337    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
338    ; CHECK: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C1]](s32)
339    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
340    ; CHECK: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C2]](s32)
341    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
342    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
343    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[COPY2]](s32)
344    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY4]], 8
345    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY [[COPY3]](s32)
346    ; CHECK: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY5]], 8
347    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32)
348    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s32) = G_EXTRACT [[BUILD_VECTOR]](<2 x s32>), 0
349    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY [[EXTRACT]](s32)
350    ; CHECK: $vgpr0 = COPY [[COPY6]](s32)
351    %0:_(s32) = COPY $vgpr0
352    %1:_(s32) = COPY $vgpr1
353    %2:_(s16) = G_TRUNC %0
354    %3:_(<2 x s8>) = G_BITCAST %2
355    %4:_(s32) = G_CONSTANT i32 0
356    %5:_(s8) = G_EXTRACT_VECTOR_ELT %3, %4
357    %6:_(s32) = G_ANYEXT %5
358    $vgpr0 = COPY %6
359...
360
361---
362name: extract_vector_elt_v2s8_constidx_1_i32
363
364body: |
365  bb.0:
366    liveins: $vgpr0
367
368    ; CHECK-LABEL: name: extract_vector_elt_v2s8_constidx_1_i32
369    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
370    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
371    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
372    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32)
373    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
374    ; CHECK: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C1]](s32)
375    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
376    ; CHECK: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C2]](s32)
377    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
378    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
379    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[COPY2]](s32)
380    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY4]], 8
381    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY [[COPY3]](s32)
382    ; CHECK: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY5]], 8
383    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32)
384    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s32) = G_EXTRACT [[BUILD_VECTOR]](<2 x s32>), 32
385    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY [[EXTRACT]](s32)
386    ; CHECK: $vgpr0 = COPY [[COPY6]](s32)
387    %0:_(s32) = COPY $vgpr0
388    %1:_(s32) = COPY $vgpr1
389    %2:_(s16) = G_TRUNC %0
390    %3:_(<2 x s8>) = G_BITCAST %2
391    %4:_(s32) = G_CONSTANT i32 1
392    %5:_(s8) = G_EXTRACT_VECTOR_ELT %3, %4
393    %6:_(s32) = G_ANYEXT %5
394    $vgpr0 = COPY %6
395...
396
397---
398name: extract_vector_elt_v4s4_varidx_i32
399
400body: |
401  bb.0:
402    liveins: $vgpr0, $vgpr1
403
404    ; CHECK-LABEL: name: extract_vector_elt_v4s4_varidx_i32
405    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
406    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
407    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
408    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32)
409    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
410    ; CHECK: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C1]](s32)
411    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 12
412    ; CHECK: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C2]](s32)
413    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
414    ; CHECK: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C3]](s32)
415    ; CHECK: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 20
416    ; CHECK: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C4]](s32)
417    ; CHECK: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
418    ; CHECK: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C5]](s32)
419    ; CHECK: [[C6:%[0-9]+]]:_(s32) = G_CONSTANT i32 28
420    ; CHECK: [[LSHR6:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C6]](s32)
421    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
422    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
423    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
424    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
425    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY [[COPY2]](s32)
426    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY6]], 4
427    ; CHECK: [[COPY7:%[0-9]+]]:_(s32) = COPY [[COPY3]](s32)
428    ; CHECK: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY7]], 4
429    ; CHECK: [[COPY8:%[0-9]+]]:_(s32) = COPY [[COPY4]](s32)
430    ; CHECK: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY8]], 4
431    ; CHECK: [[COPY9:%[0-9]+]]:_(s32) = COPY [[COPY5]](s32)
432    ; CHECK: [[SEXT_INREG3:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY9]], 4
433    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32), [[SEXT_INREG2]](s32), [[SEXT_INREG3]](s32)
434    ; CHECK: [[EVEC:%[0-9]+]]:_(s32) = G_EXTRACT_VECTOR_ELT [[BUILD_VECTOR]](<4 x s32>), [[COPY1]](s32)
435    ; CHECK: [[COPY10:%[0-9]+]]:_(s32) = COPY [[EVEC]](s32)
436    ; CHECK: $vgpr0 = COPY [[COPY10]](s32)
437    %0:_(s32) = COPY $vgpr0
438    %1:_(s32) = COPY $vgpr1
439    %2:_(s16) = G_TRUNC %0
440    %3:_(<4 x s4>) = G_BITCAST %2
441    %4:_(s4) = G_EXTRACT_VECTOR_ELT %3, %1
442    %5:_(s32) = G_ANYEXT %4
443    $vgpr0 = COPY %5
444...
445
446---
447name: extract_vector_elt_v3s8_varidx_i32
448
449body: |
450  bb.0:
451    liveins: $vgpr0_vgpr1_vgpr2, $vgpr3
452
453    ; CHECK-LABEL: name: extract_vector_elt_v3s8_varidx_i32
454    ; CHECK: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
455    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr3
456    ; CHECK: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY [[COPY]](<3 x s32>)
457    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
458    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 8
459    ; CHECK: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 8
460    ; CHECK: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV2]], 8
461    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32), [[SEXT_INREG2]](s32)
462    ; CHECK: [[EVEC:%[0-9]+]]:_(s32) = G_EXTRACT_VECTOR_ELT [[BUILD_VECTOR]](<3 x s32>), [[COPY1]](s32)
463    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[EVEC]](s32)
464    ; CHECK: $vgpr0 = COPY [[COPY3]](s32)
465    %0:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
466    %1:_(s32) = COPY $vgpr3
467    %2:_(<3 x s8>) = G_TRUNC %0
468    %3:_(s8) = G_EXTRACT_VECTOR_ELT %2, %1
469    %4:_(s32) = G_ANYEXT %3
470    $vgpr0 = COPY %4
471...
472
473---
474name: extract_vector_elt_v4s8_varidx_i32
475
476body: |
477  bb.0:
478    liveins: $vgpr0, $vgpr1
479
480    ; CHECK-LABEL: name: extract_vector_elt_v4s8_varidx_i32
481    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
482    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
483    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
484    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32)
485    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
486    ; CHECK: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C1]](s32)
487    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
488    ; CHECK: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C2]](s32)
489    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
490    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
491    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C3]]
492    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
493    ; CHECK: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C3]]
494    ; CHECK: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32)
495    ; CHECK: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
496    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
497    ; CHECK: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C3]]
498    ; CHECK: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C1]](s32)
499    ; CHECK: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
500    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
501    ; CHECK: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY5]], [[C3]]
502    ; CHECK: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C2]](s32)
503    ; CHECK: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[SHL2]]
504    ; CHECK: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 3
505    ; CHECK: [[AND4:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C4]]
506    ; CHECK: [[SHL3:%[0-9]+]]:_(s32) = G_SHL [[AND4]], [[C4]](s32)
507    ; CHECK: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[OR2]], [[SHL3]](s32)
508    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
509    ; CHECK: $vgpr0 = COPY [[COPY6]](s32)
510    %0:_(s32) = COPY $vgpr0
511    %1:_(s32) = COPY $vgpr1
512    %2:_(<4 x s8>) = G_BITCAST %0
513    %3:_(s8) = G_EXTRACT_VECTOR_ELT %2, %1
514    %4:_(s32) = G_ANYEXT %3
515    $vgpr0 = COPY %4
516...
517
518---
519name: extract_vector_elt_v4s8_constidx_0_i32
520
521body: |
522  bb.0:
523    liveins: $vgpr0
524
525    ; CHECK-LABEL: name: extract_vector_elt_v4s8_constidx_0_i32
526    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
527    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
528    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32)
529    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
530    ; CHECK: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C1]](s32)
531    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
532    ; CHECK: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C2]](s32)
533    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
534    ; CHECK: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
535    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
536    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C4]]
537    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
538    ; CHECK: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C4]]
539    ; CHECK: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32)
540    ; CHECK: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
541    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
542    ; CHECK: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C4]]
543    ; CHECK: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C1]](s32)
544    ; CHECK: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
545    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
546    ; CHECK: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C4]]
547    ; CHECK: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C2]](s32)
548    ; CHECK: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[SHL2]]
549    ; CHECK: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[OR2]], [[C3]](s32)
550    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
551    ; CHECK: $vgpr0 = COPY [[COPY5]](s32)
552    %0:_(s32) = COPY $vgpr0
553    %1:_(<4 x s8>) = G_BITCAST %0
554    %2:_(s32) = G_CONSTANT i32 0
555    %3:_(s8) = G_EXTRACT_VECTOR_ELT %1, %2
556    %4:_(s32) = G_ANYEXT %3
557    $vgpr0 = COPY %4
558...
559
560---
561name: extract_vector_elt_v4s8_constidx_1_i32
562
563body: |
564  bb.0:
565    liveins: $vgpr0
566
567    ; CHECK-LABEL: name: extract_vector_elt_v4s8_constidx_1_i32
568    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
569    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
570    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32)
571    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
572    ; CHECK: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C1]](s32)
573    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
574    ; CHECK: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C2]](s32)
575    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
576    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
577    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C3]]
578    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
579    ; CHECK: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C3]]
580    ; CHECK: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32)
581    ; CHECK: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
582    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
583    ; CHECK: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C3]]
584    ; CHECK: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C1]](s32)
585    ; CHECK: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
586    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
587    ; CHECK: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C3]]
588    ; CHECK: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C2]](s32)
589    ; CHECK: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[SHL2]]
590    ; CHECK: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[OR2]], [[C]](s32)
591    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
592    ; CHECK: $vgpr0 = COPY [[COPY5]](s32)
593    %0:_(s32) = COPY $vgpr0
594    %1:_(<4 x s8>) = G_BITCAST %0
595    %2:_(s32) = G_CONSTANT i32 1
596    %3:_(s8) = G_EXTRACT_VECTOR_ELT %1, %2
597    %4:_(s32) = G_ANYEXT %3
598    $vgpr0 = COPY %4
599...
600
601---
602name: extract_vector_elt_v4s8_constidx_2_i32
603
604body: |
605  bb.0:
606    liveins: $vgpr0
607
608    ; CHECK-LABEL: name: extract_vector_elt_v4s8_constidx_2_i32
609    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
610    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
611    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32)
612    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
613    ; CHECK: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C1]](s32)
614    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
615    ; CHECK: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C2]](s32)
616    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
617    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
618    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C3]]
619    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
620    ; CHECK: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C3]]
621    ; CHECK: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32)
622    ; CHECK: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
623    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
624    ; CHECK: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C3]]
625    ; CHECK: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C1]](s32)
626    ; CHECK: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
627    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
628    ; CHECK: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C3]]
629    ; CHECK: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C2]](s32)
630    ; CHECK: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[SHL2]]
631    ; CHECK: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[OR2]], [[C1]](s32)
632    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
633    ; CHECK: $vgpr0 = COPY [[COPY5]](s32)
634    %0:_(s32) = COPY $vgpr0
635    %1:_(<4 x s8>) = G_BITCAST %0
636    %2:_(s32) = G_CONSTANT i32 2
637    %3:_(s8) = G_EXTRACT_VECTOR_ELT %1, %2
638    %4:_(s32) = G_ANYEXT %3
639    $vgpr0 = COPY %4
640...
641
642---
643name: extract_vector_elt_v4s8_constidx_3_i32
644
645body: |
646  bb.0:
647    liveins: $vgpr0
648
649    ; CHECK-LABEL: name: extract_vector_elt_v4s8_constidx_3_i32
650    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
651    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
652    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32)
653    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
654    ; CHECK: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C1]](s32)
655    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
656    ; CHECK: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C2]](s32)
657    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
658    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
659    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C3]]
660    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
661    ; CHECK: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C3]]
662    ; CHECK: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32)
663    ; CHECK: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
664    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
665    ; CHECK: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C3]]
666    ; CHECK: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C1]](s32)
667    ; CHECK: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
668    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
669    ; CHECK: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C3]]
670    ; CHECK: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C2]](s32)
671    ; CHECK: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[SHL2]]
672    ; CHECK: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[OR2]], [[C2]](s32)
673    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
674    ; CHECK: $vgpr0 = COPY [[COPY5]](s32)
675    %0:_(s32) = COPY $vgpr0
676    %1:_(<4 x s8>) = G_BITCAST %0
677    %2:_(s32) = G_CONSTANT i32 3
678    %3:_(s8) = G_EXTRACT_VECTOR_ELT %1, %2
679    %4:_(s32) = G_ANYEXT %3
680    $vgpr0 = COPY %4
681...
682
683
684
685---
686name: extract_vector_elt_v8s8_varidx_i32
687
688body: |
689  bb.0:
690    liveins: $vgpr0_vgpr1, $vgpr2
691
692    ; CHECK-LABEL: name: extract_vector_elt_v8s8_varidx_i32
693    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
694    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr2
695    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](s64)
696    ; CHECK: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[UV]](s32)
697    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
698    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV]], [[C]](s32)
699    ; CHECK: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
700    ; CHECK: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[UV1]](s32)
701    ; CHECK: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[UV1]], [[C]](s32)
702    ; CHECK: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR1]](s32)
703    ; CHECK: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
704    ; CHECK: [[LSHR2:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC]], [[C1]](s16)
705    ; CHECK: [[LSHR3:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC1]], [[C1]](s16)
706    ; CHECK: [[LSHR4:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC2]], [[C1]](s16)
707    ; CHECK: [[LSHR5:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC3]], [[C1]](s16)
708    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
709    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[UV]](s32)
710    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C2]]
711    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR2]](s16)
712    ; CHECK: [[AND1:%[0-9]+]]:_(s32) = G_AND [[ANYEXT]], [[C2]]
713    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
714    ; CHECK: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C3]](s32)
715    ; CHECK: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
716    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
717    ; CHECK: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C2]]
718    ; CHECK: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C]](s32)
719    ; CHECK: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
720    ; CHECK: [[ANYEXT1:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR3]](s16)
721    ; CHECK: [[AND3:%[0-9]+]]:_(s32) = G_AND [[ANYEXT1]], [[C2]]
722    ; CHECK: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
723    ; CHECK: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C4]](s32)
724    ; CHECK: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[SHL2]]
725    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[UV1]](s32)
726    ; CHECK: [[AND4:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C2]]
727    ; CHECK: [[ANYEXT2:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR4]](s16)
728    ; CHECK: [[AND5:%[0-9]+]]:_(s32) = G_AND [[ANYEXT2]], [[C2]]
729    ; CHECK: [[SHL3:%[0-9]+]]:_(s32) = G_SHL [[AND5]], [[C3]](s32)
730    ; CHECK: [[OR3:%[0-9]+]]:_(s32) = G_OR [[AND4]], [[SHL3]]
731    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
732    ; CHECK: [[AND6:%[0-9]+]]:_(s32) = G_AND [[COPY5]], [[C2]]
733    ; CHECK: [[SHL4:%[0-9]+]]:_(s32) = G_SHL [[AND6]], [[C]](s32)
734    ; CHECK: [[OR4:%[0-9]+]]:_(s32) = G_OR [[OR3]], [[SHL4]]
735    ; CHECK: [[ANYEXT3:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR5]](s16)
736    ; CHECK: [[AND7:%[0-9]+]]:_(s32) = G_AND [[ANYEXT3]], [[C2]]
737    ; CHECK: [[SHL5:%[0-9]+]]:_(s32) = G_SHL [[AND7]], [[C4]](s32)
738    ; CHECK: [[OR5:%[0-9]+]]:_(s32) = G_OR [[OR4]], [[SHL5]]
739    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[OR2]](s32), [[OR5]](s32)
740    ; CHECK: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 2
741    ; CHECK: [[LSHR6:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C5]](s32)
742    ; CHECK: [[EVEC:%[0-9]+]]:_(s32) = G_EXTRACT_VECTOR_ELT [[BUILD_VECTOR]](<2 x s32>), [[LSHR6]](s32)
743    ; CHECK: [[C6:%[0-9]+]]:_(s32) = G_CONSTANT i32 3
744    ; CHECK: [[AND8:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C6]]
745    ; CHECK: [[SHL6:%[0-9]+]]:_(s32) = G_SHL [[AND8]], [[C6]](s32)
746    ; CHECK: [[LSHR7:%[0-9]+]]:_(s32) = G_LSHR [[EVEC]], [[SHL6]](s32)
747    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY [[LSHR7]](s32)
748    ; CHECK: $vgpr0 = COPY [[COPY6]](s32)
749    %0:_(s64) = COPY $vgpr0_vgpr1
750    %1:_(s32) = COPY $vgpr2
751    %2:_(<8 x s8>) = G_BITCAST %0
752    %3:_(s8) = G_EXTRACT_VECTOR_ELT %2, %1
753    %4:_(s32) = G_ANYEXT %3
754    $vgpr0 = COPY %4
755...
756
757
758---
759name: extract_vector_elt_v8s8_constidx_0_i32
760
761body: |
762  bb.0:
763    liveins: $vgpr0_vgpr1
764
765    ; CHECK-LABEL: name: extract_vector_elt_v8s8_constidx_0_i32
766    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
767    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
768    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](s64)
769    ; CHECK: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[UV]](s32)
770    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
771    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV]], [[C1]](s32)
772    ; CHECK: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
773    ; CHECK: [[C2:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
774    ; CHECK: [[LSHR1:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC]], [[C2]](s16)
775    ; CHECK: [[LSHR2:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC1]], [[C2]](s16)
776    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
777    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[UV]](s32)
778    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C3]]
779    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR1]](s16)
780    ; CHECK: [[AND1:%[0-9]+]]:_(s32) = G_AND [[ANYEXT]], [[C3]]
781    ; CHECK: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
782    ; CHECK: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C4]](s32)
783    ; CHECK: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
784    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
785    ; CHECK: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C3]]
786    ; CHECK: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C1]](s32)
787    ; CHECK: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
788    ; CHECK: [[ANYEXT1:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR2]](s16)
789    ; CHECK: [[AND3:%[0-9]+]]:_(s32) = G_AND [[ANYEXT1]], [[C3]]
790    ; CHECK: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
791    ; CHECK: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C5]](s32)
792    ; CHECK: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[SHL2]]
793    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[OR2]](s32)
794    ; CHECK: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[COPY3]], [[C]](s32)
795    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
796    ; CHECK: $vgpr0 = COPY [[COPY4]](s32)
797    %0:_(s64) = COPY $vgpr0_vgpr1
798    %1:_(s32) = G_CONSTANT i32 0
799    %2:_(<8 x s8>) = G_BITCAST %0
800    %3:_(s8) = G_EXTRACT_VECTOR_ELT %2, %1
801    %4:_(s32) = G_ANYEXT %3
802    $vgpr0 = COPY %4
803...
804
805---
806name: extract_vector_elt_v8s8_constidx_1_i32
807
808body: |
809  bb.0:
810    liveins: $vgpr0_vgpr1
811
812    ; CHECK-LABEL: name: extract_vector_elt_v8s8_constidx_1_i32
813    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
814    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](s64)
815    ; CHECK: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[UV]](s32)
816    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
817    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV]], [[C]](s32)
818    ; CHECK: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
819    ; CHECK: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
820    ; CHECK: [[LSHR1:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC]], [[C1]](s16)
821    ; CHECK: [[LSHR2:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC1]], [[C1]](s16)
822    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
823    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[UV]](s32)
824    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C2]]
825    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR1]](s16)
826    ; CHECK: [[AND1:%[0-9]+]]:_(s32) = G_AND [[ANYEXT]], [[C2]]
827    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
828    ; CHECK: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C3]](s32)
829    ; CHECK: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
830    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
831    ; CHECK: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C2]]
832    ; CHECK: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C]](s32)
833    ; CHECK: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
834    ; CHECK: [[ANYEXT1:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR2]](s16)
835    ; CHECK: [[AND3:%[0-9]+]]:_(s32) = G_AND [[ANYEXT1]], [[C2]]
836    ; CHECK: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
837    ; CHECK: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C4]](s32)
838    ; CHECK: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[SHL2]]
839    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[OR2]](s32)
840    ; CHECK: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[COPY3]], [[C3]](s32)
841    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
842    ; CHECK: $vgpr0 = COPY [[COPY4]](s32)
843    %0:_(s64) = COPY $vgpr0_vgpr1
844    %1:_(s32) = G_CONSTANT i32 1
845    %2:_(<8 x s8>) = G_BITCAST %0
846    %3:_(s8) = G_EXTRACT_VECTOR_ELT %2, %1
847    %4:_(s32) = G_ANYEXT %3
848    $vgpr0 = COPY %4
849...
850
851---
852name: extract_vector_elt_v8s8_constidx_3_i32
853
854body: |
855  bb.0:
856    liveins: $vgpr0_vgpr1
857
858    ; CHECK-LABEL: name: extract_vector_elt_v8s8_constidx_3_i32
859    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
860    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](s64)
861    ; CHECK: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[UV]](s32)
862    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
863    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV]], [[C]](s32)
864    ; CHECK: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
865    ; CHECK: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
866    ; CHECK: [[LSHR1:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC]], [[C1]](s16)
867    ; CHECK: [[LSHR2:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC1]], [[C1]](s16)
868    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
869    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[UV]](s32)
870    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C2]]
871    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR1]](s16)
872    ; CHECK: [[AND1:%[0-9]+]]:_(s32) = G_AND [[ANYEXT]], [[C2]]
873    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
874    ; CHECK: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C3]](s32)
875    ; CHECK: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
876    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
877    ; CHECK: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C2]]
878    ; CHECK: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C]](s32)
879    ; CHECK: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
880    ; CHECK: [[ANYEXT1:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR2]](s16)
881    ; CHECK: [[AND3:%[0-9]+]]:_(s32) = G_AND [[ANYEXT1]], [[C2]]
882    ; CHECK: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
883    ; CHECK: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C4]](s32)
884    ; CHECK: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[SHL2]]
885    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[OR2]](s32)
886    ; CHECK: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[COPY3]], [[C4]](s32)
887    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
888    ; CHECK: $vgpr0 = COPY [[COPY4]](s32)
889    %0:_(s64) = COPY $vgpr0_vgpr1
890    %1:_(s32) = G_CONSTANT i32 3
891    %2:_(<8 x s8>) = G_BITCAST %0
892    %3:_(s8) = G_EXTRACT_VECTOR_ELT %2, %1
893    %4:_(s32) = G_ANYEXT %3
894    $vgpr0 = COPY %4
895...
896
897---
898name: extract_vector_elt_v8s8_constidx_4_i32
899
900body: |
901  bb.0:
902    liveins: $vgpr0_vgpr1
903
904    ; CHECK-LABEL: name: extract_vector_elt_v8s8_constidx_4_i32
905    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
906    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](s64)
907    ; CHECK: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[UV1]](s32)
908    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
909    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV1]], [[C]](s32)
910    ; CHECK: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
911    ; CHECK: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
912    ; CHECK: [[LSHR1:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC]], [[C1]](s16)
913    ; CHECK: [[LSHR2:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC1]], [[C1]](s16)
914    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
915    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[UV1]](s32)
916    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C2]]
917    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR1]](s16)
918    ; CHECK: [[AND1:%[0-9]+]]:_(s32) = G_AND [[ANYEXT]], [[C2]]
919    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
920    ; CHECK: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C3]](s32)
921    ; CHECK: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
922    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
923    ; CHECK: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C2]]
924    ; CHECK: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C]](s32)
925    ; CHECK: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
926    ; CHECK: [[ANYEXT1:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR2]](s16)
927    ; CHECK: [[AND3:%[0-9]+]]:_(s32) = G_AND [[ANYEXT1]], [[C2]]
928    ; CHECK: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
929    ; CHECK: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C4]](s32)
930    ; CHECK: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[SHL2]]
931    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[OR2]](s32)
932    ; CHECK: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
933    ; CHECK: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[COPY3]], [[C5]](s32)
934    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
935    ; CHECK: $vgpr0 = COPY [[COPY4]](s32)
936    %0:_(s64) = COPY $vgpr0_vgpr1
937    %1:_(s32) = G_CONSTANT i32 4
938    %2:_(<8 x s8>) = G_BITCAST %0
939    %3:_(s8) = G_EXTRACT_VECTOR_ELT %2, %1
940    %4:_(s32) = G_ANYEXT %3
941    $vgpr0 = COPY %4
942...
943
944---
945name: extract_vector_elt_v8s8_constidx_5_i32
946
947body: |
948  bb.0:
949    liveins: $vgpr0_vgpr1
950
951    ; CHECK-LABEL: name: extract_vector_elt_v8s8_constidx_5_i32
952    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
953    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](s64)
954    ; CHECK: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[UV1]](s32)
955    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
956    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV1]], [[C]](s32)
957    ; CHECK: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
958    ; CHECK: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
959    ; CHECK: [[LSHR1:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC]], [[C1]](s16)
960    ; CHECK: [[LSHR2:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC1]], [[C1]](s16)
961    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
962    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[UV1]](s32)
963    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C2]]
964    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR1]](s16)
965    ; CHECK: [[AND1:%[0-9]+]]:_(s32) = G_AND [[ANYEXT]], [[C2]]
966    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
967    ; CHECK: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C3]](s32)
968    ; CHECK: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
969    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
970    ; CHECK: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C2]]
971    ; CHECK: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C]](s32)
972    ; CHECK: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
973    ; CHECK: [[ANYEXT1:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR2]](s16)
974    ; CHECK: [[AND3:%[0-9]+]]:_(s32) = G_AND [[ANYEXT1]], [[C2]]
975    ; CHECK: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
976    ; CHECK: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C4]](s32)
977    ; CHECK: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[SHL2]]
978    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[OR2]](s32)
979    ; CHECK: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[COPY3]], [[C3]](s32)
980    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
981    ; CHECK: $vgpr0 = COPY [[COPY4]](s32)
982    %0:_(s64) = COPY $vgpr0_vgpr1
983    %1:_(s32) = G_CONSTANT i32 5
984    %2:_(<8 x s8>) = G_BITCAST %0
985    %3:_(s8) = G_EXTRACT_VECTOR_ELT %2, %1
986    %4:_(s32) = G_ANYEXT %3
987    $vgpr0 = COPY %4
988...
989
990---
991name: extract_vector_elt_v8s8_constidx_7_i32
992
993body: |
994  bb.0:
995    liveins: $vgpr0_vgpr1
996
997    ; CHECK-LABEL: name: extract_vector_elt_v8s8_constidx_7_i32
998    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
999    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](s64)
1000    ; CHECK: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[UV1]](s32)
1001    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
1002    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV1]], [[C]](s32)
1003    ; CHECK: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
1004    ; CHECK: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
1005    ; CHECK: [[LSHR1:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC]], [[C1]](s16)
1006    ; CHECK: [[LSHR2:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC1]], [[C1]](s16)
1007    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
1008    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[UV1]](s32)
1009    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C2]]
1010    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR1]](s16)
1011    ; CHECK: [[AND1:%[0-9]+]]:_(s32) = G_AND [[ANYEXT]], [[C2]]
1012    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
1013    ; CHECK: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C3]](s32)
1014    ; CHECK: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
1015    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
1016    ; CHECK: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C2]]
1017    ; CHECK: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C]](s32)
1018    ; CHECK: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
1019    ; CHECK: [[ANYEXT1:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR2]](s16)
1020    ; CHECK: [[AND3:%[0-9]+]]:_(s32) = G_AND [[ANYEXT1]], [[C2]]
1021    ; CHECK: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
1022    ; CHECK: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C4]](s32)
1023    ; CHECK: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[SHL2]]
1024    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[OR2]](s32)
1025    ; CHECK: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[COPY3]], [[C4]](s32)
1026    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
1027    ; CHECK: $vgpr0 = COPY [[COPY4]](s32)
1028    %0:_(s64) = COPY $vgpr0_vgpr1
1029    %1:_(s32) = G_CONSTANT i32 7
1030    %2:_(<8 x s8>) = G_BITCAST %0
1031    %3:_(s8) = G_EXTRACT_VECTOR_ELT %2, %1
1032    %4:_(s32) = G_ANYEXT %3
1033    $vgpr0 = COPY %4
1034...
1035
1036---
1037name: extract_vector_elt_v2s16_varidx_i32
1038
1039body: |
1040  bb.0:
1041    liveins: $vgpr0, $vgpr1
1042
1043    ; CHECK-LABEL: name: extract_vector_elt_v2s16_varidx_i32
1044    ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1045    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1046    ; CHECK: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>)
1047    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
1048    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C]]
1049    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
1050    ; CHECK: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND]], [[C1]](s32)
1051    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[SHL]](s32)
1052    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
1053    ; CHECK: $vgpr0 = COPY [[COPY2]](s32)
1054    %0:_(<2 x s16>) = COPY $vgpr0
1055    %1:_(s32) = COPY $vgpr1
1056    %2:_(s16) = G_EXTRACT_VECTOR_ELT %0, %1
1057    %3:_(s32) = G_ANYEXT %2
1058    $vgpr0 = COPY %3
1059...
1060
1061---
1062name: extract_vector_elt_v2s16_idx0_i32
1063
1064body: |
1065  bb.0:
1066    liveins: $vgpr0
1067
1068    ; CHECK-LABEL: name: extract_vector_elt_v2s16_idx0_i32
1069    ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1070    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
1071    ; CHECK: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>)
1072    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
1073    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
1074    ; CHECK: $vgpr0 = COPY [[COPY1]](s32)
1075    %0:_(<2 x s16>) = COPY $vgpr0
1076    %1:_(s32) = G_CONSTANT i32 0
1077    %2:_(s16) = G_EXTRACT_VECTOR_ELT %0, %1
1078    %3:_(s32) = G_ANYEXT %2
1079    $vgpr0 = COPY %3
1080...
1081
1082---
1083name: extract_vector_elt_v2s16_idx1_i32
1084
1085body: |
1086  bb.0:
1087    liveins: $vgpr0
1088
1089    ; CHECK-LABEL: name: extract_vector_elt_v2s16_idx1_i32
1090    ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1091    ; CHECK: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>)
1092    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
1093    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
1094    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
1095    ; CHECK: $vgpr0 = COPY [[COPY1]](s32)
1096    %0:_(<2 x s16>) = COPY $vgpr0
1097    %1:_(s32) = G_CONSTANT i32 1
1098    %2:_(s16) = G_EXTRACT_VECTOR_ELT %0, %1
1099    %3:_(s32) = G_ANYEXT %2
1100    $vgpr0 = COPY %3
1101...
1102
1103---
1104name: extract_vector_elt_v2s16_idx2_i32
1105
1106body: |
1107  bb.0:
1108    liveins: $vgpr0
1109
1110    ; CHECK-LABEL: name: extract_vector_elt_v2s16_idx2_i32
1111    ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1112    ; CHECK: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>)
1113    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
1114    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
1115    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
1116    ; CHECK: $vgpr0 = COPY [[COPY1]](s32)
1117    %0:_(<2 x s16>) = COPY $vgpr0
1118    %1:_(s32) = G_CONSTANT i32 2
1119    %2:_(s16) = G_EXTRACT_VECTOR_ELT %0, %1
1120    %3:_(s32) = G_ANYEXT %2
1121    $vgpr0 = COPY %3
1122...
1123
1124---
1125name: extract_vector_elt_v3s16_varidx_i32
1126
1127body: |
1128  bb.0:
1129    liveins: $vgpr0_vgpr1_vgpr2, $vgpr3
1130
1131    ; CHECK-LABEL: name: extract_vector_elt_v3s16_varidx_i32
1132    ; CHECK: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
1133    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr3
1134    ; CHECK: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY [[COPY]](<3 x s32>)
1135    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
1136    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 16
1137    ; CHECK: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 16
1138    ; CHECK: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV2]], 16
1139    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32), [[SEXT_INREG2]](s32)
1140    ; CHECK: [[EVEC:%[0-9]+]]:_(s32) = G_EXTRACT_VECTOR_ELT [[BUILD_VECTOR]](<3 x s32>), [[COPY1]](s32)
1141    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[EVEC]](s32)
1142    ; CHECK: $vgpr0 = COPY [[COPY3]](s32)
1143    %0:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
1144    %1:_(s32) = COPY $vgpr3
1145    %2:_(<3 x s16>) = G_TRUNC %0
1146    %3:_(s16) = G_EXTRACT_VECTOR_ELT %2, %1
1147    %4:_(s32) = G_ANYEXT %3
1148    $vgpr0 = COPY %4
1149...
1150
1151---
1152name: extract_vector_elt_v3s16_idx0_i32
1153
1154body: |
1155  bb.0:
1156    liveins: $vgpr0_vgpr1_vgpr2
1157
1158    ; CHECK-LABEL: name: extract_vector_elt_v3s16_idx0_i32
1159    ; CHECK: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
1160    ; CHECK: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY [[COPY]](<3 x s32>)
1161    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
1162    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 16
1163    ; CHECK: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 16
1164    ; CHECK: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV2]], 16
1165    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32), [[SEXT_INREG2]](s32)
1166    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s32) = G_EXTRACT [[BUILD_VECTOR]](<3 x s32>), 0
1167    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[EXTRACT]](s32)
1168    ; CHECK: $vgpr0 = COPY [[COPY2]](s32)
1169    %0:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
1170    %1:_(s32) = G_CONSTANT i32 0
1171    %2:_(<3 x s16>) = G_TRUNC %0
1172    %3:_(s16) = G_EXTRACT_VECTOR_ELT %2, %1
1173    %4:_(s32) = G_ANYEXT %3
1174    $vgpr0 = COPY %4
1175...
1176
1177---
1178name: extract_vector_elt_v3s16_idx1_i32
1179
1180body: |
1181  bb.0:
1182    liveins: $vgpr0_vgpr1_vgpr2
1183
1184    ; CHECK-LABEL: name: extract_vector_elt_v3s16_idx1_i32
1185    ; CHECK: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
1186    ; CHECK: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY [[COPY]](<3 x s32>)
1187    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
1188    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 16
1189    ; CHECK: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 16
1190    ; CHECK: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV2]], 16
1191    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32), [[SEXT_INREG2]](s32)
1192    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s32) = G_EXTRACT [[BUILD_VECTOR]](<3 x s32>), 32
1193    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[EXTRACT]](s32)
1194    ; CHECK: $vgpr0 = COPY [[COPY2]](s32)
1195    %0:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
1196    %1:_(s32) = G_CONSTANT i32 1
1197    %2:_(<3 x s16>) = G_TRUNC %0
1198    %3:_(s16) = G_EXTRACT_VECTOR_ELT %2, %1
1199    %4:_(s32) = G_ANYEXT %3
1200    $vgpr0 = COPY %4
1201...
1202
1203---
1204name: extract_vector_elt_v3s16_idx2_i32
1205
1206body: |
1207  bb.0:
1208    liveins: $vgpr0_vgpr1_vgpr2
1209
1210    ; CHECK-LABEL: name: extract_vector_elt_v3s16_idx2_i32
1211    ; CHECK: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
1212    ; CHECK: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY [[COPY]](<3 x s32>)
1213    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
1214    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV]], 16
1215    ; CHECK: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV1]], 16
1216    ; CHECK: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[UV2]], 16
1217    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32), [[SEXT_INREG2]](s32)
1218    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s32) = G_EXTRACT [[BUILD_VECTOR]](<3 x s32>), 64
1219    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[EXTRACT]](s32)
1220    ; CHECK: $vgpr0 = COPY [[COPY2]](s32)
1221    %0:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
1222    %1:_(s32) = G_CONSTANT i32 2
1223    %2:_(<3 x s16>) = G_TRUNC %0
1224    %3:_(s16) = G_EXTRACT_VECTOR_ELT %2, %1
1225    %4:_(s32) = G_ANYEXT %3
1226    $vgpr0 = COPY %4
1227...
1228
1229---
1230name: extract_vector_elt_v3s16_idx3_i32
1231
1232body: |
1233  bb.0:
1234    liveins: $vgpr0_vgpr1_vgpr2
1235
1236    ; CHECK-LABEL: name: extract_vector_elt_v3s16_idx3_i32
1237    ; CHECK: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
1238    ; CHECK: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
1239    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[DEF]](s32)
1240    ; CHECK: $vgpr0 = COPY [[COPY1]](s32)
1241    %0:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
1242    %1:_(s32) = G_CONSTANT i32 3
1243    %2:_(<3 x s16>) = G_TRUNC %0
1244    %3:_(s16) = G_EXTRACT_VECTOR_ELT %2, %1
1245    %4:_(s32) = G_ANYEXT %3
1246    $vgpr0 = COPY %4
1247...
1248
1249---
1250name: extract_vector_elt_v4s16_varidx_i32
1251
1252body: |
1253  bb.0:
1254    liveins: $vgpr0_vgpr1, $vgpr2
1255
1256    ; CHECK-LABEL: name: extract_vector_elt_v4s16_varidx_i32
1257    ; CHECK: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr0_vgpr1
1258    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr2
1259    ; CHECK: [[BITCAST:%[0-9]+]]:_(<2 x s32>) = G_BITCAST [[COPY]](<4 x s16>)
1260    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
1261    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C]](s32)
1262    ; CHECK: [[EVEC:%[0-9]+]]:_(s32) = G_EXTRACT_VECTOR_ELT [[BITCAST]](<2 x s32>), [[LSHR]](s32)
1263    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C]]
1264    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
1265    ; CHECK: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND]], [[C1]](s32)
1266    ; CHECK: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[EVEC]], [[SHL]](s32)
1267    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
1268    ; CHECK: $vgpr0 = COPY [[COPY2]](s32)
1269    %0:_(<4 x s16>) = COPY $vgpr0_vgpr1
1270    %1:_(s32) = COPY $vgpr2
1271    %2:_(s16) = G_EXTRACT_VECTOR_ELT %0, %1
1272    %3:_(s32) = G_ANYEXT %2
1273    $vgpr0 = COPY %3
1274...
1275
1276---
1277name: extract_vector_elt_v2s128_varidx_i32
1278
1279body: |
1280  bb.0:
1281    liveins: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7, $vgpr8
1282
1283    ; CHECK-LABEL: name: extract_vector_elt_v2s128_varidx_i32
1284    ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s128>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
1285    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr8
1286    ; CHECK: [[BITCAST:%[0-9]+]]:_(<4 x s64>) = G_BITCAST [[COPY]](<2 x s128>)
1287    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 2
1288    ; CHECK: [[MUL:%[0-9]+]]:_(s32) = G_MUL [[COPY1]], [[C]]
1289    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
1290    ; CHECK: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[MUL]], [[C1]]
1291    ; CHECK: [[EVEC:%[0-9]+]]:_(s64) = G_EXTRACT_VECTOR_ELT [[BITCAST]](<4 x s64>), [[ADD]](s32)
1292    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
1293    ; CHECK: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[MUL]], [[C2]]
1294    ; CHECK: [[EVEC1:%[0-9]+]]:_(s64) = G_EXTRACT_VECTOR_ELT [[BITCAST]](<4 x s64>), [[ADD1]](s32)
1295    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[EVEC]](s64), [[EVEC1]](s64)
1296    ; CHECK: [[BITCAST1:%[0-9]+]]:_(s128) = G_BITCAST [[BUILD_VECTOR]](<2 x s64>)
1297    ; CHECK: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BITCAST1]](s128)
1298    %0:_(<2 x s128>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7
1299    %1:_(s32) = COPY $vgpr8
1300    %2:_(s128) = G_EXTRACT_VECTOR_ELT %0, %1
1301    $vgpr0_vgpr1_vgpr2_vgpr3 = COPY %2
1302...
1303
1304---
1305name: extract_vector_elt_v2i32_varidx_i64
1306
1307body: |
1308  bb.0:
1309    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3
1310
1311    ; CHECK-LABEL: name: extract_vector_elt_v2i32_varidx_i64
1312    ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
1313    ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
1314    ; CHECK: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY1]](s64)
1315    ; CHECK: [[EVEC:%[0-9]+]]:_(s32) = G_EXTRACT_VECTOR_ELT [[COPY]](<2 x s32>), [[TRUNC]](s32)
1316    ; CHECK: $vgpr0 = COPY [[EVEC]](s32)
1317    %0:_(<2 x s32>) = COPY $vgpr0_vgpr1
1318    %1:_(s64) = COPY $vgpr2_vgpr3
1319    %2:_(s32) = G_EXTRACT_VECTOR_ELT %0, %1
1320    $vgpr0 = COPY %2
1321...
1322---
1323name: extract_vector_elt_0_v2i64
1324
1325body: |
1326  bb.0:
1327    liveins: $vgpr0_vgpr1_vgpr2_vgpr3
1328
1329    ; CHECK-LABEL: name: extract_vector_elt_0_v2i64
1330    ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
1331    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s64) = G_EXTRACT [[COPY]](<2 x s64>), 0
1332    ; CHECK: $vgpr0_vgpr1 = COPY [[EXTRACT]](s64)
1333    %0:_(<2 x s64>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
1334    %1:_(s32) = G_CONSTANT i32 0
1335    %2:_(s64) = G_EXTRACT_VECTOR_ELT %0, %1
1336    $vgpr0_vgpr1 = COPY %2
1337...
1338
1339---
1340name: extract_vector_elt_0_v8i64
1341
1342body: |
1343  bb.0:
1344    liveins: $vgpr0_vgpr1_vgpr2_vgpr3
1345
1346    ; CHECK-LABEL: name: extract_vector_elt_0_v8i64
1347    ; CHECK: [[DEF:%[0-9]+]]:_(<8 x s64>) = G_IMPLICIT_DEF
1348    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s64) = G_EXTRACT [[DEF]](<8 x s64>), 0
1349    ; CHECK: $vgpr0_vgpr1 = COPY [[EXTRACT]](s64)
1350    %0:_(<8 x s64>) = G_IMPLICIT_DEF
1351    %1:_(s32) = G_CONSTANT i32 0
1352    %2:_(s64) = G_EXTRACT_VECTOR_ELT %0, %1
1353    $vgpr0_vgpr1 = COPY %2
1354...
1355
1356---
1357name: extract_vector_elt_0_v16i64
1358
1359body: |
1360  bb.0:
1361    liveins: $vgpr0_vgpr1_vgpr2_vgpr3
1362
1363    ; CHECK-LABEL: name: extract_vector_elt_0_v16i64
1364    ; CHECK: [[DEF:%[0-9]+]]:_(<16 x s64>) = G_IMPLICIT_DEF
1365    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s64) = G_EXTRACT [[DEF]](<16 x s64>), 0
1366    ; CHECK: $vgpr0_vgpr1 = COPY [[EXTRACT]](s64)
1367    %0:_(<16 x s64>) = G_IMPLICIT_DEF
1368    %1:_(s32) = G_CONSTANT i32 0
1369    %2:_(s64) = G_EXTRACT_VECTOR_ELT %0, %1
1370    $vgpr0_vgpr1 = COPY %2
1371...
1372
1373# Make sure we look through casts looking for a constant index.
1374---
1375name: extract_vector_elt_look_through_trunc_0_v4i32
1376
1377body: |
1378  bb.0:
1379    liveins: $vgpr0_vgpr1_vgpr2_vgpr3
1380    ; CHECK-LABEL: name: extract_vector_elt_look_through_trunc_0_v4i32
1381    ; CHECK: [[COPY:%[0-9]+]]:_(<4 x s32>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
1382    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
1383    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY]](<4 x s32>), 0
1384    ; CHECK: $vgpr0 = COPY [[EXTRACT]](s32)
1385    %0:_(<4 x s32>) = COPY $vgpr0_vgpr1_vgpr2_vgpr3
1386    %1:_(s64) = G_CONSTANT i64 0
1387    %2:_(s32) = G_TRUNC %1
1388    %3:_(s32) = G_EXTRACT_VECTOR_ELT %0, %2
1389    $vgpr0 = COPY %3
1390...
1391
1392---
1393name: extract_vector_elt_7_v64s32
1394
1395body: |
1396  bb.0:
1397    liveins: $sgpr0_sgpr1
1398
1399    ; CHECK-LABEL: name: extract_vector_elt_7_v64s32
1400    ; CHECK: [[COPY:%[0-9]+]]:_(p1) = COPY $sgpr0_sgpr1
1401    ; CHECK: [[LOAD:%[0-9]+]]:_(<16 x s32>) = G_LOAD [[COPY]](p1) :: (load 64, align 4, addrspace 4)
1402    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 64
1403    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
1404    ; CHECK: [[LOAD1:%[0-9]+]]:_(<16 x s32>) = G_LOAD [[PTR_ADD]](p1) :: (load 64 + 64, align 4, addrspace 4)
1405    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 128
1406    ; CHECK: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
1407    ; CHECK: [[LOAD2:%[0-9]+]]:_(<16 x s32>) = G_LOAD [[PTR_ADD1]](p1) :: (load 64 + 128, align 4, addrspace 4)
1408    ; CHECK: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 192
1409    ; CHECK: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
1410    ; CHECK: [[LOAD3:%[0-9]+]]:_(<16 x s32>) = G_LOAD [[PTR_ADD2]](p1) :: (load 64 + 192, align 4, addrspace 4)
1411    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s32) = G_EXTRACT [[LOAD]](<16 x s32>), 224
1412    ; CHECK: S_ENDPGM 0, implicit [[EXTRACT]](s32)
1413    %0:_(p1) = COPY $sgpr0_sgpr1
1414    %1:_(s32) = G_CONSTANT i32 7
1415    %2:_(<64 x s32>) = G_LOAD %0 :: (load 256, align 4, addrspace 4)
1416    %3:_(s32) = G_EXTRACT_VECTOR_ELT %2, %1
1417    S_ENDPGM 0, implicit %3
1418...
1419
1420---
1421name: extract_vector_elt_33_v64s32
1422
1423body: |
1424  bb.0:
1425    liveins: $sgpr0_sgpr1
1426
1427    ; CHECK-LABEL: name: extract_vector_elt_33_v64s32
1428    ; CHECK: [[COPY:%[0-9]+]]:_(p1) = COPY $sgpr0_sgpr1
1429    ; CHECK: [[LOAD:%[0-9]+]]:_(<16 x s32>) = G_LOAD [[COPY]](p1) :: (load 64, align 4, addrspace 4)
1430    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 64
1431    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
1432    ; CHECK: [[LOAD1:%[0-9]+]]:_(<16 x s32>) = G_LOAD [[PTR_ADD]](p1) :: (load 64 + 64, align 4, addrspace 4)
1433    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 128
1434    ; CHECK: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
1435    ; CHECK: [[LOAD2:%[0-9]+]]:_(<16 x s32>) = G_LOAD [[PTR_ADD1]](p1) :: (load 64 + 128, align 4, addrspace 4)
1436    ; CHECK: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 192
1437    ; CHECK: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
1438    ; CHECK: [[LOAD3:%[0-9]+]]:_(<16 x s32>) = G_LOAD [[PTR_ADD2]](p1) :: (load 64 + 192, align 4, addrspace 4)
1439    ; CHECK: [[EXTRACT:%[0-9]+]]:_(s32) = G_EXTRACT [[LOAD2]](<16 x s32>), 32
1440    ; CHECK: S_ENDPGM 0, implicit [[EXTRACT]](s32)
1441    %0:_(p1) = COPY $sgpr0_sgpr1
1442    %1:_(s32) = G_CONSTANT i32 33
1443    %2:_(<64 x s32>) = G_LOAD %0 :: (load 256, align 4, addrspace 4)
1444    %3:_(s32) = G_EXTRACT_VECTOR_ELT %2, %1
1445    S_ENDPGM 0, implicit %3
1446...
1447
1448# Test handling of out of bounds indexes
1449---
1450name: extract_vector_elt_64_65_v64s32
1451
1452body: |
1453  bb.0:
1454    liveins: $sgpr0_sgpr1
1455
1456    ; CHECK-LABEL: name: extract_vector_elt_64_65_v64s32
1457    ; CHECK: [[COPY:%[0-9]+]]:_(p1) = COPY $sgpr0_sgpr1
1458    ; CHECK: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
1459    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[DEF]](s32)
1460    ; CHECK: S_ENDPGM 0, implicit [[COPY1]](s32), implicit [[DEF]](s32)
1461    %0:_(p1) = COPY $sgpr0_sgpr1
1462    %1:_(s32) = G_CONSTANT i32 64
1463    %2:_(<64 x s32>) = G_LOAD %0 :: (load 256, align 4, addrspace 4)
1464    %3:_(s32) = G_EXTRACT_VECTOR_ELT %2, %1
1465    %4:_(s32) = G_CONSTANT i32 65
1466    %5:_(s32) = G_EXTRACT_VECTOR_ELT %2, %4
1467    S_ENDPGM 0, implicit %3, implicit %5
1468...
1469
1470---
1471name: extract_vector_elt_33_v64p3
1472
1473body: |
1474  bb.0:
1475    liveins: $sgpr0_sgpr1
1476
1477    ; CHECK-LABEL: name: extract_vector_elt_33_v64p3
1478    ; CHECK: [[COPY:%[0-9]+]]:_(p1) = COPY $sgpr0_sgpr1
1479    ; CHECK: [[LOAD:%[0-9]+]]:_(<16 x s32>) = G_LOAD [[COPY]](p1) :: (load 64, align 4, addrspace 4)
1480    ; CHECK: [[BITCAST:%[0-9]+]]:_(<16 x p3>) = G_BITCAST [[LOAD]](<16 x s32>)
1481    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 64
1482    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
1483    ; CHECK: [[LOAD1:%[0-9]+]]:_(<16 x s32>) = G_LOAD [[PTR_ADD]](p1) :: (load 64 + 64, align 4, addrspace 4)
1484    ; CHECK: [[BITCAST1:%[0-9]+]]:_(<16 x p3>) = G_BITCAST [[LOAD1]](<16 x s32>)
1485    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 128
1486    ; CHECK: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
1487    ; CHECK: [[LOAD2:%[0-9]+]]:_(<16 x s32>) = G_LOAD [[PTR_ADD1]](p1) :: (load 64 + 128, align 4, addrspace 4)
1488    ; CHECK: [[BITCAST2:%[0-9]+]]:_(<16 x p3>) = G_BITCAST [[LOAD2]](<16 x s32>)
1489    ; CHECK: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 192
1490    ; CHECK: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
1491    ; CHECK: [[LOAD3:%[0-9]+]]:_(<16 x s32>) = G_LOAD [[PTR_ADD2]](p1) :: (load 64 + 192, align 4, addrspace 4)
1492    ; CHECK: [[BITCAST3:%[0-9]+]]:_(<16 x p3>) = G_BITCAST [[LOAD3]](<16 x s32>)
1493    ; CHECK: [[FRAME_INDEX:%[0-9]+]]:_(p5) = G_FRAME_INDEX %stack.0
1494    ; CHECK: [[UV:%[0-9]+]]:_(p3), [[UV1:%[0-9]+]]:_(p3), [[UV2:%[0-9]+]]:_(p3), [[UV3:%[0-9]+]]:_(p3), [[UV4:%[0-9]+]]:_(p3), [[UV5:%[0-9]+]]:_(p3), [[UV6:%[0-9]+]]:_(p3), [[UV7:%[0-9]+]]:_(p3), [[UV8:%[0-9]+]]:_(p3), [[UV9:%[0-9]+]]:_(p3), [[UV10:%[0-9]+]]:_(p3), [[UV11:%[0-9]+]]:_(p3), [[UV12:%[0-9]+]]:_(p3), [[UV13:%[0-9]+]]:_(p3), [[UV14:%[0-9]+]]:_(p3), [[UV15:%[0-9]+]]:_(p3) = G_UNMERGE_VALUES [[BITCAST]](<16 x p3>)
1495    ; CHECK: [[UV16:%[0-9]+]]:_(p3), [[UV17:%[0-9]+]]:_(p3), [[UV18:%[0-9]+]]:_(p3), [[UV19:%[0-9]+]]:_(p3), [[UV20:%[0-9]+]]:_(p3), [[UV21:%[0-9]+]]:_(p3), [[UV22:%[0-9]+]]:_(p3), [[UV23:%[0-9]+]]:_(p3), [[UV24:%[0-9]+]]:_(p3), [[UV25:%[0-9]+]]:_(p3), [[UV26:%[0-9]+]]:_(p3), [[UV27:%[0-9]+]]:_(p3), [[UV28:%[0-9]+]]:_(p3), [[UV29:%[0-9]+]]:_(p3), [[UV30:%[0-9]+]]:_(p3), [[UV31:%[0-9]+]]:_(p3) = G_UNMERGE_VALUES [[BITCAST1]](<16 x p3>)
1496    ; CHECK: [[UV32:%[0-9]+]]:_(p3), [[UV33:%[0-9]+]]:_(p3), [[UV34:%[0-9]+]]:_(p3), [[UV35:%[0-9]+]]:_(p3), [[UV36:%[0-9]+]]:_(p3), [[UV37:%[0-9]+]]:_(p3), [[UV38:%[0-9]+]]:_(p3), [[UV39:%[0-9]+]]:_(p3), [[UV40:%[0-9]+]]:_(p3), [[UV41:%[0-9]+]]:_(p3), [[UV42:%[0-9]+]]:_(p3), [[UV43:%[0-9]+]]:_(p3), [[UV44:%[0-9]+]]:_(p3), [[UV45:%[0-9]+]]:_(p3), [[UV46:%[0-9]+]]:_(p3), [[UV47:%[0-9]+]]:_(p3) = G_UNMERGE_VALUES [[BITCAST2]](<16 x p3>)
1497    ; CHECK: [[UV48:%[0-9]+]]:_(p3), [[UV49:%[0-9]+]]:_(p3), [[UV50:%[0-9]+]]:_(p3), [[UV51:%[0-9]+]]:_(p3), [[UV52:%[0-9]+]]:_(p3), [[UV53:%[0-9]+]]:_(p3), [[UV54:%[0-9]+]]:_(p3), [[UV55:%[0-9]+]]:_(p3), [[UV56:%[0-9]+]]:_(p3), [[UV57:%[0-9]+]]:_(p3), [[UV58:%[0-9]+]]:_(p3), [[UV59:%[0-9]+]]:_(p3), [[UV60:%[0-9]+]]:_(p3), [[UV61:%[0-9]+]]:_(p3), [[UV62:%[0-9]+]]:_(p3), [[UV63:%[0-9]+]]:_(p3) = G_UNMERGE_VALUES [[BITCAST3]](<16 x p3>)
1498    ; CHECK: G_STORE [[UV]](p3), [[FRAME_INDEX]](p5) :: (store 4 into %stack.0, align 256, addrspace 5)
1499    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
1500    ; CHECK: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C3]](s32)
1501    ; CHECK: G_STORE [[UV1]](p3), [[PTR_ADD3]](p5) :: (store 4 into %stack.0 + 4, align 256, addrspace 5)
1502    ; CHECK: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
1503    ; CHECK: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C4]](s32)
1504    ; CHECK: G_STORE [[UV2]](p3), [[PTR_ADD4]](p5) :: (store 4 into %stack.0 + 8, align 256, addrspace 5)
1505    ; CHECK: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 12
1506    ; CHECK: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C5]](s32)
1507    ; CHECK: G_STORE [[UV3]](p3), [[PTR_ADD5]](p5) :: (store 4 into %stack.0 + 12, align 256, addrspace 5)
1508    ; CHECK: [[C6:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
1509    ; CHECK: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C6]](s32)
1510    ; CHECK: G_STORE [[UV4]](p3), [[PTR_ADD6]](p5) :: (store 4 into %stack.0 + 16, align 256, addrspace 5)
1511    ; CHECK: [[C7:%[0-9]+]]:_(s32) = G_CONSTANT i32 20
1512    ; CHECK: [[PTR_ADD7:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C7]](s32)
1513    ; CHECK: G_STORE [[UV5]](p3), [[PTR_ADD7]](p5) :: (store 4 into %stack.0 + 20, align 256, addrspace 5)
1514    ; CHECK: [[C8:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
1515    ; CHECK: [[PTR_ADD8:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C8]](s32)
1516    ; CHECK: G_STORE [[UV6]](p3), [[PTR_ADD8]](p5) :: (store 4 into %stack.0 + 24, align 256, addrspace 5)
1517    ; CHECK: [[C9:%[0-9]+]]:_(s32) = G_CONSTANT i32 28
1518    ; CHECK: [[PTR_ADD9:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C9]](s32)
1519    ; CHECK: G_STORE [[UV7]](p3), [[PTR_ADD9]](p5) :: (store 4 into %stack.0 + 28, align 256, addrspace 5)
1520    ; CHECK: [[C10:%[0-9]+]]:_(s32) = G_CONSTANT i32 32
1521    ; CHECK: [[PTR_ADD10:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C10]](s32)
1522    ; CHECK: G_STORE [[UV8]](p3), [[PTR_ADD10]](p5) :: (store 4 into %stack.0 + 32, align 256, addrspace 5)
1523    ; CHECK: [[C11:%[0-9]+]]:_(s32) = G_CONSTANT i32 36
1524    ; CHECK: [[PTR_ADD11:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C11]](s32)
1525    ; CHECK: G_STORE [[UV9]](p3), [[PTR_ADD11]](p5) :: (store 4 into %stack.0 + 36, align 256, addrspace 5)
1526    ; CHECK: [[C12:%[0-9]+]]:_(s32) = G_CONSTANT i32 40
1527    ; CHECK: [[PTR_ADD12:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C12]](s32)
1528    ; CHECK: G_STORE [[UV10]](p3), [[PTR_ADD12]](p5) :: (store 4 into %stack.0 + 40, align 256, addrspace 5)
1529    ; CHECK: [[C13:%[0-9]+]]:_(s32) = G_CONSTANT i32 44
1530    ; CHECK: [[PTR_ADD13:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C13]](s32)
1531    ; CHECK: G_STORE [[UV11]](p3), [[PTR_ADD13]](p5) :: (store 4 into %stack.0 + 44, align 256, addrspace 5)
1532    ; CHECK: [[C14:%[0-9]+]]:_(s32) = G_CONSTANT i32 48
1533    ; CHECK: [[PTR_ADD14:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C14]](s32)
1534    ; CHECK: G_STORE [[UV12]](p3), [[PTR_ADD14]](p5) :: (store 4 into %stack.0 + 48, align 256, addrspace 5)
1535    ; CHECK: [[C15:%[0-9]+]]:_(s32) = G_CONSTANT i32 52
1536    ; CHECK: [[PTR_ADD15:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C15]](s32)
1537    ; CHECK: G_STORE [[UV13]](p3), [[PTR_ADD15]](p5) :: (store 4 into %stack.0 + 52, align 256, addrspace 5)
1538    ; CHECK: [[C16:%[0-9]+]]:_(s32) = G_CONSTANT i32 56
1539    ; CHECK: [[PTR_ADD16:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C16]](s32)
1540    ; CHECK: G_STORE [[UV14]](p3), [[PTR_ADD16]](p5) :: (store 4 into %stack.0 + 56, align 256, addrspace 5)
1541    ; CHECK: [[C17:%[0-9]+]]:_(s32) = G_CONSTANT i32 60
1542    ; CHECK: [[PTR_ADD17:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C17]](s32)
1543    ; CHECK: G_STORE [[UV15]](p3), [[PTR_ADD17]](p5) :: (store 4 into %stack.0 + 60, align 256, addrspace 5)
1544    ; CHECK: [[C18:%[0-9]+]]:_(s32) = G_CONSTANT i32 64
1545    ; CHECK: [[PTR_ADD18:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C18]](s32)
1546    ; CHECK: G_STORE [[UV16]](p3), [[PTR_ADD18]](p5) :: (store 4 into %stack.0 + 64, align 256, addrspace 5)
1547    ; CHECK: [[C19:%[0-9]+]]:_(s32) = G_CONSTANT i32 68
1548    ; CHECK: [[PTR_ADD19:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C19]](s32)
1549    ; CHECK: G_STORE [[UV17]](p3), [[PTR_ADD19]](p5) :: (store 4 into %stack.0 + 68, align 256, addrspace 5)
1550    ; CHECK: [[C20:%[0-9]+]]:_(s32) = G_CONSTANT i32 72
1551    ; CHECK: [[PTR_ADD20:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C20]](s32)
1552    ; CHECK: G_STORE [[UV18]](p3), [[PTR_ADD20]](p5) :: (store 4 into %stack.0 + 72, align 256, addrspace 5)
1553    ; CHECK: [[C21:%[0-9]+]]:_(s32) = G_CONSTANT i32 76
1554    ; CHECK: [[PTR_ADD21:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C21]](s32)
1555    ; CHECK: G_STORE [[UV19]](p3), [[PTR_ADD21]](p5) :: (store 4 into %stack.0 + 76, align 256, addrspace 5)
1556    ; CHECK: [[C22:%[0-9]+]]:_(s32) = G_CONSTANT i32 80
1557    ; CHECK: [[PTR_ADD22:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C22]](s32)
1558    ; CHECK: G_STORE [[UV20]](p3), [[PTR_ADD22]](p5) :: (store 4 into %stack.0 + 80, align 256, addrspace 5)
1559    ; CHECK: [[C23:%[0-9]+]]:_(s32) = G_CONSTANT i32 84
1560    ; CHECK: [[PTR_ADD23:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C23]](s32)
1561    ; CHECK: G_STORE [[UV21]](p3), [[PTR_ADD23]](p5) :: (store 4 into %stack.0 + 84, align 256, addrspace 5)
1562    ; CHECK: [[C24:%[0-9]+]]:_(s32) = G_CONSTANT i32 88
1563    ; CHECK: [[PTR_ADD24:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C24]](s32)
1564    ; CHECK: G_STORE [[UV22]](p3), [[PTR_ADD24]](p5) :: (store 4 into %stack.0 + 88, align 256, addrspace 5)
1565    ; CHECK: [[C25:%[0-9]+]]:_(s32) = G_CONSTANT i32 92
1566    ; CHECK: [[PTR_ADD25:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C25]](s32)
1567    ; CHECK: G_STORE [[UV23]](p3), [[PTR_ADD25]](p5) :: (store 4 into %stack.0 + 92, align 256, addrspace 5)
1568    ; CHECK: [[C26:%[0-9]+]]:_(s32) = G_CONSTANT i32 96
1569    ; CHECK: [[PTR_ADD26:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C26]](s32)
1570    ; CHECK: G_STORE [[UV24]](p3), [[PTR_ADD26]](p5) :: (store 4 into %stack.0 + 96, align 256, addrspace 5)
1571    ; CHECK: [[C27:%[0-9]+]]:_(s32) = G_CONSTANT i32 100
1572    ; CHECK: [[PTR_ADD27:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C27]](s32)
1573    ; CHECK: G_STORE [[UV25]](p3), [[PTR_ADD27]](p5) :: (store 4 into %stack.0 + 100, align 256, addrspace 5)
1574    ; CHECK: [[C28:%[0-9]+]]:_(s32) = G_CONSTANT i32 104
1575    ; CHECK: [[PTR_ADD28:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C28]](s32)
1576    ; CHECK: G_STORE [[UV26]](p3), [[PTR_ADD28]](p5) :: (store 4 into %stack.0 + 104, align 256, addrspace 5)
1577    ; CHECK: [[C29:%[0-9]+]]:_(s32) = G_CONSTANT i32 108
1578    ; CHECK: [[PTR_ADD29:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C29]](s32)
1579    ; CHECK: G_STORE [[UV27]](p3), [[PTR_ADD29]](p5) :: (store 4 into %stack.0 + 108, align 256, addrspace 5)
1580    ; CHECK: [[C30:%[0-9]+]]:_(s32) = G_CONSTANT i32 112
1581    ; CHECK: [[PTR_ADD30:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C30]](s32)
1582    ; CHECK: G_STORE [[UV28]](p3), [[PTR_ADD30]](p5) :: (store 4 into %stack.0 + 112, align 256, addrspace 5)
1583    ; CHECK: [[C31:%[0-9]+]]:_(s32) = G_CONSTANT i32 116
1584    ; CHECK: [[PTR_ADD31:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C31]](s32)
1585    ; CHECK: G_STORE [[UV29]](p3), [[PTR_ADD31]](p5) :: (store 4 into %stack.0 + 116, align 256, addrspace 5)
1586    ; CHECK: [[C32:%[0-9]+]]:_(s32) = G_CONSTANT i32 120
1587    ; CHECK: [[PTR_ADD32:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C32]](s32)
1588    ; CHECK: G_STORE [[UV30]](p3), [[PTR_ADD32]](p5) :: (store 4 into %stack.0 + 120, align 256, addrspace 5)
1589    ; CHECK: [[C33:%[0-9]+]]:_(s32) = G_CONSTANT i32 124
1590    ; CHECK: [[PTR_ADD33:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C33]](s32)
1591    ; CHECK: G_STORE [[UV31]](p3), [[PTR_ADD33]](p5) :: (store 4 into %stack.0 + 124, align 256, addrspace 5)
1592    ; CHECK: [[C34:%[0-9]+]]:_(s32) = G_CONSTANT i32 128
1593    ; CHECK: [[PTR_ADD34:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C34]](s32)
1594    ; CHECK: G_STORE [[UV32]](p3), [[PTR_ADD34]](p5) :: (store 4 into %stack.0 + 128, align 256, addrspace 5)
1595    ; CHECK: [[C35:%[0-9]+]]:_(s32) = G_CONSTANT i32 132
1596    ; CHECK: [[PTR_ADD35:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C35]](s32)
1597    ; CHECK: [[COPY1:%[0-9]+]]:_(p5) = COPY [[PTR_ADD35]](p5)
1598    ; CHECK: G_STORE [[UV33]](p3), [[COPY1]](p5) :: (store 4 into %stack.0 + 132, align 256, addrspace 5)
1599    ; CHECK: [[C36:%[0-9]+]]:_(s32) = G_CONSTANT i32 136
1600    ; CHECK: [[PTR_ADD36:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C36]](s32)
1601    ; CHECK: G_STORE [[UV34]](p3), [[PTR_ADD36]](p5) :: (store 4 into %stack.0 + 136, align 256, addrspace 5)
1602    ; CHECK: [[C37:%[0-9]+]]:_(s32) = G_CONSTANT i32 140
1603    ; CHECK: [[PTR_ADD37:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C37]](s32)
1604    ; CHECK: G_STORE [[UV35]](p3), [[PTR_ADD37]](p5) :: (store 4 into %stack.0 + 140, align 256, addrspace 5)
1605    ; CHECK: [[C38:%[0-9]+]]:_(s32) = G_CONSTANT i32 144
1606    ; CHECK: [[PTR_ADD38:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C38]](s32)
1607    ; CHECK: G_STORE [[UV36]](p3), [[PTR_ADD38]](p5) :: (store 4 into %stack.0 + 144, align 256, addrspace 5)
1608    ; CHECK: [[C39:%[0-9]+]]:_(s32) = G_CONSTANT i32 148
1609    ; CHECK: [[PTR_ADD39:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C39]](s32)
1610    ; CHECK: G_STORE [[UV37]](p3), [[PTR_ADD39]](p5) :: (store 4 into %stack.0 + 148, align 256, addrspace 5)
1611    ; CHECK: [[C40:%[0-9]+]]:_(s32) = G_CONSTANT i32 152
1612    ; CHECK: [[PTR_ADD40:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C40]](s32)
1613    ; CHECK: G_STORE [[UV38]](p3), [[PTR_ADD40]](p5) :: (store 4 into %stack.0 + 152, align 256, addrspace 5)
1614    ; CHECK: [[C41:%[0-9]+]]:_(s32) = G_CONSTANT i32 156
1615    ; CHECK: [[PTR_ADD41:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C41]](s32)
1616    ; CHECK: G_STORE [[UV39]](p3), [[PTR_ADD41]](p5) :: (store 4 into %stack.0 + 156, align 256, addrspace 5)
1617    ; CHECK: [[C42:%[0-9]+]]:_(s32) = G_CONSTANT i32 160
1618    ; CHECK: [[PTR_ADD42:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C42]](s32)
1619    ; CHECK: G_STORE [[UV40]](p3), [[PTR_ADD42]](p5) :: (store 4 into %stack.0 + 160, align 256, addrspace 5)
1620    ; CHECK: [[C43:%[0-9]+]]:_(s32) = G_CONSTANT i32 164
1621    ; CHECK: [[PTR_ADD43:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C43]](s32)
1622    ; CHECK: G_STORE [[UV41]](p3), [[PTR_ADD43]](p5) :: (store 4 into %stack.0 + 164, align 256, addrspace 5)
1623    ; CHECK: [[C44:%[0-9]+]]:_(s32) = G_CONSTANT i32 168
1624    ; CHECK: [[PTR_ADD44:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C44]](s32)
1625    ; CHECK: G_STORE [[UV42]](p3), [[PTR_ADD44]](p5) :: (store 4 into %stack.0 + 168, align 256, addrspace 5)
1626    ; CHECK: [[C45:%[0-9]+]]:_(s32) = G_CONSTANT i32 172
1627    ; CHECK: [[PTR_ADD45:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C45]](s32)
1628    ; CHECK: G_STORE [[UV43]](p3), [[PTR_ADD45]](p5) :: (store 4 into %stack.0 + 172, align 256, addrspace 5)
1629    ; CHECK: [[C46:%[0-9]+]]:_(s32) = G_CONSTANT i32 176
1630    ; CHECK: [[PTR_ADD46:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C46]](s32)
1631    ; CHECK: G_STORE [[UV44]](p3), [[PTR_ADD46]](p5) :: (store 4 into %stack.0 + 176, align 256, addrspace 5)
1632    ; CHECK: [[C47:%[0-9]+]]:_(s32) = G_CONSTANT i32 180
1633    ; CHECK: [[PTR_ADD47:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C47]](s32)
1634    ; CHECK: G_STORE [[UV45]](p3), [[PTR_ADD47]](p5) :: (store 4 into %stack.0 + 180, align 256, addrspace 5)
1635    ; CHECK: [[C48:%[0-9]+]]:_(s32) = G_CONSTANT i32 184
1636    ; CHECK: [[PTR_ADD48:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C48]](s32)
1637    ; CHECK: G_STORE [[UV46]](p3), [[PTR_ADD48]](p5) :: (store 4 into %stack.0 + 184, align 256, addrspace 5)
1638    ; CHECK: [[C49:%[0-9]+]]:_(s32) = G_CONSTANT i32 188
1639    ; CHECK: [[PTR_ADD49:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C49]](s32)
1640    ; CHECK: G_STORE [[UV47]](p3), [[PTR_ADD49]](p5) :: (store 4 into %stack.0 + 188, align 256, addrspace 5)
1641    ; CHECK: [[C50:%[0-9]+]]:_(s32) = G_CONSTANT i32 192
1642    ; CHECK: [[PTR_ADD50:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C50]](s32)
1643    ; CHECK: G_STORE [[UV48]](p3), [[PTR_ADD50]](p5) :: (store 4 into %stack.0 + 192, align 256, addrspace 5)
1644    ; CHECK: [[C51:%[0-9]+]]:_(s32) = G_CONSTANT i32 196
1645    ; CHECK: [[PTR_ADD51:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C51]](s32)
1646    ; CHECK: G_STORE [[UV49]](p3), [[PTR_ADD51]](p5) :: (store 4 into %stack.0 + 196, align 256, addrspace 5)
1647    ; CHECK: [[C52:%[0-9]+]]:_(s32) = G_CONSTANT i32 200
1648    ; CHECK: [[PTR_ADD52:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C52]](s32)
1649    ; CHECK: G_STORE [[UV50]](p3), [[PTR_ADD52]](p5) :: (store 4 into %stack.0 + 200, align 256, addrspace 5)
1650    ; CHECK: [[C53:%[0-9]+]]:_(s32) = G_CONSTANT i32 204
1651    ; CHECK: [[PTR_ADD53:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C53]](s32)
1652    ; CHECK: G_STORE [[UV51]](p3), [[PTR_ADD53]](p5) :: (store 4 into %stack.0 + 204, align 256, addrspace 5)
1653    ; CHECK: [[C54:%[0-9]+]]:_(s32) = G_CONSTANT i32 208
1654    ; CHECK: [[PTR_ADD54:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C54]](s32)
1655    ; CHECK: G_STORE [[UV52]](p3), [[PTR_ADD54]](p5) :: (store 4 into %stack.0 + 208, align 256, addrspace 5)
1656    ; CHECK: [[C55:%[0-9]+]]:_(s32) = G_CONSTANT i32 212
1657    ; CHECK: [[PTR_ADD55:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C55]](s32)
1658    ; CHECK: G_STORE [[UV53]](p3), [[PTR_ADD55]](p5) :: (store 4 into %stack.0 + 212, align 256, addrspace 5)
1659    ; CHECK: [[C56:%[0-9]+]]:_(s32) = G_CONSTANT i32 216
1660    ; CHECK: [[PTR_ADD56:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C56]](s32)
1661    ; CHECK: G_STORE [[UV54]](p3), [[PTR_ADD56]](p5) :: (store 4 into %stack.0 + 216, align 256, addrspace 5)
1662    ; CHECK: [[C57:%[0-9]+]]:_(s32) = G_CONSTANT i32 220
1663    ; CHECK: [[PTR_ADD57:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C57]](s32)
1664    ; CHECK: G_STORE [[UV55]](p3), [[PTR_ADD57]](p5) :: (store 4 into %stack.0 + 220, align 256, addrspace 5)
1665    ; CHECK: [[C58:%[0-9]+]]:_(s32) = G_CONSTANT i32 224
1666    ; CHECK: [[PTR_ADD58:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C58]](s32)
1667    ; CHECK: G_STORE [[UV56]](p3), [[PTR_ADD58]](p5) :: (store 4 into %stack.0 + 224, align 256, addrspace 5)
1668    ; CHECK: [[C59:%[0-9]+]]:_(s32) = G_CONSTANT i32 228
1669    ; CHECK: [[PTR_ADD59:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C59]](s32)
1670    ; CHECK: G_STORE [[UV57]](p3), [[PTR_ADD59]](p5) :: (store 4 into %stack.0 + 228, align 256, addrspace 5)
1671    ; CHECK: [[C60:%[0-9]+]]:_(s32) = G_CONSTANT i32 232
1672    ; CHECK: [[PTR_ADD60:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C60]](s32)
1673    ; CHECK: G_STORE [[UV58]](p3), [[PTR_ADD60]](p5) :: (store 4 into %stack.0 + 232, align 256, addrspace 5)
1674    ; CHECK: [[C61:%[0-9]+]]:_(s32) = G_CONSTANT i32 236
1675    ; CHECK: [[PTR_ADD61:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C61]](s32)
1676    ; CHECK: G_STORE [[UV59]](p3), [[PTR_ADD61]](p5) :: (store 4 into %stack.0 + 236, align 256, addrspace 5)
1677    ; CHECK: [[C62:%[0-9]+]]:_(s32) = G_CONSTANT i32 240
1678    ; CHECK: [[PTR_ADD62:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C62]](s32)
1679    ; CHECK: G_STORE [[UV60]](p3), [[PTR_ADD62]](p5) :: (store 4 into %stack.0 + 240, align 256, addrspace 5)
1680    ; CHECK: [[C63:%[0-9]+]]:_(s32) = G_CONSTANT i32 244
1681    ; CHECK: [[PTR_ADD63:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C63]](s32)
1682    ; CHECK: G_STORE [[UV61]](p3), [[PTR_ADD63]](p5) :: (store 4 into %stack.0 + 244, align 256, addrspace 5)
1683    ; CHECK: [[C64:%[0-9]+]]:_(s32) = G_CONSTANT i32 248
1684    ; CHECK: [[PTR_ADD64:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C64]](s32)
1685    ; CHECK: G_STORE [[UV62]](p3), [[PTR_ADD64]](p5) :: (store 4 into %stack.0 + 248, align 256, addrspace 5)
1686    ; CHECK: [[C65:%[0-9]+]]:_(s32) = G_CONSTANT i32 252
1687    ; CHECK: [[PTR_ADD65:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C65]](s32)
1688    ; CHECK: G_STORE [[UV63]](p3), [[PTR_ADD65]](p5) :: (store 4 into %stack.0 + 252, align 256, addrspace 5)
1689    ; CHECK: [[LOAD4:%[0-9]+]]:_(p3) = G_LOAD [[PTR_ADD35]](p5) :: (load 4 from %stack.0 + 132, addrspace 5)
1690    ; CHECK: S_ENDPGM 0, implicit [[LOAD4]](p3)
1691    %0:_(p1) = COPY $sgpr0_sgpr1
1692    %1:_(s32) = G_CONSTANT i32 33
1693    %2:_(<64 x p3>) = G_LOAD %0 :: (load 256, align 4, addrspace 4)
1694    %3:_(p3) = G_EXTRACT_VECTOR_ELT %2, %1
1695    S_ENDPGM 0, implicit %3
1696...
1697
1698---
1699name: extract_vector_elt_varidx_v64s32
1700
1701body: |
1702  bb.0:
1703    liveins: $sgpr0_sgpr1, $sgpr2
1704
1705    ; CHECK-LABEL: name: extract_vector_elt_varidx_v64s32
1706    ; CHECK: [[COPY:%[0-9]+]]:_(p1) = COPY $sgpr0_sgpr1
1707    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $sgpr2
1708    ; CHECK: [[LOAD:%[0-9]+]]:_(<16 x s32>) = G_LOAD [[COPY]](p1) :: (load 64, align 4, addrspace 4)
1709    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 64
1710    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
1711    ; CHECK: [[LOAD1:%[0-9]+]]:_(<16 x s32>) = G_LOAD [[PTR_ADD]](p1) :: (load 64 + 64, align 4, addrspace 4)
1712    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 128
1713    ; CHECK: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
1714    ; CHECK: [[LOAD2:%[0-9]+]]:_(<16 x s32>) = G_LOAD [[PTR_ADD1]](p1) :: (load 64 + 128, align 4, addrspace 4)
1715    ; CHECK: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 192
1716    ; CHECK: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
1717    ; CHECK: [[LOAD3:%[0-9]+]]:_(<16 x s32>) = G_LOAD [[PTR_ADD2]](p1) :: (load 64 + 192, align 4, addrspace 4)
1718    ; CHECK: [[FRAME_INDEX:%[0-9]+]]:_(p5) = G_FRAME_INDEX %stack.0
1719    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32), [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32), [[UV9:%[0-9]+]]:_(s32), [[UV10:%[0-9]+]]:_(s32), [[UV11:%[0-9]+]]:_(s32), [[UV12:%[0-9]+]]:_(s32), [[UV13:%[0-9]+]]:_(s32), [[UV14:%[0-9]+]]:_(s32), [[UV15:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[LOAD]](<16 x s32>)
1720    ; CHECK: [[UV16:%[0-9]+]]:_(s32), [[UV17:%[0-9]+]]:_(s32), [[UV18:%[0-9]+]]:_(s32), [[UV19:%[0-9]+]]:_(s32), [[UV20:%[0-9]+]]:_(s32), [[UV21:%[0-9]+]]:_(s32), [[UV22:%[0-9]+]]:_(s32), [[UV23:%[0-9]+]]:_(s32), [[UV24:%[0-9]+]]:_(s32), [[UV25:%[0-9]+]]:_(s32), [[UV26:%[0-9]+]]:_(s32), [[UV27:%[0-9]+]]:_(s32), [[UV28:%[0-9]+]]:_(s32), [[UV29:%[0-9]+]]:_(s32), [[UV30:%[0-9]+]]:_(s32), [[UV31:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[LOAD1]](<16 x s32>)
1721    ; CHECK: [[UV32:%[0-9]+]]:_(s32), [[UV33:%[0-9]+]]:_(s32), [[UV34:%[0-9]+]]:_(s32), [[UV35:%[0-9]+]]:_(s32), [[UV36:%[0-9]+]]:_(s32), [[UV37:%[0-9]+]]:_(s32), [[UV38:%[0-9]+]]:_(s32), [[UV39:%[0-9]+]]:_(s32), [[UV40:%[0-9]+]]:_(s32), [[UV41:%[0-9]+]]:_(s32), [[UV42:%[0-9]+]]:_(s32), [[UV43:%[0-9]+]]:_(s32), [[UV44:%[0-9]+]]:_(s32), [[UV45:%[0-9]+]]:_(s32), [[UV46:%[0-9]+]]:_(s32), [[UV47:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[LOAD2]](<16 x s32>)
1722    ; CHECK: [[UV48:%[0-9]+]]:_(s32), [[UV49:%[0-9]+]]:_(s32), [[UV50:%[0-9]+]]:_(s32), [[UV51:%[0-9]+]]:_(s32), [[UV52:%[0-9]+]]:_(s32), [[UV53:%[0-9]+]]:_(s32), [[UV54:%[0-9]+]]:_(s32), [[UV55:%[0-9]+]]:_(s32), [[UV56:%[0-9]+]]:_(s32), [[UV57:%[0-9]+]]:_(s32), [[UV58:%[0-9]+]]:_(s32), [[UV59:%[0-9]+]]:_(s32), [[UV60:%[0-9]+]]:_(s32), [[UV61:%[0-9]+]]:_(s32), [[UV62:%[0-9]+]]:_(s32), [[UV63:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[LOAD3]](<16 x s32>)
1723    ; CHECK: G_STORE [[UV]](s32), [[FRAME_INDEX]](p5) :: (store 4 into %stack.0, align 256, addrspace 5)
1724    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
1725    ; CHECK: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C3]](s32)
1726    ; CHECK: G_STORE [[UV1]](s32), [[PTR_ADD3]](p5) :: (store 4 into %stack.0 + 4, align 256, addrspace 5)
1727    ; CHECK: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
1728    ; CHECK: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C4]](s32)
1729    ; CHECK: G_STORE [[UV2]](s32), [[PTR_ADD4]](p5) :: (store 4 into %stack.0 + 8, align 256, addrspace 5)
1730    ; CHECK: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 12
1731    ; CHECK: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C5]](s32)
1732    ; CHECK: G_STORE [[UV3]](s32), [[PTR_ADD5]](p5) :: (store 4 into %stack.0 + 12, align 256, addrspace 5)
1733    ; CHECK: [[C6:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
1734    ; CHECK: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C6]](s32)
1735    ; CHECK: G_STORE [[UV4]](s32), [[PTR_ADD6]](p5) :: (store 4 into %stack.0 + 16, align 256, addrspace 5)
1736    ; CHECK: [[C7:%[0-9]+]]:_(s32) = G_CONSTANT i32 20
1737    ; CHECK: [[PTR_ADD7:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C7]](s32)
1738    ; CHECK: G_STORE [[UV5]](s32), [[PTR_ADD7]](p5) :: (store 4 into %stack.0 + 20, align 256, addrspace 5)
1739    ; CHECK: [[C8:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
1740    ; CHECK: [[PTR_ADD8:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C8]](s32)
1741    ; CHECK: G_STORE [[UV6]](s32), [[PTR_ADD8]](p5) :: (store 4 into %stack.0 + 24, align 256, addrspace 5)
1742    ; CHECK: [[C9:%[0-9]+]]:_(s32) = G_CONSTANT i32 28
1743    ; CHECK: [[PTR_ADD9:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C9]](s32)
1744    ; CHECK: G_STORE [[UV7]](s32), [[PTR_ADD9]](p5) :: (store 4 into %stack.0 + 28, align 256, addrspace 5)
1745    ; CHECK: [[C10:%[0-9]+]]:_(s32) = G_CONSTANT i32 32
1746    ; CHECK: [[PTR_ADD10:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C10]](s32)
1747    ; CHECK: G_STORE [[UV8]](s32), [[PTR_ADD10]](p5) :: (store 4 into %stack.0 + 32, align 256, addrspace 5)
1748    ; CHECK: [[C11:%[0-9]+]]:_(s32) = G_CONSTANT i32 36
1749    ; CHECK: [[PTR_ADD11:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C11]](s32)
1750    ; CHECK: G_STORE [[UV9]](s32), [[PTR_ADD11]](p5) :: (store 4 into %stack.0 + 36, align 256, addrspace 5)
1751    ; CHECK: [[C12:%[0-9]+]]:_(s32) = G_CONSTANT i32 40
1752    ; CHECK: [[PTR_ADD12:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C12]](s32)
1753    ; CHECK: G_STORE [[UV10]](s32), [[PTR_ADD12]](p5) :: (store 4 into %stack.0 + 40, align 256, addrspace 5)
1754    ; CHECK: [[C13:%[0-9]+]]:_(s32) = G_CONSTANT i32 44
1755    ; CHECK: [[PTR_ADD13:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C13]](s32)
1756    ; CHECK: G_STORE [[UV11]](s32), [[PTR_ADD13]](p5) :: (store 4 into %stack.0 + 44, align 256, addrspace 5)
1757    ; CHECK: [[C14:%[0-9]+]]:_(s32) = G_CONSTANT i32 48
1758    ; CHECK: [[PTR_ADD14:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C14]](s32)
1759    ; CHECK: G_STORE [[UV12]](s32), [[PTR_ADD14]](p5) :: (store 4 into %stack.0 + 48, align 256, addrspace 5)
1760    ; CHECK: [[C15:%[0-9]+]]:_(s32) = G_CONSTANT i32 52
1761    ; CHECK: [[PTR_ADD15:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C15]](s32)
1762    ; CHECK: G_STORE [[UV13]](s32), [[PTR_ADD15]](p5) :: (store 4 into %stack.0 + 52, align 256, addrspace 5)
1763    ; CHECK: [[C16:%[0-9]+]]:_(s32) = G_CONSTANT i32 56
1764    ; CHECK: [[PTR_ADD16:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C16]](s32)
1765    ; CHECK: G_STORE [[UV14]](s32), [[PTR_ADD16]](p5) :: (store 4 into %stack.0 + 56, align 256, addrspace 5)
1766    ; CHECK: [[C17:%[0-9]+]]:_(s32) = G_CONSTANT i32 60
1767    ; CHECK: [[PTR_ADD17:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C17]](s32)
1768    ; CHECK: G_STORE [[UV15]](s32), [[PTR_ADD17]](p5) :: (store 4 into %stack.0 + 60, align 256, addrspace 5)
1769    ; CHECK: [[C18:%[0-9]+]]:_(s32) = G_CONSTANT i32 64
1770    ; CHECK: [[PTR_ADD18:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C18]](s32)
1771    ; CHECK: G_STORE [[UV16]](s32), [[PTR_ADD18]](p5) :: (store 4 into %stack.0 + 64, align 256, addrspace 5)
1772    ; CHECK: [[C19:%[0-9]+]]:_(s32) = G_CONSTANT i32 68
1773    ; CHECK: [[PTR_ADD19:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C19]](s32)
1774    ; CHECK: G_STORE [[UV17]](s32), [[PTR_ADD19]](p5) :: (store 4 into %stack.0 + 68, align 256, addrspace 5)
1775    ; CHECK: [[C20:%[0-9]+]]:_(s32) = G_CONSTANT i32 72
1776    ; CHECK: [[PTR_ADD20:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C20]](s32)
1777    ; CHECK: G_STORE [[UV18]](s32), [[PTR_ADD20]](p5) :: (store 4 into %stack.0 + 72, align 256, addrspace 5)
1778    ; CHECK: [[C21:%[0-9]+]]:_(s32) = G_CONSTANT i32 76
1779    ; CHECK: [[PTR_ADD21:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C21]](s32)
1780    ; CHECK: G_STORE [[UV19]](s32), [[PTR_ADD21]](p5) :: (store 4 into %stack.0 + 76, align 256, addrspace 5)
1781    ; CHECK: [[C22:%[0-9]+]]:_(s32) = G_CONSTANT i32 80
1782    ; CHECK: [[PTR_ADD22:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C22]](s32)
1783    ; CHECK: G_STORE [[UV20]](s32), [[PTR_ADD22]](p5) :: (store 4 into %stack.0 + 80, align 256, addrspace 5)
1784    ; CHECK: [[C23:%[0-9]+]]:_(s32) = G_CONSTANT i32 84
1785    ; CHECK: [[PTR_ADD23:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C23]](s32)
1786    ; CHECK: G_STORE [[UV21]](s32), [[PTR_ADD23]](p5) :: (store 4 into %stack.0 + 84, align 256, addrspace 5)
1787    ; CHECK: [[C24:%[0-9]+]]:_(s32) = G_CONSTANT i32 88
1788    ; CHECK: [[PTR_ADD24:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C24]](s32)
1789    ; CHECK: G_STORE [[UV22]](s32), [[PTR_ADD24]](p5) :: (store 4 into %stack.0 + 88, align 256, addrspace 5)
1790    ; CHECK: [[C25:%[0-9]+]]:_(s32) = G_CONSTANT i32 92
1791    ; CHECK: [[PTR_ADD25:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C25]](s32)
1792    ; CHECK: G_STORE [[UV23]](s32), [[PTR_ADD25]](p5) :: (store 4 into %stack.0 + 92, align 256, addrspace 5)
1793    ; CHECK: [[C26:%[0-9]+]]:_(s32) = G_CONSTANT i32 96
1794    ; CHECK: [[PTR_ADD26:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C26]](s32)
1795    ; CHECK: G_STORE [[UV24]](s32), [[PTR_ADD26]](p5) :: (store 4 into %stack.0 + 96, align 256, addrspace 5)
1796    ; CHECK: [[C27:%[0-9]+]]:_(s32) = G_CONSTANT i32 100
1797    ; CHECK: [[PTR_ADD27:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C27]](s32)
1798    ; CHECK: G_STORE [[UV25]](s32), [[PTR_ADD27]](p5) :: (store 4 into %stack.0 + 100, align 256, addrspace 5)
1799    ; CHECK: [[C28:%[0-9]+]]:_(s32) = G_CONSTANT i32 104
1800    ; CHECK: [[PTR_ADD28:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C28]](s32)
1801    ; CHECK: G_STORE [[UV26]](s32), [[PTR_ADD28]](p5) :: (store 4 into %stack.0 + 104, align 256, addrspace 5)
1802    ; CHECK: [[C29:%[0-9]+]]:_(s32) = G_CONSTANT i32 108
1803    ; CHECK: [[PTR_ADD29:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C29]](s32)
1804    ; CHECK: G_STORE [[UV27]](s32), [[PTR_ADD29]](p5) :: (store 4 into %stack.0 + 108, align 256, addrspace 5)
1805    ; CHECK: [[C30:%[0-9]+]]:_(s32) = G_CONSTANT i32 112
1806    ; CHECK: [[PTR_ADD30:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C30]](s32)
1807    ; CHECK: G_STORE [[UV28]](s32), [[PTR_ADD30]](p5) :: (store 4 into %stack.0 + 112, align 256, addrspace 5)
1808    ; CHECK: [[C31:%[0-9]+]]:_(s32) = G_CONSTANT i32 116
1809    ; CHECK: [[PTR_ADD31:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C31]](s32)
1810    ; CHECK: G_STORE [[UV29]](s32), [[PTR_ADD31]](p5) :: (store 4 into %stack.0 + 116, align 256, addrspace 5)
1811    ; CHECK: [[C32:%[0-9]+]]:_(s32) = G_CONSTANT i32 120
1812    ; CHECK: [[PTR_ADD32:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C32]](s32)
1813    ; CHECK: G_STORE [[UV30]](s32), [[PTR_ADD32]](p5) :: (store 4 into %stack.0 + 120, align 256, addrspace 5)
1814    ; CHECK: [[C33:%[0-9]+]]:_(s32) = G_CONSTANT i32 124
1815    ; CHECK: [[PTR_ADD33:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C33]](s32)
1816    ; CHECK: G_STORE [[UV31]](s32), [[PTR_ADD33]](p5) :: (store 4 into %stack.0 + 124, align 256, addrspace 5)
1817    ; CHECK: [[C34:%[0-9]+]]:_(s32) = G_CONSTANT i32 128
1818    ; CHECK: [[PTR_ADD34:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C34]](s32)
1819    ; CHECK: G_STORE [[UV32]](s32), [[PTR_ADD34]](p5) :: (store 4 into %stack.0 + 128, align 256, addrspace 5)
1820    ; CHECK: [[C35:%[0-9]+]]:_(s32) = G_CONSTANT i32 132
1821    ; CHECK: [[PTR_ADD35:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C35]](s32)
1822    ; CHECK: G_STORE [[UV33]](s32), [[PTR_ADD35]](p5) :: (store 4 into %stack.0 + 132, align 256, addrspace 5)
1823    ; CHECK: [[C36:%[0-9]+]]:_(s32) = G_CONSTANT i32 136
1824    ; CHECK: [[PTR_ADD36:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C36]](s32)
1825    ; CHECK: G_STORE [[UV34]](s32), [[PTR_ADD36]](p5) :: (store 4 into %stack.0 + 136, align 256, addrspace 5)
1826    ; CHECK: [[C37:%[0-9]+]]:_(s32) = G_CONSTANT i32 140
1827    ; CHECK: [[PTR_ADD37:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C37]](s32)
1828    ; CHECK: G_STORE [[UV35]](s32), [[PTR_ADD37]](p5) :: (store 4 into %stack.0 + 140, align 256, addrspace 5)
1829    ; CHECK: [[C38:%[0-9]+]]:_(s32) = G_CONSTANT i32 144
1830    ; CHECK: [[PTR_ADD38:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C38]](s32)
1831    ; CHECK: G_STORE [[UV36]](s32), [[PTR_ADD38]](p5) :: (store 4 into %stack.0 + 144, align 256, addrspace 5)
1832    ; CHECK: [[C39:%[0-9]+]]:_(s32) = G_CONSTANT i32 148
1833    ; CHECK: [[PTR_ADD39:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C39]](s32)
1834    ; CHECK: G_STORE [[UV37]](s32), [[PTR_ADD39]](p5) :: (store 4 into %stack.0 + 148, align 256, addrspace 5)
1835    ; CHECK: [[C40:%[0-9]+]]:_(s32) = G_CONSTANT i32 152
1836    ; CHECK: [[PTR_ADD40:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C40]](s32)
1837    ; CHECK: G_STORE [[UV38]](s32), [[PTR_ADD40]](p5) :: (store 4 into %stack.0 + 152, align 256, addrspace 5)
1838    ; CHECK: [[C41:%[0-9]+]]:_(s32) = G_CONSTANT i32 156
1839    ; CHECK: [[PTR_ADD41:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C41]](s32)
1840    ; CHECK: G_STORE [[UV39]](s32), [[PTR_ADD41]](p5) :: (store 4 into %stack.0 + 156, align 256, addrspace 5)
1841    ; CHECK: [[C42:%[0-9]+]]:_(s32) = G_CONSTANT i32 160
1842    ; CHECK: [[PTR_ADD42:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C42]](s32)
1843    ; CHECK: G_STORE [[UV40]](s32), [[PTR_ADD42]](p5) :: (store 4 into %stack.0 + 160, align 256, addrspace 5)
1844    ; CHECK: [[C43:%[0-9]+]]:_(s32) = G_CONSTANT i32 164
1845    ; CHECK: [[PTR_ADD43:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C43]](s32)
1846    ; CHECK: G_STORE [[UV41]](s32), [[PTR_ADD43]](p5) :: (store 4 into %stack.0 + 164, align 256, addrspace 5)
1847    ; CHECK: [[C44:%[0-9]+]]:_(s32) = G_CONSTANT i32 168
1848    ; CHECK: [[PTR_ADD44:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C44]](s32)
1849    ; CHECK: G_STORE [[UV42]](s32), [[PTR_ADD44]](p5) :: (store 4 into %stack.0 + 168, align 256, addrspace 5)
1850    ; CHECK: [[C45:%[0-9]+]]:_(s32) = G_CONSTANT i32 172
1851    ; CHECK: [[PTR_ADD45:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C45]](s32)
1852    ; CHECK: G_STORE [[UV43]](s32), [[PTR_ADD45]](p5) :: (store 4 into %stack.0 + 172, align 256, addrspace 5)
1853    ; CHECK: [[C46:%[0-9]+]]:_(s32) = G_CONSTANT i32 176
1854    ; CHECK: [[PTR_ADD46:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C46]](s32)
1855    ; CHECK: G_STORE [[UV44]](s32), [[PTR_ADD46]](p5) :: (store 4 into %stack.0 + 176, align 256, addrspace 5)
1856    ; CHECK: [[C47:%[0-9]+]]:_(s32) = G_CONSTANT i32 180
1857    ; CHECK: [[PTR_ADD47:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C47]](s32)
1858    ; CHECK: G_STORE [[UV45]](s32), [[PTR_ADD47]](p5) :: (store 4 into %stack.0 + 180, align 256, addrspace 5)
1859    ; CHECK: [[C48:%[0-9]+]]:_(s32) = G_CONSTANT i32 184
1860    ; CHECK: [[PTR_ADD48:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C48]](s32)
1861    ; CHECK: G_STORE [[UV46]](s32), [[PTR_ADD48]](p5) :: (store 4 into %stack.0 + 184, align 256, addrspace 5)
1862    ; CHECK: [[C49:%[0-9]+]]:_(s32) = G_CONSTANT i32 188
1863    ; CHECK: [[PTR_ADD49:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C49]](s32)
1864    ; CHECK: G_STORE [[UV47]](s32), [[PTR_ADD49]](p5) :: (store 4 into %stack.0 + 188, align 256, addrspace 5)
1865    ; CHECK: [[C50:%[0-9]+]]:_(s32) = G_CONSTANT i32 192
1866    ; CHECK: [[PTR_ADD50:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C50]](s32)
1867    ; CHECK: G_STORE [[UV48]](s32), [[PTR_ADD50]](p5) :: (store 4 into %stack.0 + 192, align 256, addrspace 5)
1868    ; CHECK: [[C51:%[0-9]+]]:_(s32) = G_CONSTANT i32 196
1869    ; CHECK: [[PTR_ADD51:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C51]](s32)
1870    ; CHECK: G_STORE [[UV49]](s32), [[PTR_ADD51]](p5) :: (store 4 into %stack.0 + 196, align 256, addrspace 5)
1871    ; CHECK: [[C52:%[0-9]+]]:_(s32) = G_CONSTANT i32 200
1872    ; CHECK: [[PTR_ADD52:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C52]](s32)
1873    ; CHECK: G_STORE [[UV50]](s32), [[PTR_ADD52]](p5) :: (store 4 into %stack.0 + 200, align 256, addrspace 5)
1874    ; CHECK: [[C53:%[0-9]+]]:_(s32) = G_CONSTANT i32 204
1875    ; CHECK: [[PTR_ADD53:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C53]](s32)
1876    ; CHECK: G_STORE [[UV51]](s32), [[PTR_ADD53]](p5) :: (store 4 into %stack.0 + 204, align 256, addrspace 5)
1877    ; CHECK: [[C54:%[0-9]+]]:_(s32) = G_CONSTANT i32 208
1878    ; CHECK: [[PTR_ADD54:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C54]](s32)
1879    ; CHECK: G_STORE [[UV52]](s32), [[PTR_ADD54]](p5) :: (store 4 into %stack.0 + 208, align 256, addrspace 5)
1880    ; CHECK: [[C55:%[0-9]+]]:_(s32) = G_CONSTANT i32 212
1881    ; CHECK: [[PTR_ADD55:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C55]](s32)
1882    ; CHECK: G_STORE [[UV53]](s32), [[PTR_ADD55]](p5) :: (store 4 into %stack.0 + 212, align 256, addrspace 5)
1883    ; CHECK: [[C56:%[0-9]+]]:_(s32) = G_CONSTANT i32 216
1884    ; CHECK: [[PTR_ADD56:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C56]](s32)
1885    ; CHECK: G_STORE [[UV54]](s32), [[PTR_ADD56]](p5) :: (store 4 into %stack.0 + 216, align 256, addrspace 5)
1886    ; CHECK: [[C57:%[0-9]+]]:_(s32) = G_CONSTANT i32 220
1887    ; CHECK: [[PTR_ADD57:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C57]](s32)
1888    ; CHECK: G_STORE [[UV55]](s32), [[PTR_ADD57]](p5) :: (store 4 into %stack.0 + 220, align 256, addrspace 5)
1889    ; CHECK: [[C58:%[0-9]+]]:_(s32) = G_CONSTANT i32 224
1890    ; CHECK: [[PTR_ADD58:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C58]](s32)
1891    ; CHECK: G_STORE [[UV56]](s32), [[PTR_ADD58]](p5) :: (store 4 into %stack.0 + 224, align 256, addrspace 5)
1892    ; CHECK: [[C59:%[0-9]+]]:_(s32) = G_CONSTANT i32 228
1893    ; CHECK: [[PTR_ADD59:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C59]](s32)
1894    ; CHECK: G_STORE [[UV57]](s32), [[PTR_ADD59]](p5) :: (store 4 into %stack.0 + 228, align 256, addrspace 5)
1895    ; CHECK: [[C60:%[0-9]+]]:_(s32) = G_CONSTANT i32 232
1896    ; CHECK: [[PTR_ADD60:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C60]](s32)
1897    ; CHECK: G_STORE [[UV58]](s32), [[PTR_ADD60]](p5) :: (store 4 into %stack.0 + 232, align 256, addrspace 5)
1898    ; CHECK: [[C61:%[0-9]+]]:_(s32) = G_CONSTANT i32 236
1899    ; CHECK: [[PTR_ADD61:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C61]](s32)
1900    ; CHECK: G_STORE [[UV59]](s32), [[PTR_ADD61]](p5) :: (store 4 into %stack.0 + 236, align 256, addrspace 5)
1901    ; CHECK: [[C62:%[0-9]+]]:_(s32) = G_CONSTANT i32 240
1902    ; CHECK: [[PTR_ADD62:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C62]](s32)
1903    ; CHECK: G_STORE [[UV60]](s32), [[PTR_ADD62]](p5) :: (store 4 into %stack.0 + 240, align 256, addrspace 5)
1904    ; CHECK: [[C63:%[0-9]+]]:_(s32) = G_CONSTANT i32 244
1905    ; CHECK: [[PTR_ADD63:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C63]](s32)
1906    ; CHECK: G_STORE [[UV61]](s32), [[PTR_ADD63]](p5) :: (store 4 into %stack.0 + 244, align 256, addrspace 5)
1907    ; CHECK: [[C64:%[0-9]+]]:_(s32) = G_CONSTANT i32 248
1908    ; CHECK: [[PTR_ADD64:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C64]](s32)
1909    ; CHECK: G_STORE [[UV62]](s32), [[PTR_ADD64]](p5) :: (store 4 into %stack.0 + 248, align 256, addrspace 5)
1910    ; CHECK: [[C65:%[0-9]+]]:_(s32) = G_CONSTANT i32 252
1911    ; CHECK: [[PTR_ADD65:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[C65]](s32)
1912    ; CHECK: G_STORE [[UV63]](s32), [[PTR_ADD65]](p5) :: (store 4 into %stack.0 + 252, align 256, addrspace 5)
1913    ; CHECK: [[C66:%[0-9]+]]:_(s32) = G_CONSTANT i32 63
1914    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C66]]
1915    ; CHECK: [[MUL:%[0-9]+]]:_(s32) = G_MUL [[AND]], [[C3]]
1916    ; CHECK: [[PTR_ADD66:%[0-9]+]]:_(p5) = G_PTR_ADD [[FRAME_INDEX]], [[MUL]](s32)
1917    ; CHECK: [[LOAD4:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD66]](p5) :: (load 4, addrspace 5)
1918    ; CHECK: S_ENDPGM 0, implicit [[LOAD4]](s32)
1919    %0:_(p1) = COPY $sgpr0_sgpr1
1920    %1:_(s32) = COPY $sgpr2
1921    %2:_(<64 x s32>) = G_LOAD %0 :: (load 256, align 4, addrspace 4)
1922    %3:_(s32) = G_EXTRACT_VECTOR_ELT %2, %1
1923    S_ENDPGM 0, implicit %3
1924...
1925
1926---
1927name: extract_vector_elt_v32s1_varidx_i32
1928
1929body: |
1930  bb.0:
1931    liveins: $vgpr0, $vgpr1
1932
1933    ; CHECK-LABEL: name: extract_vector_elt_v32s1_varidx_i32
1934    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
1935    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1936    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
1937    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32)
1938    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 2
1939    ; CHECK: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C1]](s32)
1940    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 3
1941    ; CHECK: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C2]](s32)
1942    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
1943    ; CHECK: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C3]](s32)
1944    ; CHECK: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 5
1945    ; CHECK: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C4]](s32)
1946    ; CHECK: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 6
1947    ; CHECK: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C5]](s32)
1948    ; CHECK: [[C6:%[0-9]+]]:_(s32) = G_CONSTANT i32 7
1949    ; CHECK: [[LSHR6:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C6]](s32)
1950    ; CHECK: [[C7:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
1951    ; CHECK: [[LSHR7:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C7]](s32)
1952    ; CHECK: [[C8:%[0-9]+]]:_(s32) = G_CONSTANT i32 9
1953    ; CHECK: [[LSHR8:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C8]](s32)
1954    ; CHECK: [[C9:%[0-9]+]]:_(s32) = G_CONSTANT i32 10
1955    ; CHECK: [[LSHR9:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C9]](s32)
1956    ; CHECK: [[C10:%[0-9]+]]:_(s32) = G_CONSTANT i32 11
1957    ; CHECK: [[LSHR10:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C10]](s32)
1958    ; CHECK: [[C11:%[0-9]+]]:_(s32) = G_CONSTANT i32 12
1959    ; CHECK: [[LSHR11:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C11]](s32)
1960    ; CHECK: [[C12:%[0-9]+]]:_(s32) = G_CONSTANT i32 13
1961    ; CHECK: [[LSHR12:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C12]](s32)
1962    ; CHECK: [[C13:%[0-9]+]]:_(s32) = G_CONSTANT i32 14
1963    ; CHECK: [[LSHR13:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C13]](s32)
1964    ; CHECK: [[C14:%[0-9]+]]:_(s32) = G_CONSTANT i32 15
1965    ; CHECK: [[LSHR14:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C14]](s32)
1966    ; CHECK: [[C15:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
1967    ; CHECK: [[LSHR15:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C15]](s32)
1968    ; CHECK: [[C16:%[0-9]+]]:_(s32) = G_CONSTANT i32 17
1969    ; CHECK: [[LSHR16:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C16]](s32)
1970    ; CHECK: [[C17:%[0-9]+]]:_(s32) = G_CONSTANT i32 18
1971    ; CHECK: [[LSHR17:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C17]](s32)
1972    ; CHECK: [[C18:%[0-9]+]]:_(s32) = G_CONSTANT i32 19
1973    ; CHECK: [[LSHR18:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C18]](s32)
1974    ; CHECK: [[C19:%[0-9]+]]:_(s32) = G_CONSTANT i32 20
1975    ; CHECK: [[LSHR19:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C19]](s32)
1976    ; CHECK: [[C20:%[0-9]+]]:_(s32) = G_CONSTANT i32 21
1977    ; CHECK: [[LSHR20:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C20]](s32)
1978    ; CHECK: [[C21:%[0-9]+]]:_(s32) = G_CONSTANT i32 22
1979    ; CHECK: [[LSHR21:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C21]](s32)
1980    ; CHECK: [[C22:%[0-9]+]]:_(s32) = G_CONSTANT i32 23
1981    ; CHECK: [[LSHR22:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C22]](s32)
1982    ; CHECK: [[C23:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
1983    ; CHECK: [[LSHR23:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C23]](s32)
1984    ; CHECK: [[C24:%[0-9]+]]:_(s32) = G_CONSTANT i32 25
1985    ; CHECK: [[LSHR24:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C24]](s32)
1986    ; CHECK: [[C25:%[0-9]+]]:_(s32) = G_CONSTANT i32 26
1987    ; CHECK: [[LSHR25:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C25]](s32)
1988    ; CHECK: [[C26:%[0-9]+]]:_(s32) = G_CONSTANT i32 27
1989    ; CHECK: [[LSHR26:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C26]](s32)
1990    ; CHECK: [[C27:%[0-9]+]]:_(s32) = G_CONSTANT i32 28
1991    ; CHECK: [[LSHR27:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C27]](s32)
1992    ; CHECK: [[C28:%[0-9]+]]:_(s32) = G_CONSTANT i32 29
1993    ; CHECK: [[LSHR28:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C28]](s32)
1994    ; CHECK: [[C29:%[0-9]+]]:_(s32) = G_CONSTANT i32 30
1995    ; CHECK: [[LSHR29:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C29]](s32)
1996    ; CHECK: [[C30:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
1997    ; CHECK: [[LSHR30:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C30]](s32)
1998    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
1999    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
2000    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
2001    ; CHECK: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C]]
2002    ; CHECK: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32)
2003    ; CHECK: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
2004    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
2005    ; CHECK: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C]]
2006    ; CHECK: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C1]](s32)
2007    ; CHECK: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
2008    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
2009    ; CHECK: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY5]], [[C]]
2010    ; CHECK: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C2]](s32)
2011    ; CHECK: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[SHL2]]
2012    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
2013    ; CHECK: [[AND4:%[0-9]+]]:_(s32) = G_AND [[COPY6]], [[C]]
2014    ; CHECK: [[SHL3:%[0-9]+]]:_(s32) = G_SHL [[AND4]], [[C3]](s32)
2015    ; CHECK: [[OR3:%[0-9]+]]:_(s32) = G_OR [[OR2]], [[SHL3]]
2016    ; CHECK: [[COPY7:%[0-9]+]]:_(s32) = COPY [[LSHR4]](s32)
2017    ; CHECK: [[AND5:%[0-9]+]]:_(s32) = G_AND [[COPY7]], [[C]]
2018    ; CHECK: [[SHL4:%[0-9]+]]:_(s32) = G_SHL [[AND5]], [[C4]](s32)
2019    ; CHECK: [[OR4:%[0-9]+]]:_(s32) = G_OR [[OR3]], [[SHL4]]
2020    ; CHECK: [[COPY8:%[0-9]+]]:_(s32) = COPY [[LSHR5]](s32)
2021    ; CHECK: [[AND6:%[0-9]+]]:_(s32) = G_AND [[COPY8]], [[C]]
2022    ; CHECK: [[SHL5:%[0-9]+]]:_(s32) = G_SHL [[AND6]], [[C5]](s32)
2023    ; CHECK: [[OR5:%[0-9]+]]:_(s32) = G_OR [[OR4]], [[SHL5]]
2024    ; CHECK: [[COPY9:%[0-9]+]]:_(s32) = COPY [[LSHR6]](s32)
2025    ; CHECK: [[AND7:%[0-9]+]]:_(s32) = G_AND [[COPY9]], [[C]]
2026    ; CHECK: [[SHL6:%[0-9]+]]:_(s32) = G_SHL [[AND7]], [[C6]](s32)
2027    ; CHECK: [[OR6:%[0-9]+]]:_(s32) = G_OR [[OR5]], [[SHL6]]
2028    ; CHECK: [[COPY10:%[0-9]+]]:_(s32) = COPY [[LSHR7]](s32)
2029    ; CHECK: [[AND8:%[0-9]+]]:_(s32) = G_AND [[COPY10]], [[C]]
2030    ; CHECK: [[SHL7:%[0-9]+]]:_(s32) = G_SHL [[AND8]], [[C7]](s32)
2031    ; CHECK: [[OR7:%[0-9]+]]:_(s32) = G_OR [[OR6]], [[SHL7]]
2032    ; CHECK: [[COPY11:%[0-9]+]]:_(s32) = COPY [[LSHR8]](s32)
2033    ; CHECK: [[AND9:%[0-9]+]]:_(s32) = G_AND [[COPY11]], [[C]]
2034    ; CHECK: [[SHL8:%[0-9]+]]:_(s32) = G_SHL [[AND9]], [[C8]](s32)
2035    ; CHECK: [[OR8:%[0-9]+]]:_(s32) = G_OR [[OR7]], [[SHL8]]
2036    ; CHECK: [[COPY12:%[0-9]+]]:_(s32) = COPY [[LSHR9]](s32)
2037    ; CHECK: [[AND10:%[0-9]+]]:_(s32) = G_AND [[COPY12]], [[C]]
2038    ; CHECK: [[SHL9:%[0-9]+]]:_(s32) = G_SHL [[AND10]], [[C9]](s32)
2039    ; CHECK: [[OR9:%[0-9]+]]:_(s32) = G_OR [[OR8]], [[SHL9]]
2040    ; CHECK: [[COPY13:%[0-9]+]]:_(s32) = COPY [[LSHR10]](s32)
2041    ; CHECK: [[AND11:%[0-9]+]]:_(s32) = G_AND [[COPY13]], [[C]]
2042    ; CHECK: [[SHL10:%[0-9]+]]:_(s32) = G_SHL [[AND11]], [[C10]](s32)
2043    ; CHECK: [[OR10:%[0-9]+]]:_(s32) = G_OR [[OR9]], [[SHL10]]
2044    ; CHECK: [[COPY14:%[0-9]+]]:_(s32) = COPY [[LSHR11]](s32)
2045    ; CHECK: [[AND12:%[0-9]+]]:_(s32) = G_AND [[COPY14]], [[C]]
2046    ; CHECK: [[SHL11:%[0-9]+]]:_(s32) = G_SHL [[AND12]], [[C11]](s32)
2047    ; CHECK: [[OR11:%[0-9]+]]:_(s32) = G_OR [[OR10]], [[SHL11]]
2048    ; CHECK: [[COPY15:%[0-9]+]]:_(s32) = COPY [[LSHR12]](s32)
2049    ; CHECK: [[AND13:%[0-9]+]]:_(s32) = G_AND [[COPY15]], [[C]]
2050    ; CHECK: [[SHL12:%[0-9]+]]:_(s32) = G_SHL [[AND13]], [[C12]](s32)
2051    ; CHECK: [[OR12:%[0-9]+]]:_(s32) = G_OR [[OR11]], [[SHL12]]
2052    ; CHECK: [[COPY16:%[0-9]+]]:_(s32) = COPY [[LSHR13]](s32)
2053    ; CHECK: [[AND14:%[0-9]+]]:_(s32) = G_AND [[COPY16]], [[C]]
2054    ; CHECK: [[SHL13:%[0-9]+]]:_(s32) = G_SHL [[AND14]], [[C13]](s32)
2055    ; CHECK: [[OR13:%[0-9]+]]:_(s32) = G_OR [[OR12]], [[SHL13]]
2056    ; CHECK: [[COPY17:%[0-9]+]]:_(s32) = COPY [[LSHR14]](s32)
2057    ; CHECK: [[AND15:%[0-9]+]]:_(s32) = G_AND [[COPY17]], [[C]]
2058    ; CHECK: [[SHL14:%[0-9]+]]:_(s32) = G_SHL [[AND15]], [[C14]](s32)
2059    ; CHECK: [[OR14:%[0-9]+]]:_(s32) = G_OR [[OR13]], [[SHL14]]
2060    ; CHECK: [[COPY18:%[0-9]+]]:_(s32) = COPY [[LSHR15]](s32)
2061    ; CHECK: [[AND16:%[0-9]+]]:_(s32) = G_AND [[COPY18]], [[C]]
2062    ; CHECK: [[SHL15:%[0-9]+]]:_(s32) = G_SHL [[AND16]], [[C15]](s32)
2063    ; CHECK: [[OR15:%[0-9]+]]:_(s32) = G_OR [[OR14]], [[SHL15]]
2064    ; CHECK: [[COPY19:%[0-9]+]]:_(s32) = COPY [[LSHR16]](s32)
2065    ; CHECK: [[AND17:%[0-9]+]]:_(s32) = G_AND [[COPY19]], [[C]]
2066    ; CHECK: [[SHL16:%[0-9]+]]:_(s32) = G_SHL [[AND17]], [[C16]](s32)
2067    ; CHECK: [[OR16:%[0-9]+]]:_(s32) = G_OR [[OR15]], [[SHL16]]
2068    ; CHECK: [[COPY20:%[0-9]+]]:_(s32) = COPY [[LSHR17]](s32)
2069    ; CHECK: [[AND18:%[0-9]+]]:_(s32) = G_AND [[COPY20]], [[C]]
2070    ; CHECK: [[SHL17:%[0-9]+]]:_(s32) = G_SHL [[AND18]], [[C17]](s32)
2071    ; CHECK: [[OR17:%[0-9]+]]:_(s32) = G_OR [[OR16]], [[SHL17]]
2072    ; CHECK: [[COPY21:%[0-9]+]]:_(s32) = COPY [[LSHR18]](s32)
2073    ; CHECK: [[AND19:%[0-9]+]]:_(s32) = G_AND [[COPY21]], [[C]]
2074    ; CHECK: [[SHL18:%[0-9]+]]:_(s32) = G_SHL [[AND19]], [[C18]](s32)
2075    ; CHECK: [[OR18:%[0-9]+]]:_(s32) = G_OR [[OR17]], [[SHL18]]
2076    ; CHECK: [[COPY22:%[0-9]+]]:_(s32) = COPY [[LSHR19]](s32)
2077    ; CHECK: [[AND20:%[0-9]+]]:_(s32) = G_AND [[COPY22]], [[C]]
2078    ; CHECK: [[SHL19:%[0-9]+]]:_(s32) = G_SHL [[AND20]], [[C19]](s32)
2079    ; CHECK: [[OR19:%[0-9]+]]:_(s32) = G_OR [[OR18]], [[SHL19]]
2080    ; CHECK: [[COPY23:%[0-9]+]]:_(s32) = COPY [[LSHR20]](s32)
2081    ; CHECK: [[AND21:%[0-9]+]]:_(s32) = G_AND [[COPY23]], [[C]]
2082    ; CHECK: [[SHL20:%[0-9]+]]:_(s32) = G_SHL [[AND21]], [[C20]](s32)
2083    ; CHECK: [[OR20:%[0-9]+]]:_(s32) = G_OR [[OR19]], [[SHL20]]
2084    ; CHECK: [[COPY24:%[0-9]+]]:_(s32) = COPY [[LSHR21]](s32)
2085    ; CHECK: [[AND22:%[0-9]+]]:_(s32) = G_AND [[COPY24]], [[C]]
2086    ; CHECK: [[SHL21:%[0-9]+]]:_(s32) = G_SHL [[AND22]], [[C21]](s32)
2087    ; CHECK: [[OR21:%[0-9]+]]:_(s32) = G_OR [[OR20]], [[SHL21]]
2088    ; CHECK: [[COPY25:%[0-9]+]]:_(s32) = COPY [[LSHR22]](s32)
2089    ; CHECK: [[AND23:%[0-9]+]]:_(s32) = G_AND [[COPY25]], [[C]]
2090    ; CHECK: [[SHL22:%[0-9]+]]:_(s32) = G_SHL [[AND23]], [[C22]](s32)
2091    ; CHECK: [[OR22:%[0-9]+]]:_(s32) = G_OR [[OR21]], [[SHL22]]
2092    ; CHECK: [[COPY26:%[0-9]+]]:_(s32) = COPY [[LSHR23]](s32)
2093    ; CHECK: [[AND24:%[0-9]+]]:_(s32) = G_AND [[COPY26]], [[C]]
2094    ; CHECK: [[SHL23:%[0-9]+]]:_(s32) = G_SHL [[AND24]], [[C23]](s32)
2095    ; CHECK: [[OR23:%[0-9]+]]:_(s32) = G_OR [[OR22]], [[SHL23]]
2096    ; CHECK: [[COPY27:%[0-9]+]]:_(s32) = COPY [[LSHR24]](s32)
2097    ; CHECK: [[AND25:%[0-9]+]]:_(s32) = G_AND [[COPY27]], [[C]]
2098    ; CHECK: [[SHL24:%[0-9]+]]:_(s32) = G_SHL [[AND25]], [[C24]](s32)
2099    ; CHECK: [[OR24:%[0-9]+]]:_(s32) = G_OR [[OR23]], [[SHL24]]
2100    ; CHECK: [[COPY28:%[0-9]+]]:_(s32) = COPY [[LSHR25]](s32)
2101    ; CHECK: [[AND26:%[0-9]+]]:_(s32) = G_AND [[COPY28]], [[C]]
2102    ; CHECK: [[SHL25:%[0-9]+]]:_(s32) = G_SHL [[AND26]], [[C25]](s32)
2103    ; CHECK: [[OR25:%[0-9]+]]:_(s32) = G_OR [[OR24]], [[SHL25]]
2104    ; CHECK: [[COPY29:%[0-9]+]]:_(s32) = COPY [[LSHR26]](s32)
2105    ; CHECK: [[AND27:%[0-9]+]]:_(s32) = G_AND [[COPY29]], [[C]]
2106    ; CHECK: [[SHL26:%[0-9]+]]:_(s32) = G_SHL [[AND27]], [[C26]](s32)
2107    ; CHECK: [[OR26:%[0-9]+]]:_(s32) = G_OR [[OR25]], [[SHL26]]
2108    ; CHECK: [[COPY30:%[0-9]+]]:_(s32) = COPY [[LSHR27]](s32)
2109    ; CHECK: [[AND28:%[0-9]+]]:_(s32) = G_AND [[COPY30]], [[C]]
2110    ; CHECK: [[SHL27:%[0-9]+]]:_(s32) = G_SHL [[AND28]], [[C27]](s32)
2111    ; CHECK: [[OR27:%[0-9]+]]:_(s32) = G_OR [[OR26]], [[SHL27]]
2112    ; CHECK: [[COPY31:%[0-9]+]]:_(s32) = COPY [[LSHR28]](s32)
2113    ; CHECK: [[AND29:%[0-9]+]]:_(s32) = G_AND [[COPY31]], [[C]]
2114    ; CHECK: [[SHL28:%[0-9]+]]:_(s32) = G_SHL [[AND29]], [[C28]](s32)
2115    ; CHECK: [[OR28:%[0-9]+]]:_(s32) = G_OR [[OR27]], [[SHL28]]
2116    ; CHECK: [[COPY32:%[0-9]+]]:_(s32) = COPY [[LSHR29]](s32)
2117    ; CHECK: [[AND30:%[0-9]+]]:_(s32) = G_AND [[COPY32]], [[C]]
2118    ; CHECK: [[SHL29:%[0-9]+]]:_(s32) = G_SHL [[AND30]], [[C29]](s32)
2119    ; CHECK: [[OR29:%[0-9]+]]:_(s32) = G_OR [[OR28]], [[SHL29]]
2120    ; CHECK: [[COPY33:%[0-9]+]]:_(s32) = COPY [[LSHR30]](s32)
2121    ; CHECK: [[AND31:%[0-9]+]]:_(s32) = G_AND [[COPY33]], [[C]]
2122    ; CHECK: [[SHL30:%[0-9]+]]:_(s32) = G_SHL [[AND31]], [[C30]](s32)
2123    ; CHECK: [[OR30:%[0-9]+]]:_(s32) = G_OR [[OR29]], [[SHL30]]
2124    ; CHECK: [[AND32:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C30]]
2125    ; CHECK: [[C31:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
2126    ; CHECK: [[SHL31:%[0-9]+]]:_(s32) = G_SHL [[AND32]], [[C31]](s32)
2127    ; CHECK: [[LSHR31:%[0-9]+]]:_(s32) = G_LSHR [[OR30]], [[SHL31]](s32)
2128    ; CHECK: [[COPY34:%[0-9]+]]:_(s32) = COPY [[LSHR31]](s32)
2129    ; CHECK: $vgpr0 = COPY [[COPY34]](s32)
2130    %0:_(s32) = COPY $vgpr0
2131    %1:_(s32) = COPY $vgpr1
2132    %2:_(<32 x s1>) = G_BITCAST %0
2133    %3:_(s1) = G_EXTRACT_VECTOR_ELT %2, %1
2134    %4:_(s32) = G_ANYEXT %3
2135    $vgpr0 = COPY %4
2136...
2137
2138---
2139name: extract_vector_elt_v12s8_varidx_s32
2140
2141body: |
2142  bb.0:
2143    liveins: $vgpr0_vgpr1_vgpr2, $vgpr3
2144    ; CHECK-LABEL: name: extract_vector_elt_v12s8_varidx_s32
2145    ; CHECK: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
2146    ; CHECK: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<3 x s32>)
2147    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
2148    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV]], [[C]](s32)
2149    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
2150    ; CHECK: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[UV]], [[C1]](s32)
2151    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
2152    ; CHECK: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[UV]], [[C2]](s32)
2153    ; CHECK: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[UV1]], [[C]](s32)
2154    ; CHECK: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[UV1]], [[C1]](s32)
2155    ; CHECK: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[UV1]], [[C2]](s32)
2156    ; CHECK: [[LSHR6:%[0-9]+]]:_(s32) = G_LSHR [[UV2]], [[C]](s32)
2157    ; CHECK: [[LSHR7:%[0-9]+]]:_(s32) = G_LSHR [[UV2]], [[C1]](s32)
2158    ; CHECK: [[LSHR8:%[0-9]+]]:_(s32) = G_LSHR [[UV2]], [[C2]](s32)
2159    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr3
2160    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
2161    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[UV]](s32)
2162    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C3]]
2163    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
2164    ; CHECK: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C3]]
2165    ; CHECK: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND1]], [[C]](s32)
2166    ; CHECK: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND]], [[SHL]]
2167    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
2168    ; CHECK: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C3]]
2169    ; CHECK: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND2]], [[C1]](s32)
2170    ; CHECK: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
2171    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
2172    ; CHECK: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY5]], [[C3]]
2173    ; CHECK: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[C2]](s32)
2174    ; CHECK: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[SHL2]]
2175    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY [[UV1]](s32)
2176    ; CHECK: [[AND4:%[0-9]+]]:_(s32) = G_AND [[COPY6]], [[C3]]
2177    ; CHECK: [[COPY7:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
2178    ; CHECK: [[AND5:%[0-9]+]]:_(s32) = G_AND [[COPY7]], [[C3]]
2179    ; CHECK: [[SHL3:%[0-9]+]]:_(s32) = G_SHL [[AND5]], [[C]](s32)
2180    ; CHECK: [[OR3:%[0-9]+]]:_(s32) = G_OR [[AND4]], [[SHL3]]
2181    ; CHECK: [[COPY8:%[0-9]+]]:_(s32) = COPY [[LSHR4]](s32)
2182    ; CHECK: [[AND6:%[0-9]+]]:_(s32) = G_AND [[COPY8]], [[C3]]
2183    ; CHECK: [[SHL4:%[0-9]+]]:_(s32) = G_SHL [[AND6]], [[C1]](s32)
2184    ; CHECK: [[OR4:%[0-9]+]]:_(s32) = G_OR [[OR3]], [[SHL4]]
2185    ; CHECK: [[COPY9:%[0-9]+]]:_(s32) = COPY [[LSHR5]](s32)
2186    ; CHECK: [[AND7:%[0-9]+]]:_(s32) = G_AND [[COPY9]], [[C3]]
2187    ; CHECK: [[SHL5:%[0-9]+]]:_(s32) = G_SHL [[AND7]], [[C2]](s32)
2188    ; CHECK: [[OR5:%[0-9]+]]:_(s32) = G_OR [[OR4]], [[SHL5]]
2189    ; CHECK: [[COPY10:%[0-9]+]]:_(s32) = COPY [[UV2]](s32)
2190    ; CHECK: [[AND8:%[0-9]+]]:_(s32) = G_AND [[COPY10]], [[C3]]
2191    ; CHECK: [[COPY11:%[0-9]+]]:_(s32) = COPY [[LSHR6]](s32)
2192    ; CHECK: [[AND9:%[0-9]+]]:_(s32) = G_AND [[COPY11]], [[C3]]
2193    ; CHECK: [[SHL6:%[0-9]+]]:_(s32) = G_SHL [[AND9]], [[C]](s32)
2194    ; CHECK: [[OR6:%[0-9]+]]:_(s32) = G_OR [[AND8]], [[SHL6]]
2195    ; CHECK: [[COPY12:%[0-9]+]]:_(s32) = COPY [[LSHR7]](s32)
2196    ; CHECK: [[AND10:%[0-9]+]]:_(s32) = G_AND [[COPY12]], [[C3]]
2197    ; CHECK: [[SHL7:%[0-9]+]]:_(s32) = G_SHL [[AND10]], [[C1]](s32)
2198    ; CHECK: [[OR7:%[0-9]+]]:_(s32) = G_OR [[OR6]], [[SHL7]]
2199    ; CHECK: [[COPY13:%[0-9]+]]:_(s32) = COPY [[LSHR8]](s32)
2200    ; CHECK: [[AND11:%[0-9]+]]:_(s32) = G_AND [[COPY13]], [[C3]]
2201    ; CHECK: [[SHL8:%[0-9]+]]:_(s32) = G_SHL [[AND11]], [[C2]](s32)
2202    ; CHECK: [[OR8:%[0-9]+]]:_(s32) = G_OR [[OR7]], [[SHL8]]
2203    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[OR2]](s32), [[OR5]](s32), [[OR8]](s32)
2204    ; CHECK: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 2
2205    ; CHECK: [[LSHR9:%[0-9]+]]:_(s32) = G_LSHR [[COPY1]], [[C4]](s32)
2206    ; CHECK: [[EVEC:%[0-9]+]]:_(s32) = G_EXTRACT_VECTOR_ELT [[BUILD_VECTOR]](<3 x s32>), [[LSHR9]](s32)
2207    ; CHECK: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 3
2208    ; CHECK: [[AND12:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C5]]
2209    ; CHECK: [[SHL9:%[0-9]+]]:_(s32) = G_SHL [[AND12]], [[C5]](s32)
2210    ; CHECK: [[LSHR10:%[0-9]+]]:_(s32) = G_LSHR [[EVEC]], [[SHL9]](s32)
2211    ; CHECK: [[COPY14:%[0-9]+]]:_(s32) = COPY [[LSHR10]](s32)
2212    ; CHECK: $vgpr0 = COPY [[COPY14]](s32)
2213    %0:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
2214    %1:_(<12 x s8>) = G_BITCAST %0
2215    %2:_(s32) = COPY $vgpr3
2216    %3:_(s8) = G_EXTRACT_VECTOR_ELT %1, %2
2217    %4:_(s32) = G_ANYEXT %3
2218    $vgpr0 = COPY %4
2219...
2220
2221---
2222name: extract_vector_elt_v3s8_varidx_s32
2223
2224body: |
2225  bb.0:
2226    liveins: $vgpr0, $vgpr1
2227    ; CHECK-LABEL: name: extract_vector_elt_v3s8_varidx_s32
2228    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
2229    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
2230    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
2231    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32)
2232    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
2233    ; CHECK: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C1]](s32)
2234    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
2235    ; CHECK: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C2]](s32)
2236    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
2237    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
2238    ; CHECK: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
2239    ; CHECK: [[COPY5:%[0-9]+]]:_(s32) = COPY [[COPY2]](s32)
2240    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY5]], 8
2241    ; CHECK: [[COPY6:%[0-9]+]]:_(s32) = COPY [[COPY3]](s32)
2242    ; CHECK: [[SEXT_INREG1:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY6]], 8
2243    ; CHECK: [[COPY7:%[0-9]+]]:_(s32) = COPY [[COPY4]](s32)
2244    ; CHECK: [[SEXT_INREG2:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY7]], 8
2245    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[SEXT_INREG]](s32), [[SEXT_INREG1]](s32), [[SEXT_INREG2]](s32)
2246    ; CHECK: [[EVEC:%[0-9]+]]:_(s32) = G_EXTRACT_VECTOR_ELT [[BUILD_VECTOR]](<3 x s32>), [[COPY1]](s32)
2247    ; CHECK: [[COPY8:%[0-9]+]]:_(s32) = COPY [[EVEC]](s32)
2248    ; CHECK: $vgpr0 = COPY [[COPY8]](s32)
2249    %0:_(s32) = COPY $vgpr0
2250    %1:_(s32) = COPY $vgpr1
2251    %2:_(s24) = G_TRUNC %0
2252    %3:_(<3 x s8>) = G_BITCAST %2
2253    %4:_(s8) = G_EXTRACT_VECTOR_ELT %3, %1
2254    %5:_(s32) = G_ANYEXT %4
2255    $vgpr0 = COPY %5
2256...
2257