# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=amdgcn-mesa-mesa3d -mcpu=tahiti -run-pass=legalizer -global-isel-abort=0 %s -o - | FileCheck -check-prefix=SI %s
# RUN: llc -mtriple=amdgcn-mesa-mesa3d -mcpu=fiji -run-pass=legalizer -global-isel-abort=0 %s -o - | FileCheck -check-prefix=VI %s
# RUN: llc -mtriple=amdgcn-mesa-mesa3d -mcpu=gfx900 -run-pass=legalizer -global-isel-abort=0 %s -o - | FileCheck -check-prefix=GFX9 %s

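# s32 G_UMAX is legal on all tested targets and is expected to be left unchanged.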
---
name: test_umax_s32
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; SI-LABEL: name: test_umax_s32
    ; SI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; SI: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[COPY]], [[COPY1]]
    ; SI: $vgpr0 = COPY [[UMAX]](s32)
    ; VI-LABEL: name: test_umax_s32
    ; VI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; VI: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[COPY]], [[COPY1]]
    ; VI: $vgpr0 = COPY [[UMAX]](s32)
    ; GFX9-LABEL: name: test_umax_s32
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[COPY]], [[COPY1]]
    ; GFX9: $vgpr0 = COPY [[UMAX]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s32) = G_UMAX %0, %1
    $vgpr0 = COPY %2
...

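# s64 has no umax; expect lowering to G_ICMP intpred(ugt) + G_SELECT on all targets.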
---
name: test_umax_s64
body: |
  bb.0:
    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3

    ; SI-LABEL: name: test_umax_s64
    ; SI: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; SI: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
    ; SI: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ugt), [[COPY]](s64), [[COPY1]]
    ; SI: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[COPY]], [[COPY1]]
    ; SI: $vgpr0_vgpr1 = COPY [[SELECT]](s64)
    ; VI-LABEL: name: test_umax_s64
    ; VI: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; VI: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
    ; VI: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ugt), [[COPY]](s64), [[COPY1]]
    ; VI: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[COPY]], [[COPY1]]
    ; VI: $vgpr0_vgpr1 = COPY [[SELECT]](s64)
    ; GFX9-LABEL: name: test_umax_s64
    ; GFX9: [[COPY:%[0-9]+]]:_(s64) = COPY $vgpr0_vgpr1
    ; GFX9: [[COPY1:%[0-9]+]]:_(s64) = COPY $vgpr2_vgpr3
    ; GFX9: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(ugt), [[COPY]](s64), [[COPY1]]
    ; GFX9: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s1), [[COPY]], [[COPY1]]
    ; GFX9: $vgpr0_vgpr1 = COPY [[SELECT]](s64)
    %0:_(s64) = COPY $vgpr0_vgpr1
    %1:_(s64) = COPY $vgpr2_vgpr3
    %2:_(s64) = G_UMAX %0, %1
    $vgpr0_vgpr1 = COPY %2
...

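# s16: SI widens to s32 and zero-extends the operands with an AND mask; VI and GFX9 keep the s16 G_UMAX.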
---
name: test_umax_s16
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; SI-LABEL: name: test_umax_s16
    ; SI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; SI: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C]]
    ; SI: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[AND]], [[AND1]]
    ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[UMAX]](s32)
    ; SI: $vgpr0 = COPY [[COPY4]](s32)
    ; VI-LABEL: name: test_umax_s16
    ; VI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; VI: [[UMAX:%[0-9]+]]:_(s16) = G_UMAX [[TRUNC]], [[TRUNC1]]
    ; VI: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[UMAX]](s16)
    ; VI: $vgpr0 = COPY [[ANYEXT]](s32)
    ; GFX9-LABEL: name: test_umax_s16
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX9: [[UMAX:%[0-9]+]]:_(s16) = G_UMAX [[TRUNC]], [[TRUNC1]]
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[UMAX]](s16)
    ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s16) = G_TRUNC %0
    %3:_(s16) = G_TRUNC %1
    %4:_(s16) = G_UMAX %2, %3
    %5:_(s32) = G_ANYEXT %4
    $vgpr0 = COPY %5
...

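# s8: SI widens to s32; VI and GFX9 widen to s16, masking the operands with 255.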
---
name: test_umax_s8
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; SI-LABEL: name: test_umax_s8
    ; SI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; SI: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C]]
    ; SI: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[AND]], [[AND1]]
    ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[UMAX]](s32)
    ; SI: $vgpr0 = COPY [[COPY4]](s32)
    ; VI-LABEL: name: test_umax_s8
    ; VI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; VI: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 255
    ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; VI: [[AND:%[0-9]+]]:_(s16) = G_AND [[TRUNC]], [[C]]
    ; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; VI: [[AND1:%[0-9]+]]:_(s16) = G_AND [[TRUNC1]], [[C]]
    ; VI: [[UMAX:%[0-9]+]]:_(s16) = G_UMAX [[AND]], [[AND1]]
    ; VI: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[UMAX]](s16)
    ; VI: $vgpr0 = COPY [[ANYEXT]](s32)
    ; GFX9-LABEL: name: test_umax_s8
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 255
    ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
    ; GFX9: [[AND:%[0-9]+]]:_(s16) = G_AND [[TRUNC]], [[C]]
    ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
    ; GFX9: [[AND1:%[0-9]+]]:_(s16) = G_AND [[TRUNC1]], [[C]]
    ; GFX9: [[UMAX:%[0-9]+]]:_(s16) = G_UMAX [[AND]], [[AND1]]
    ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[UMAX]](s16)
    ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s8) = G_TRUNC %0
    %3:_(s8) = G_TRUNC %1
    %4:_(s8) = G_UMAX %2, %3
    %5:_(s32) = G_ANYEXT %4
    $vgpr0 = COPY %5
...

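# Odd-width s17: widened to s32 on all targets, with the operands masked to 17 bits (131071 = 0x1ffff).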
---
name: test_umax_s17
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; SI-LABEL: name: test_umax_s17
    ; SI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 131071
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; SI: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C]]
    ; SI: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[AND]], [[AND1]]
    ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[UMAX]](s32)
    ; SI: $vgpr0 = COPY [[COPY4]](s32)
    ; VI-LABEL: name: test_umax_s17
    ; VI: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; VI: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 131071
    ; VI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; VI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; VI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; VI: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C]]
    ; VI: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[AND]], [[AND1]]
    ; VI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[UMAX]](s32)
    ; VI: $vgpr0 = COPY [[COPY4]](s32)
    ; GFX9-LABEL: name: test_umax_s17
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 131071
    ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
    ; GFX9: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C]]
    ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; GFX9: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C]]
    ; GFX9: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[AND]], [[AND1]]
    ; GFX9: [[COPY4:%[0-9]+]]:_(s32) = COPY [[UMAX]](s32)
    ; GFX9: $vgpr0 = COPY [[COPY4]](s32)
    %0:_(s32) = COPY $vgpr0
    %1:_(s32) = COPY $vgpr1
    %2:_(s17) = G_TRUNC %0
    %3:_(s17) = G_TRUNC %1
    %4:_(s17) = G_UMAX %2, %3
    %5:_(s32) = G_ANYEXT %4
    $vgpr0 = COPY %5
...

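# <2 x s32> is scalarized into two s32 G_UMAX operations on all targets.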
---
name: test_umax_v2s32
body: |
  bb.0:
    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3

    ; SI-LABEL: name: test_umax_v2s32
    ; SI: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; SI: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr2_vgpr3
    ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
    ; SI: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; SI: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[UV]], [[UV2]]
    ; SI: [[UMAX1:%[0-9]+]]:_(s32) = G_UMAX [[UV1]], [[UV3]]
    ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[UMAX]](s32), [[UMAX1]](s32)
    ; SI: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>)
    ; VI-LABEL: name: test_umax_v2s32
    ; VI: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; VI: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr2_vgpr3
    ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
    ; VI: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; VI: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[UV]], [[UV2]]
    ; VI: [[UMAX1:%[0-9]+]]:_(s32) = G_UMAX [[UV1]], [[UV3]]
    ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[UMAX]](s32), [[UMAX1]](s32)
    ; VI: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>)
    ; GFX9-LABEL: name: test_umax_v2s32
    ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr0_vgpr1
    ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $vgpr2_vgpr3
    ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<2 x s32>)
    ; GFX9: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<2 x s32>)
    ; GFX9: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[UV]], [[UV2]]
    ; GFX9: [[UMAX1:%[0-9]+]]:_(s32) = G_UMAX [[UV1]], [[UV3]]
    ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[UMAX]](s32), [[UMAX1]](s32)
    ; GFX9: $vgpr0_vgpr1 = COPY [[BUILD_VECTOR]](<2 x s32>)
    %0:_(<2 x s32>) = COPY $vgpr0_vgpr1
    %1:_(<2 x s32>) = COPY $vgpr2_vgpr3
    %2:_(<2 x s32>) = G_UMAX %0, %1
    $vgpr0_vgpr1 = COPY %2
...

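# <3 x s32> is scalarized into three s32 G_UMAX operations on all targets.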
---
name: test_umax_v3s32
body: |
  bb.0:
    liveins: $vgpr0_vgpr1_vgpr2, $vgpr3_vgpr4_vgpr5

    ; SI-LABEL: name: test_umax_v3s32
    ; SI: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
    ; SI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr3_vgpr4_vgpr5
    ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<3 x s32>)
    ; SI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
    ; SI: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[UV]], [[UV3]]
    ; SI: [[UMAX1:%[0-9]+]]:_(s32) = G_UMAX [[UV1]], [[UV4]]
    ; SI: [[UMAX2:%[0-9]+]]:_(s32) = G_UMAX [[UV2]], [[UV5]]
    ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[UMAX]](s32), [[UMAX1]](s32), [[UMAX2]](s32)
    ; SI: $vgpr0_vgpr1_vgpr2 = COPY [[BUILD_VECTOR]](<3 x s32>)
    ; VI-LABEL: name: test_umax_v3s32
    ; VI: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
    ; VI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr3_vgpr4_vgpr5
    ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<3 x s32>)
    ; VI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
    ; VI: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[UV]], [[UV3]]
    ; VI: [[UMAX1:%[0-9]+]]:_(s32) = G_UMAX [[UV1]], [[UV4]]
    ; VI: [[UMAX2:%[0-9]+]]:_(s32) = G_UMAX [[UV2]], [[UV5]]
    ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[UMAX]](s32), [[UMAX1]](s32), [[UMAX2]](s32)
    ; VI: $vgpr0_vgpr1_vgpr2 = COPY [[BUILD_VECTOR]](<3 x s32>)
    ; GFX9-LABEL: name: test_umax_v3s32
    ; GFX9: [[COPY:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
    ; GFX9: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr3_vgpr4_vgpr5
    ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY]](<3 x s32>)
    ; GFX9: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
    ; GFX9: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[UV]], [[UV3]]
    ; GFX9: [[UMAX1:%[0-9]+]]:_(s32) = G_UMAX [[UV1]], [[UV4]]
    ; GFX9: [[UMAX2:%[0-9]+]]:_(s32) = G_UMAX [[UV2]], [[UV5]]
    ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[UMAX]](s32), [[UMAX1]](s32), [[UMAX2]](s32)
    ; GFX9: $vgpr0_vgpr1_vgpr2 = COPY [[BUILD_VECTOR]](<3 x s32>)
    %0:_(<3 x s32>) = COPY $vgpr0_vgpr1_vgpr2
    %1:_(<3 x s32>) = COPY $vgpr3_vgpr4_vgpr5
    %2:_(<3 x s32>) = G_UMAX %0, %1
    $vgpr0_vgpr1_vgpr2 = COPY %2
...

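# <2 x s16>: GFX9 keeps the packed <2 x s16> G_UMAX; SI and VI unpack the halves and operate on scalars.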
---
name: test_umax_v2s16
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; SI-LABEL: name: test_umax_v2s16
    ; SI: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
    ; SI: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
    ; SI: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>)
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; SI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; SI: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[COPY1]](<2 x s16>)
    ; SI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C1]]
    ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[BITCAST1]](s32)
    ; SI: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C1]]
    ; SI: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[AND]], [[AND1]]
    ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
    ; SI: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C1]]
    ; SI: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
    ; SI: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY5]], [[C1]]
    ; SI: [[UMAX1:%[0-9]+]]:_(s32) = G_UMAX [[AND2]], [[AND3]]
    ; SI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[UMAX]](s32)
    ; SI: [[AND4:%[0-9]+]]:_(s32) = G_AND [[COPY6]], [[C1]]
    ; SI: [[COPY7:%[0-9]+]]:_(s32) = COPY [[UMAX1]](s32)
    ; SI: [[AND5:%[0-9]+]]:_(s32) = G_AND [[COPY7]], [[C1]]
    ; SI: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND5]], [[C]](s32)
    ; SI: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND4]], [[SHL]]
    ; SI: [[BITCAST2:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; SI: $vgpr0 = COPY [[BITCAST2]](<2 x s16>)
    ; VI-LABEL: name: test_umax_v2s16
    ; VI: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
    ; VI: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
    ; VI: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[COPY]](<2 x s16>)
    ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; VI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; VI: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[COPY1]](<2 x s16>)
    ; VI: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST1]](s32)
    ; VI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; VI: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR1]](s32)
    ; VI: [[UMAX:%[0-9]+]]:_(s16) = G_UMAX [[TRUNC]], [[TRUNC2]]
    ; VI: [[UMAX1:%[0-9]+]]:_(s16) = G_UMAX [[TRUNC1]], [[TRUNC3]]
    ; VI: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[UMAX]](s16)
    ; VI: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[UMAX1]](s16)
    ; VI: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C]](s32)
    ; VI: [[OR:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL]]
    ; VI: [[BITCAST2:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; VI: $vgpr0 = COPY [[BITCAST2]](<2 x s16>)
    ; GFX9-LABEL: name: test_umax_v2s16
    ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
    ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
    ; GFX9: [[UMAX:%[0-9]+]]:_(<2 x s16>) = G_UMAX [[COPY]], [[COPY1]]
    ; GFX9: $vgpr0 = COPY [[UMAX]](<2 x s16>)
    %0:_(<2 x s16>) = COPY $vgpr0
    %1:_(<2 x s16>) = COPY $vgpr1
    %2:_(<2 x s16>) = G_UMAX %0, %1
    $vgpr0 = COPY %2
...

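# <3 x s16>: handled by padding to <4 x s16>; SI and VI scalarize, GFX9 splits into <2 x s16> pieces.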
---
name: test_umax_v3s16
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1

    ; SI-LABEL: name: test_umax_v3s16
    ; SI: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; SI: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; SI: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; SI: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF]](<4 x s16>)
    ; SI: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; SI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; SI: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; SI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; SI: [[UV2:%[0-9]+]]:_(<2 x s16>), [[UV3:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF2]](<4 x s16>)
    ; SI: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV2]](<2 x s16>)
    ; SI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; SI: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV3]](<2 x s16>)
    ; SI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; SI: [[COPY:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C1]]
    ; SI: [[COPY1:%[0-9]+]]:_(s32) = COPY [[BITCAST2]](s32)
    ; SI: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY1]], [[C1]]
    ; SI: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[AND]], [[AND1]]
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
    ; SI: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C1]]
    ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
    ; SI: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C1]]
    ; SI: [[UMAX1:%[0-9]+]]:_(s32) = G_UMAX [[AND2]], [[AND3]]
    ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[BITCAST1]](s32)
    ; SI: [[AND4:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C1]]
    ; SI: [[COPY5:%[0-9]+]]:_(s32) = COPY [[BITCAST3]](s32)
    ; SI: [[AND5:%[0-9]+]]:_(s32) = G_AND [[COPY5]], [[C1]]
    ; SI: [[UMAX2:%[0-9]+]]:_(s32) = G_UMAX [[AND4]], [[AND5]]
    ; SI: [[DEF3:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
    ; SI: [[DEF4:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF
    ; SI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[UMAX]](s32)
    ; SI: [[COPY7:%[0-9]+]]:_(s32) = COPY [[UMAX1]](s32)
    ; SI: [[COPY8:%[0-9]+]]:_(s32) = COPY [[UMAX2]](s32)
    ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32)
    ; SI: S_NOP 0, implicit [[BUILD_VECTOR]](<3 x s32>)
    ; VI-LABEL: name: test_umax_v3s16
    ; VI: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; VI: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; VI: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; VI: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF]](<4 x s16>)
    ; VI: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; VI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; VI: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; VI: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST1]](s32)
    ; VI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; VI: [[UV2:%[0-9]+]]:_(<2 x s16>), [[UV3:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[DEF2]](<4 x s16>)
    ; VI: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV2]](<2 x s16>)
    ; VI: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST2]](s32)
    ; VI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; VI: [[TRUNC4:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR2]](s32)
    ; VI: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV3]](<2 x s16>)
    ; VI: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST3]](s32)
    ; VI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; VI: [[UMAX:%[0-9]+]]:_(s16) = G_UMAX [[TRUNC]], [[TRUNC3]]
    ; VI: [[UMAX1:%[0-9]+]]:_(s16) = G_UMAX [[TRUNC1]], [[TRUNC4]]
    ; VI: [[UMAX2:%[0-9]+]]:_(s16) = G_UMAX [[TRUNC2]], [[TRUNC5]]
    ; VI: [[DEF3:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
    ; VI: [[DEF4:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF
    ; VI: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[UMAX]](s16)
    ; VI: [[ANYEXT1:%[0-9]+]]:_(s32) = G_ANYEXT [[UMAX1]](s16)
    ; VI: [[ANYEXT2:%[0-9]+]]:_(s32) = G_ANYEXT [[UMAX2]](s16)
    ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[ANYEXT]](s32), [[ANYEXT1]](s32), [[ANYEXT2]](s32)
    ; VI: S_NOP 0, implicit [[BUILD_VECTOR]](<3 x s32>)
    ; GFX9-LABEL: name: test_umax_v3s16
    ; GFX9: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX9: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<12 x s16>) = G_CONCAT_VECTORS [[DEF]](<4 x s16>), [[DEF1]](<4 x s16>), [[DEF1]](<4 x s16>)
    ; GFX9: [[UV:%[0-9]+]]:_(<3 x s16>), [[UV1:%[0-9]+]]:_(<3 x s16>), [[UV2:%[0-9]+]]:_(<3 x s16>), [[UV3:%[0-9]+]]:_(<3 x s16>) = G_UNMERGE_VALUES [[CONCAT_VECTORS]](<12 x s16>)
    ; GFX9: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
    ; GFX9: [[CONCAT_VECTORS1:%[0-9]+]]:_(<12 x s16>) = G_CONCAT_VECTORS [[DEF2]](<4 x s16>), [[DEF1]](<4 x s16>), [[DEF1]](<4 x s16>)
    ; GFX9: [[UV4:%[0-9]+]]:_(<3 x s16>), [[UV5:%[0-9]+]]:_(<3 x s16>), [[UV6:%[0-9]+]]:_(<3 x s16>), [[UV7:%[0-9]+]]:_(<3 x s16>) = G_UNMERGE_VALUES [[CONCAT_VECTORS1]](<12 x s16>)
    ; GFX9: [[INSERT:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF1]], [[UV]](<3 x s16>), 0
    ; GFX9: [[INSERT1:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF1]], [[UV4]](<3 x s16>), 0
    ; GFX9: [[UV8:%[0-9]+]]:_(<2 x s16>), [[UV9:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[INSERT]](<4 x s16>)
    ; GFX9: [[UV10:%[0-9]+]]:_(<2 x s16>), [[UV11:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[INSERT1]](<4 x s16>)
    ; GFX9: [[UMAX:%[0-9]+]]:_(<2 x s16>) = G_UMAX [[UV8]], [[UV10]]
    ; GFX9: [[UMAX1:%[0-9]+]]:_(<2 x s16>) = G_UMAX [[UV9]], [[UV11]]
    ; GFX9: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UMAX]](<2 x s16>)
    ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; GFX9: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; GFX9: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UMAX1]](<2 x s16>)
    ; GFX9: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32)
    ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
    ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY [[BITCAST1]](s32)
    ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32)
    ; GFX9: S_NOP 0, implicit [[BUILD_VECTOR]](<3 x s32>)
    %0:_(<3 x s16>) = G_IMPLICIT_DEF
    %1:_(<3 x s16>) = G_IMPLICIT_DEF
    %2:_(<3 x s16>) = G_UMAX %0, %1
    %3:_(<3 x s32>) = G_ANYEXT %2
    S_NOP 0, implicit %3
...

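# <4 x s16>: GFX9 splits into two <2 x s16> G_UMAX; SI and VI scalarize the elements.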
---
name: test_umax_v4s16
body: |
  bb.0:
    liveins: $vgpr0_vgpr1, $vgpr2_vgpr3

    ; SI-LABEL: name: test_umax_v4s16
    ; SI: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr0_vgpr1
    ; SI: [[COPY1:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr2_vgpr3
    ; SI: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<4 x s16>)
    ; SI: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; SI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; SI: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; SI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; SI: [[UV2:%[0-9]+]]:_(<2 x s16>), [[UV3:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY1]](<4 x s16>)
    ; SI: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV2]](<2 x s16>)
    ; SI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; SI: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV3]](<2 x s16>)
    ; SI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[BITCAST]](s32)
    ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY2]], [[C1]]
    ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[BITCAST2]](s32)
    ; SI: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C1]]
    ; SI: [[UMAX:%[0-9]+]]:_(s32) = G_UMAX [[AND]], [[AND1]]
    ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
    ; SI: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C1]]
    ; SI: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
    ; SI: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY5]], [[C1]]
    ; SI: [[UMAX1:%[0-9]+]]:_(s32) = G_UMAX [[AND2]], [[AND3]]
    ; SI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[BITCAST1]](s32)
    ; SI: [[AND4:%[0-9]+]]:_(s32) = G_AND [[COPY6]], [[C1]]
    ; SI: [[COPY7:%[0-9]+]]:_(s32) = COPY [[BITCAST3]](s32)
    ; SI: [[AND5:%[0-9]+]]:_(s32) = G_AND [[COPY7]], [[C1]]
    ; SI: [[UMAX2:%[0-9]+]]:_(s32) = G_UMAX [[AND4]], [[AND5]]
    ; SI: [[COPY8:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
    ; SI: [[AND6:%[0-9]+]]:_(s32) = G_AND [[COPY8]], [[C1]]
    ; SI: [[COPY9:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
    ; SI: [[AND7:%[0-9]+]]:_(s32) = G_AND [[COPY9]], [[C1]]
    ; SI: [[UMAX3:%[0-9]+]]:_(s32) = G_UMAX [[AND6]], [[AND7]]
    ; SI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[UMAX]](s32)
    ; SI: [[AND8:%[0-9]+]]:_(s32) = G_AND [[COPY10]], [[C1]]
    ; SI: [[COPY11:%[0-9]+]]:_(s32) = COPY [[UMAX1]](s32)
    ; SI: [[AND9:%[0-9]+]]:_(s32) = G_AND [[COPY11]], [[C1]]
    ; SI: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[AND9]], [[C]](s32)
    ; SI: [[OR:%[0-9]+]]:_(s32) = G_OR [[AND8]], [[SHL]]
    ; SI: [[BITCAST4:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; SI: [[COPY12:%[0-9]+]]:_(s32) = COPY [[UMAX2]](s32)
    ; SI: [[AND10:%[0-9]+]]:_(s32) = G_AND [[COPY12]], [[C1]]
    ; SI: [[COPY13:%[0-9]+]]:_(s32) = COPY [[UMAX3]](s32)
    ; SI: [[AND11:%[0-9]+]]:_(s32) = G_AND [[COPY13]], [[C1]]
    ; SI: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND11]], [[C]](s32)
    ; SI: [[OR1:%[0-9]+]]:_(s32) = G_OR [[AND10]], [[SHL1]]
    ; SI: [[BITCAST5:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32)
    ; SI: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BITCAST4]](<2 x s16>), [[BITCAST5]](<2 x s16>)
    ; SI: $vgpr0_vgpr1 = COPY [[CONCAT_VECTORS]](<4 x s16>)
    ; VI-LABEL: name: test_umax_v4s16
    ; VI: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr0_vgpr1
    ; VI: [[COPY1:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr2_vgpr3
    ; VI: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<4 x s16>)
    ; VI: [[BITCAST:%[0-9]+]]:_(s32) = G_BITCAST [[UV]](<2 x s16>)
    ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST]](s32)
    ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; VI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST]], [[C]](s32)
    ; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
    ; VI: [[BITCAST1:%[0-9]+]]:_(s32) = G_BITCAST [[UV1]](<2 x s16>)
    ; VI: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST1]](s32)
    ; VI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST1]], [[C]](s32)
    ; VI: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR1]](s32)
    ; VI: [[UV2:%[0-9]+]]:_(<2 x s16>), [[UV3:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY1]](<4 x s16>)
    ; VI: [[BITCAST2:%[0-9]+]]:_(s32) = G_BITCAST [[UV2]](<2 x s16>)
    ; VI: [[TRUNC4:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST2]](s32)
    ; VI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST2]], [[C]](s32)
    ; VI: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR2]](s32)
    ; VI: [[BITCAST3:%[0-9]+]]:_(s32) = G_BITCAST [[UV3]](<2 x s16>)
    ; VI: [[TRUNC6:%[0-9]+]]:_(s16) = G_TRUNC [[BITCAST3]](s32)
    ; VI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[BITCAST3]], [[C]](s32)
    ; VI: [[TRUNC7:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR3]](s32)
    ; VI: [[UMAX:%[0-9]+]]:_(s16) = G_UMAX [[TRUNC]], [[TRUNC4]]
    ; VI: [[UMAX1:%[0-9]+]]:_(s16) = G_UMAX [[TRUNC1]], [[TRUNC5]]
    ; VI: [[UMAX2:%[0-9]+]]:_(s16) = G_UMAX [[TRUNC2]], [[TRUNC6]]
    ; VI: [[UMAX3:%[0-9]+]]:_(s16) = G_UMAX [[TRUNC3]], [[TRUNC7]]
    ; VI: [[ZEXT:%[0-9]+]]:_(s32) = G_ZEXT [[UMAX]](s16)
    ; VI: [[ZEXT1:%[0-9]+]]:_(s32) = G_ZEXT [[UMAX1]](s16)
    ; VI: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[ZEXT1]], [[C]](s32)
    ; VI: [[OR:%[0-9]+]]:_(s32) = G_OR [[ZEXT]], [[SHL]]
    ; VI: [[BITCAST4:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR]](s32)
    ; VI: [[ZEXT2:%[0-9]+]]:_(s32) = G_ZEXT [[UMAX2]](s16)
    ; VI: [[ZEXT3:%[0-9]+]]:_(s32) = G_ZEXT [[UMAX3]](s16)
    ; VI: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[ZEXT3]], [[C]](s32)
    ; VI: [[OR1:%[0-9]+]]:_(s32) = G_OR [[ZEXT2]], [[SHL1]]
    ; VI: [[BITCAST5:%[0-9]+]]:_(<2 x s16>) = G_BITCAST [[OR1]](s32)
    ; VI: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BITCAST4]](<2 x s16>), [[BITCAST5]](<2 x s16>)
    ; VI: $vgpr0_vgpr1 = COPY [[CONCAT_VECTORS]](<4 x s16>)
    ; GFX9-LABEL: name: test_umax_v4s16
    ; GFX9: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr0_vgpr1
    ; GFX9: [[COPY1:%[0-9]+]]:_(<4 x s16>) = COPY $vgpr2_vgpr3
    ; GFX9: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY]](<4 x s16>)
    ; GFX9: [[UV2:%[0-9]+]]:_(<2 x s16>), [[UV3:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[COPY1]](<4 x s16>)
    ; GFX9: [[UMAX:%[0-9]+]]:_(<2 x s16>) = G_UMAX [[UV]], [[UV2]]
    ; GFX9: [[UMAX1:%[0-9]+]]:_(<2 x s16>) = G_UMAX [[UV1]], [[UV3]]
    ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[UMAX]](<2 x s16>), [[UMAX1]](<2 x s16>)
    ; GFX9: $vgpr0_vgpr1 = COPY [[CONCAT_VECTORS]](<4 x s16>)
    %0:_(<4 x s16>) = COPY $vgpr0_vgpr1
    %1:_(<4 x s16>) = COPY $vgpr2_vgpr3
    %2:_(<4 x s16>) = G_UMAX %0, %1
    $vgpr0_vgpr1 = COPY %2
...