// RUN: %clang_cc1 -triple s390x-linux-gnu \
// RUN: -emit-llvm -o - %s | FileCheck %s
// RUN: %clang_cc1 -triple s390x-linux-gnu -target-feature +vector \
// RUN: -emit-llvm -o - %s | FileCheck --check-prefix=CHECK-VECTOR %s
// RUN: %clang_cc1 -triple s390x-linux-gnu -target-cpu z13 \
// RUN: -emit-llvm -o - %s | FileCheck --check-prefix=CHECK-VECTOR %s

// Vector types
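//
// With no vector facility (CHECK), vector arguments and return values are
// passed indirectly. With the vector ABI (CHECK-VECTOR, enabled either by
// -target-feature +vector or by -target-cpu z13), single vectors of up to
// 16 bytes are passed and returned directly in vector registers; larger
// vectors remain indirect.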

typedef __attribute__((vector_size(1))) char v1i8;

typedef __attribute__((vector_size(2))) char v2i8;
typedef __attribute__((vector_size(2))) short v1i16;

typedef __attribute__((vector_size(4))) char v4i8;
typedef __attribute__((vector_size(4))) short v2i16;
typedef __attribute__((vector_size(4))) int v1i32;
typedef __attribute__((vector_size(4))) float v1f32;

typedef __attribute__((vector_size(8))) char v8i8;
typedef __attribute__((vector_size(8))) short v4i16;
typedef __attribute__((vector_size(8))) int v2i32;
typedef __attribute__((vector_size(8))) long long v1i64;
typedef __attribute__((vector_size(8))) float v2f32;
typedef __attribute__((vector_size(8))) double v1f64;

typedef __attribute__((vector_size(16))) char v16i8;
typedef __attribute__((vector_size(16))) short v8i16;
typedef __attribute__((vector_size(16))) int v4i32;
typedef __attribute__((vector_size(16))) long long v2i64;
typedef __attribute__((vector_size(16))) __int128 v1i128;
typedef __attribute__((vector_size(16))) float v4f32;
typedef __attribute__((vector_size(16))) double v2f64;
typedef __attribute__((vector_size(16))) long double v1f128;

typedef __attribute__((vector_size(32))) char v32i8;
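
// Illustrative sketch only (no CHECK lines refer to it): these generic vector
// typedefs support the usual GNU C element-wise operators. The helper below is
// made up for illustration and is left static and unreferenced, so it should
// not affect the checked IR.
static v4i32 add_v4i32(v4i32 a, v4i32 b) { return a + b; }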

unsigned int align = __alignof__ (v16i8);
// CHECK: @align = global i32 16
// CHECK-VECTOR: @align = global i32 8
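// Note: the vector ABI caps the ABI alignment of vector types at 8 bytes,
// which is why __alignof__ (v16i8) drops from 16 to 8 under CHECK-VECTOR.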

v1i8 pass_v1i8(v1i8 arg) { return arg; }
// CHECK-LABEL: define void @pass_v1i8(<1 x i8>* noalias sret %{{.*}}, <1 x i8>*)
// CHECK-VECTOR-LABEL: define <1 x i8> @pass_v1i8(<1 x i8> %{{.*}})

v2i8 pass_v2i8(v2i8 arg) { return arg; }
// CHECK-LABEL: define void @pass_v2i8(<2 x i8>* noalias sret %{{.*}}, <2 x i8>*)
// CHECK-VECTOR-LABEL: define <2 x i8> @pass_v2i8(<2 x i8> %{{.*}})

v4i8 pass_v4i8(v4i8 arg) { return arg; }
// CHECK-LABEL: define void @pass_v4i8(<4 x i8>* noalias sret %{{.*}}, <4 x i8>*)
// CHECK-VECTOR-LABEL: define <4 x i8> @pass_v4i8(<4 x i8> %{{.*}})

v8i8 pass_v8i8(v8i8 arg) { return arg; }
// CHECK-LABEL: define void @pass_v8i8(<8 x i8>* noalias sret %{{.*}}, <8 x i8>*)
// CHECK-VECTOR-LABEL: define <8 x i8> @pass_v8i8(<8 x i8> %{{.*}})

v16i8 pass_v16i8(v16i8 arg) { return arg; }
// CHECK-LABEL: define void @pass_v16i8(<16 x i8>* noalias sret %{{.*}}, <16 x i8>*)
// CHECK-VECTOR-LABEL: define <16 x i8> @pass_v16i8(<16 x i8> %{{.*}})

v32i8 pass_v32i8(v32i8 arg) { return arg; }
// CHECK-LABEL: define void @pass_v32i8(<32 x i8>* noalias sret %{{.*}}, <32 x i8>*)
// CHECK-VECTOR-LABEL: define void @pass_v32i8(<32 x i8>* noalias sret %{{.*}}, <32 x i8>*)
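// A 32-byte vector exceeds the 16-byte limit, so it is passed and returned
// indirectly even under the vector ABI.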

v1i16 pass_v1i16(v1i16 arg) { return arg; }
// CHECK-LABEL: define void @pass_v1i16(<1 x i16>* noalias sret %{{.*}}, <1 x i16>*)
// CHECK-VECTOR-LABEL: define <1 x i16> @pass_v1i16(<1 x i16> %{{.*}})

v2i16 pass_v2i16(v2i16 arg) { return arg; }
// CHECK-LABEL: define void @pass_v2i16(<2 x i16>* noalias sret %{{.*}}, <2 x i16>*)
// CHECK-VECTOR-LABEL: define <2 x i16> @pass_v2i16(<2 x i16> %{{.*}})

v4i16 pass_v4i16(v4i16 arg) { return arg; }
// CHECK-LABEL: define void @pass_v4i16(<4 x i16>* noalias sret %{{.*}}, <4 x i16>*)
// CHECK-VECTOR-LABEL: define <4 x i16> @pass_v4i16(<4 x i16> %{{.*}})

v8i16 pass_v8i16(v8i16 arg) { return arg; }
// CHECK-LABEL: define void @pass_v8i16(<8 x i16>* noalias sret %{{.*}}, <8 x i16>*)
// CHECK-VECTOR-LABEL: define <8 x i16> @pass_v8i16(<8 x i16> %{{.*}})

v1i32 pass_v1i32(v1i32 arg) { return arg; }
// CHECK-LABEL: define void @pass_v1i32(<1 x i32>* noalias sret %{{.*}}, <1 x i32>*)
// CHECK-VECTOR-LABEL: define <1 x i32> @pass_v1i32(<1 x i32> %{{.*}})

v2i32 pass_v2i32(v2i32 arg) { return arg; }
// CHECK-LABEL: define void @pass_v2i32(<2 x i32>* noalias sret %{{.*}}, <2 x i32>*)
// CHECK-VECTOR-LABEL: define <2 x i32> @pass_v2i32(<2 x i32> %{{.*}})

v4i32 pass_v4i32(v4i32 arg) { return arg; }
// CHECK-LABEL: define void @pass_v4i32(<4 x i32>* noalias sret %{{.*}}, <4 x i32>*)
// CHECK-VECTOR-LABEL: define <4 x i32> @pass_v4i32(<4 x i32> %{{.*}})

v1i64 pass_v1i64(v1i64 arg) { return arg; }
// CHECK-LABEL: define void @pass_v1i64(<1 x i64>* noalias sret %{{.*}}, <1 x i64>*)
// CHECK-VECTOR-LABEL: define <1 x i64> @pass_v1i64(<1 x i64> %{{.*}})

v2i64 pass_v2i64(v2i64 arg) { return arg; }
// CHECK-LABEL: define void @pass_v2i64(<2 x i64>* noalias sret %{{.*}}, <2 x i64>*)
// CHECK-VECTOR-LABEL: define <2 x i64> @pass_v2i64(<2 x i64> %{{.*}})

v1i128 pass_v1i128(v1i128 arg) { return arg; }
// CHECK-LABEL: define void @pass_v1i128(<1 x i128>* noalias sret %{{.*}}, <1 x i128>*)
// CHECK-VECTOR-LABEL: define <1 x i128> @pass_v1i128(<1 x i128> %{{.*}})

v1f32 pass_v1f32(v1f32 arg) { return arg; }
// CHECK-LABEL: define void @pass_v1f32(<1 x float>* noalias sret %{{.*}}, <1 x float>*)
// CHECK-VECTOR-LABEL: define <1 x float> @pass_v1f32(<1 x float> %{{.*}})

v2f32 pass_v2f32(v2f32 arg) { return arg; }
// CHECK-LABEL: define void @pass_v2f32(<2 x float>* noalias sret %{{.*}}, <2 x float>*)
// CHECK-VECTOR-LABEL: define <2 x float> @pass_v2f32(<2 x float> %{{.*}})

v4f32 pass_v4f32(v4f32 arg) { return arg; }
// CHECK-LABEL: define void @pass_v4f32(<4 x float>* noalias sret %{{.*}}, <4 x float>*)
// CHECK-VECTOR-LABEL: define <4 x float> @pass_v4f32(<4 x float> %{{.*}})

v1f64 pass_v1f64(v1f64 arg) { return arg; }
// CHECK-LABEL: define void @pass_v1f64(<1 x double>* noalias sret %{{.*}}, <1 x double>*)
// CHECK-VECTOR-LABEL: define <1 x double> @pass_v1f64(<1 x double> %{{.*}})

v2f64 pass_v2f64(v2f64 arg) { return arg; }
// CHECK-LABEL: define void @pass_v2f64(<2 x double>* noalias sret %{{.*}}, <2 x double>*)
// CHECK-VECTOR-LABEL: define <2 x double> @pass_v2f64(<2 x double> %{{.*}})

v1f128 pass_v1f128(v1f128 arg) { return arg; }
// CHECK-LABEL: define void @pass_v1f128(<1 x fp128>* noalias sret %{{.*}}, <1 x fp128>*)
// CHECK-VECTOR-LABEL: define <1 x fp128> @pass_v1f128(<1 x fp128> %{{.*}})


// Vector-like aggregate types
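//
// An aggregate whose only member is a single vector of at most 16 bytes is
// passed (as an argument) just like that vector under the vector ABI; without
// the vector facility it is passed like any other small aggregate, i.e. as an
// integer of the same size or, at 16 bytes and above, indirectly. Aggregate
// returns use sret in both cases.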

struct agg_v1i8 { v1i8 a; };
struct agg_v1i8 pass_agg_v1i8(struct agg_v1i8 arg) { return arg; }
// CHECK-LABEL: define void @pass_agg_v1i8(%struct.agg_v1i8* noalias sret %{{.*}}, i8 %{{.*}})
// CHECK-VECTOR-LABEL: define void @pass_agg_v1i8(%struct.agg_v1i8* noalias sret %{{.*}}, <1 x i8> %{{.*}})

struct agg_v2i8 { v2i8 a; };
struct agg_v2i8 pass_agg_v2i8(struct agg_v2i8 arg) { return arg; }
// CHECK-LABEL: define void @pass_agg_v2i8(%struct.agg_v2i8* noalias sret %{{.*}}, i16 %{{.*}})
// CHECK-VECTOR-LABEL: define void @pass_agg_v2i8(%struct.agg_v2i8* noalias sret %{{.*}}, <2 x i8> %{{.*}})

struct agg_v4i8 { v4i8 a; };
struct agg_v4i8 pass_agg_v4i8(struct agg_v4i8 arg) { return arg; }
// CHECK-LABEL: define void @pass_agg_v4i8(%struct.agg_v4i8* noalias sret %{{.*}}, i32 %{{.*}})
// CHECK-VECTOR-LABEL: define void @pass_agg_v4i8(%struct.agg_v4i8* noalias sret %{{.*}}, <4 x i8> %{{.*}})

struct agg_v8i8 { v8i8 a; };
struct agg_v8i8 pass_agg_v8i8(struct agg_v8i8 arg) { return arg; }
// CHECK-LABEL: define void @pass_agg_v8i8(%struct.agg_v8i8* noalias sret %{{.*}}, i64 %{{.*}})
// CHECK-VECTOR-LABEL: define void @pass_agg_v8i8(%struct.agg_v8i8* noalias sret %{{.*}}, <8 x i8> %{{.*}})

struct agg_v16i8 { v16i8 a; };
struct agg_v16i8 pass_agg_v16i8(struct agg_v16i8 arg) { return arg; }
// CHECK-LABEL: define void @pass_agg_v16i8(%struct.agg_v16i8* noalias sret %{{.*}}, %struct.agg_v16i8* %{{.*}})
// CHECK-VECTOR-LABEL: define void @pass_agg_v16i8(%struct.agg_v16i8* noalias sret %{{.*}}, <16 x i8> %{{.*}})

struct agg_v32i8 { v32i8 a; };
struct agg_v32i8 pass_agg_v32i8(struct agg_v32i8 arg) { return arg; }
// CHECK-LABEL: define void @pass_agg_v32i8(%struct.agg_v32i8* noalias sret %{{.*}}, %struct.agg_v32i8* %{{.*}})
// CHECK-VECTOR-LABEL: define void @pass_agg_v32i8(%struct.agg_v32i8* noalias sret %{{.*}}, %struct.agg_v32i8* %{{.*}})


// Verify that the following are *not* vector-like aggregate types
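//
// Each struct below breaks the single-vector-element rule in exactly one way
// (a second vector member, an extra non-vector member, a zero-width bit-field,
// or explicitly increased member alignment) and is therefore passed like an
// ordinary aggregate under both ABIs.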

struct agg_novector1 { v4i8 a; v4i8 b; };
struct agg_novector1 pass_agg_novector1(struct agg_novector1 arg) { return arg; }
// CHECK-LABEL: define void @pass_agg_novector1(%struct.agg_novector1* noalias sret %{{.*}}, i64 %{{.*}})
// CHECK-VECTOR-LABEL: define void @pass_agg_novector1(%struct.agg_novector1* noalias sret %{{.*}}, i64 %{{.*}})

struct agg_novector2 { v4i8 a; float b; };
struct agg_novector2 pass_agg_novector2(struct agg_novector2 arg) { return arg; }
// CHECK-LABEL: define void @pass_agg_novector2(%struct.agg_novector2* noalias sret %{{.*}}, i64 %{{.*}})
// CHECK-VECTOR-LABEL: define void @pass_agg_novector2(%struct.agg_novector2* noalias sret %{{.*}}, i64 %{{.*}})

struct agg_novector3 { v4i8 a; int : 0; };
struct agg_novector3 pass_agg_novector3(struct agg_novector3 arg) { return arg; }
// CHECK-LABEL: define void @pass_agg_novector3(%struct.agg_novector3* noalias sret %{{.*}}, i32 %{{.*}})
// CHECK-VECTOR-LABEL: define void @pass_agg_novector3(%struct.agg_novector3* noalias sret %{{.*}}, i32 %{{.*}})

struct agg_novector4 { v4i8 a __attribute__((aligned (8))); };
struct agg_novector4 pass_agg_novector4(struct agg_novector4 arg) { return arg; }
// CHECK-LABEL: define void @pass_agg_novector4(%struct.agg_novector4* noalias sret %{{.*}}, i64 %{{.*}})
// CHECK-VECTOR-LABEL: define void @pass_agg_novector4(%struct.agg_novector4* noalias sret %{{.*}}, i64 %{{.*}})


// Accessing variable argument lists

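// The va_arg lowering checked below has two paths: while fewer than five
// register arguments have been consumed, the value (or, for indirectly passed
// types, a pointer to it) is taken from the register save area at offset
// 16 + 8 * reg_count (plus left-padding for sub-8-byte aggregates); otherwise
// it comes from the overflow argument area, which is then advanced past the
// argument. Under the vector ABI, variadic vector arguments are passed in
// memory, so the CHECK-VECTOR sequences read them directly from the overflow
// area.
//
// Illustrative sketch only (no CHECK lines refer to it) of a variadic consumer
// of such vector arguments; the helper name sum_v2i64 is made up, and the
// function is static and unreferenced, so it should not affect the checked IR.
static v2i64 sum_v2i64(int n, ...) {
  __builtin_va_list ap;
  __builtin_va_start(ap, n);
  v2i64 acc = { 0, 0 };
  for (int i = 0; i < n; ++i)
    acc += __builtin_va_arg(ap, v2i64);  // element-wise accumulation
  __builtin_va_end(ap);
  return acc;
}
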
v1i8 va_v1i8(__builtin_va_list l) { return __builtin_va_arg(l, v1i8); }
// CHECK-LABEL: define void @va_v1i8(<1 x i8>* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK: [[REG_COUNT_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 0
// CHECK: [[REG_COUNT:%[^ ]+]] = load i64, i64* [[REG_COUNT_PTR]]
// CHECK: [[FITS_IN_REGS:%[^ ]+]] = icmp ult i64 [[REG_COUNT]], 5
// CHECK: br i1 [[FITS_IN_REGS]],
// CHECK: [[SCALED_REG_COUNT:%[^ ]+]] = mul i64 [[REG_COUNT]], 8
// CHECK: [[REG_OFFSET:%[^ ]+]] = add i64 [[SCALED_REG_COUNT]], 16
// CHECK: [[REG_SAVE_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 3
// CHECK: [[REG_SAVE_AREA:%[^ ]+]] = load i8*, i8** [[REG_SAVE_AREA_PTR:[^ ]+]]
// CHECK: [[RAW_REG_ADDR:%[^ ]+]] = getelementptr i8, i8* [[REG_SAVE_AREA]], i64 [[REG_OFFSET]]
// CHECK: [[REG_ADDR:%[^ ]+]] = bitcast i8* [[RAW_REG_ADDR]] to <1 x i8>**
// CHECK: [[REG_COUNT1:%[^ ]+]] = add i64 [[REG_COUNT]], 1
// CHECK: store i64 [[REG_COUNT1]], i64* [[REG_COUNT_PTR]]
// CHECK: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[RAW_MEM_ADDR:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 0
// CHECK: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[RAW_MEM_ADDR]] to <1 x i8>**
// CHECK: [[OVERFLOW_ARG_AREA2:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK: store i8* [[OVERFLOW_ARG_AREA2]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[VA_ARG_ADDR:%[^ ]+]] = phi <1 x i8>** [ [[REG_ADDR]], %{{.*}} ], [ [[MEM_ADDR]], %{{.*}} ]
// CHECK: [[INDIRECT_ARG:%[^ ]+]] = load <1 x i8>*, <1 x i8>** [[VA_ARG_ADDR]]
// CHECK: ret void
// CHECK-VECTOR-LABEL: define <1 x i8> @va_v1i8(%struct.__va_list_tag* %{{.*}})
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[OVERFLOW_ARG_AREA]] to <1 x i8>*
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA1:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK-VECTOR: store i8* [[OVERFLOW_ARG_AREA1]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[RET:%[^ ]+]] = load <1 x i8>, <1 x i8>* [[MEM_ADDR]]
// CHECK-VECTOR: ret <1 x i8> [[RET]]

v2i8 va_v2i8(__builtin_va_list l) { return __builtin_va_arg(l, v2i8); }
// CHECK-LABEL: define void @va_v2i8(<2 x i8>* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK: [[REG_COUNT_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 0
// CHECK: [[REG_COUNT:%[^ ]+]] = load i64, i64* [[REG_COUNT_PTR]]
// CHECK: [[FITS_IN_REGS:%[^ ]+]] = icmp ult i64 [[REG_COUNT]], 5
// CHECK: br i1 [[FITS_IN_REGS]],
// CHECK: [[SCALED_REG_COUNT:%[^ ]+]] = mul i64 [[REG_COUNT]], 8
// CHECK: [[REG_OFFSET:%[^ ]+]] = add i64 [[SCALED_REG_COUNT]], 16
// CHECK: [[REG_SAVE_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 3
// CHECK: [[REG_SAVE_AREA:%[^ ]+]] = load i8*, i8** [[REG_SAVE_AREA_PTR:[^ ]+]]
// CHECK: [[RAW_REG_ADDR:%[^ ]+]] = getelementptr i8, i8* [[REG_SAVE_AREA]], i64 [[REG_OFFSET]]
// CHECK: [[REG_ADDR:%[^ ]+]] = bitcast i8* [[RAW_REG_ADDR]] to <2 x i8>**
// CHECK: [[REG_COUNT1:%[^ ]+]] = add i64 [[REG_COUNT]], 1
// CHECK: store i64 [[REG_COUNT1]], i64* [[REG_COUNT_PTR]]
// CHECK: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[RAW_MEM_ADDR:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 0
// CHECK: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[RAW_MEM_ADDR]] to <2 x i8>**
// CHECK: [[OVERFLOW_ARG_AREA2:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK: store i8* [[OVERFLOW_ARG_AREA2]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[VA_ARG_ADDR:%[^ ]+]] = phi <2 x i8>** [ [[REG_ADDR]], %{{.*}} ], [ [[MEM_ADDR]], %{{.*}} ]
// CHECK: [[INDIRECT_ARG:%[^ ]+]] = load <2 x i8>*, <2 x i8>** [[VA_ARG_ADDR]]
// CHECK: ret void
// CHECK-VECTOR-LABEL: define <2 x i8> @va_v2i8(%struct.__va_list_tag* %{{.*}})
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[OVERFLOW_ARG_AREA]] to <2 x i8>*
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA1:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK-VECTOR: store i8* [[OVERFLOW_ARG_AREA1]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[RET:%[^ ]+]] = load <2 x i8>, <2 x i8>* [[MEM_ADDR]]
// CHECK-VECTOR: ret <2 x i8> [[RET]]

v4i8 va_v4i8(__builtin_va_list l) { return __builtin_va_arg(l, v4i8); }
// CHECK-LABEL: define void @va_v4i8(<4 x i8>* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK: [[REG_COUNT_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 0
// CHECK: [[REG_COUNT:%[^ ]+]] = load i64, i64* [[REG_COUNT_PTR]]
// CHECK: [[FITS_IN_REGS:%[^ ]+]] = icmp ult i64 [[REG_COUNT]], 5
// CHECK: br i1 [[FITS_IN_REGS]],
// CHECK: [[SCALED_REG_COUNT:%[^ ]+]] = mul i64 [[REG_COUNT]], 8
// CHECK: [[REG_OFFSET:%[^ ]+]] = add i64 [[SCALED_REG_COUNT]], 16
// CHECK: [[REG_SAVE_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 3
// CHECK: [[REG_SAVE_AREA:%[^ ]+]] = load i8*, i8** [[REG_SAVE_AREA_PTR:[^ ]+]]
// CHECK: [[RAW_REG_ADDR:%[^ ]+]] = getelementptr i8, i8* [[REG_SAVE_AREA]], i64 [[REG_OFFSET]]
// CHECK: [[REG_ADDR:%[^ ]+]] = bitcast i8* [[RAW_REG_ADDR]] to <4 x i8>**
// CHECK: [[REG_COUNT1:%[^ ]+]] = add i64 [[REG_COUNT]], 1
// CHECK: store i64 [[REG_COUNT1]], i64* [[REG_COUNT_PTR]]
// CHECK: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[RAW_MEM_ADDR:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 0
// CHECK: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[RAW_MEM_ADDR]] to <4 x i8>**
// CHECK: [[OVERFLOW_ARG_AREA2:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK: store i8* [[OVERFLOW_ARG_AREA2]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[VA_ARG_ADDR:%[^ ]+]] = phi <4 x i8>** [ [[REG_ADDR]], %{{.*}} ], [ [[MEM_ADDR]], %{{.*}} ]
// CHECK: [[INDIRECT_ARG:%[^ ]+]] = load <4 x i8>*, <4 x i8>** [[VA_ARG_ADDR]]
// CHECK: ret void
// CHECK-VECTOR-LABEL: define <4 x i8> @va_v4i8(%struct.__va_list_tag* %{{.*}})
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[OVERFLOW_ARG_AREA]] to <4 x i8>*
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA1:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK-VECTOR: store i8* [[OVERFLOW_ARG_AREA1]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[RET:%[^ ]+]] = load <4 x i8>, <4 x i8>* [[MEM_ADDR]]
// CHECK-VECTOR: ret <4 x i8> [[RET]]

v8i8 va_v8i8(__builtin_va_list l) { return __builtin_va_arg(l, v8i8); }
// CHECK-LABEL: define void @va_v8i8(<8 x i8>* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK: [[REG_COUNT_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 0
// CHECK: [[REG_COUNT:%[^ ]+]] = load i64, i64* [[REG_COUNT_PTR]]
// CHECK: [[FITS_IN_REGS:%[^ ]+]] = icmp ult i64 [[REG_COUNT]], 5
// CHECK: br i1 [[FITS_IN_REGS]],
// CHECK: [[SCALED_REG_COUNT:%[^ ]+]] = mul i64 [[REG_COUNT]], 8
// CHECK: [[REG_OFFSET:%[^ ]+]] = add i64 [[SCALED_REG_COUNT]], 16
// CHECK: [[REG_SAVE_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 3
// CHECK: [[REG_SAVE_AREA:%[^ ]+]] = load i8*, i8** [[REG_SAVE_AREA_PTR:[^ ]+]]
// CHECK: [[RAW_REG_ADDR:%[^ ]+]] = getelementptr i8, i8* [[REG_SAVE_AREA]], i64 [[REG_OFFSET]]
// CHECK: [[REG_ADDR:%[^ ]+]] = bitcast i8* [[RAW_REG_ADDR]] to <8 x i8>**
// CHECK: [[REG_COUNT1:%[^ ]+]] = add i64 [[REG_COUNT]], 1
// CHECK: store i64 [[REG_COUNT1]], i64* [[REG_COUNT_PTR]]
// CHECK: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[RAW_MEM_ADDR:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 0
// CHECK: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[RAW_MEM_ADDR]] to <8 x i8>**
// CHECK: [[OVERFLOW_ARG_AREA2:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK: store i8* [[OVERFLOW_ARG_AREA2]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[VA_ARG_ADDR:%[^ ]+]] = phi <8 x i8>** [ [[REG_ADDR]], %{{.*}} ], [ [[MEM_ADDR]], %{{.*}} ]
// CHECK: [[INDIRECT_ARG:%[^ ]+]] = load <8 x i8>*, <8 x i8>** [[VA_ARG_ADDR]]
// CHECK: ret void
// CHECK-VECTOR-LABEL: define <8 x i8> @va_v8i8(%struct.__va_list_tag* %{{.*}})
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[OVERFLOW_ARG_AREA]] to <8 x i8>*
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA1:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK-VECTOR: store i8* [[OVERFLOW_ARG_AREA1]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[RET:%[^ ]+]] = load <8 x i8>, <8 x i8>* [[MEM_ADDR]]
// CHECK-VECTOR: ret <8 x i8> [[RET]]

v16i8 va_v16i8(__builtin_va_list l) { return __builtin_va_arg(l, v16i8); }
// CHECK-LABEL: define void @va_v16i8(<16 x i8>* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK: [[REG_COUNT_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 0
// CHECK: [[REG_COUNT:%[^ ]+]] = load i64, i64* [[REG_COUNT_PTR]]
// CHECK: [[FITS_IN_REGS:%[^ ]+]] = icmp ult i64 [[REG_COUNT]], 5
// CHECK: br i1 [[FITS_IN_REGS]],
// CHECK: [[SCALED_REG_COUNT:%[^ ]+]] = mul i64 [[REG_COUNT]], 8
// CHECK: [[REG_OFFSET:%[^ ]+]] = add i64 [[SCALED_REG_COUNT]], 16
// CHECK: [[REG_SAVE_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 3
// CHECK: [[REG_SAVE_AREA:%[^ ]+]] = load i8*, i8** [[REG_SAVE_AREA_PTR:[^ ]+]]
// CHECK: [[RAW_REG_ADDR:%[^ ]+]] = getelementptr i8, i8* [[REG_SAVE_AREA]], i64 [[REG_OFFSET]]
// CHECK: [[REG_ADDR:%[^ ]+]] = bitcast i8* [[RAW_REG_ADDR]] to <16 x i8>**
// CHECK: [[REG_COUNT1:%[^ ]+]] = add i64 [[REG_COUNT]], 1
// CHECK: store i64 [[REG_COUNT1]], i64* [[REG_COUNT_PTR]]
// CHECK: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[RAW_MEM_ADDR:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 0
// CHECK: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[RAW_MEM_ADDR]] to <16 x i8>**
// CHECK: [[OVERFLOW_ARG_AREA2:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK: store i8* [[OVERFLOW_ARG_AREA2]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[VA_ARG_ADDR:%[^ ]+]] = phi <16 x i8>** [ [[REG_ADDR]], %{{.*}} ], [ [[MEM_ADDR]], %{{.*}} ]
// CHECK: [[INDIRECT_ARG:%[^ ]+]] = load <16 x i8>*, <16 x i8>** [[VA_ARG_ADDR]]
// CHECK: ret void
// CHECK-VECTOR-LABEL: define <16 x i8> @va_v16i8(%struct.__va_list_tag* %{{.*}})
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[OVERFLOW_ARG_AREA]] to <16 x i8>*
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA1:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 16
// CHECK-VECTOR: store i8* [[OVERFLOW_ARG_AREA1]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[RET:%[^ ]+]] = load <16 x i8>, <16 x i8>* [[MEM_ADDR]]
// CHECK-VECTOR: ret <16 x i8> [[RET]]

v32i8 va_v32i8(__builtin_va_list l) { return __builtin_va_arg(l, v32i8); }
// CHECK-LABEL: define void @va_v32i8(<32 x i8>* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK: [[REG_COUNT_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 0
// CHECK: [[REG_COUNT:%[^ ]+]] = load i64, i64* [[REG_COUNT_PTR]]
// CHECK: [[FITS_IN_REGS:%[^ ]+]] = icmp ult i64 [[REG_COUNT]], 5
// CHECK: br i1 [[FITS_IN_REGS]],
// CHECK: [[SCALED_REG_COUNT:%[^ ]+]] = mul i64 [[REG_COUNT]], 8
// CHECK: [[REG_OFFSET:%[^ ]+]] = add i64 [[SCALED_REG_COUNT]], 16
// CHECK: [[REG_SAVE_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 3
// CHECK: [[REG_SAVE_AREA:%[^ ]+]] = load i8*, i8** [[REG_SAVE_AREA_PTR:[^ ]+]]
// CHECK: [[RAW_REG_ADDR:%[^ ]+]] = getelementptr i8, i8* [[REG_SAVE_AREA]], i64 [[REG_OFFSET]]
// CHECK: [[REG_ADDR:%[^ ]+]] = bitcast i8* [[RAW_REG_ADDR]] to <32 x i8>**
// CHECK: [[REG_COUNT1:%[^ ]+]] = add i64 [[REG_COUNT]], 1
// CHECK: store i64 [[REG_COUNT1]], i64* [[REG_COUNT_PTR]]
// CHECK: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[RAW_MEM_ADDR:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 0
// CHECK: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[RAW_MEM_ADDR]] to <32 x i8>**
// CHECK: [[OVERFLOW_ARG_AREA2:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK: store i8* [[OVERFLOW_ARG_AREA2]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[VA_ARG_ADDR:%[^ ]+]] = phi <32 x i8>** [ [[REG_ADDR]], %{{.*}} ], [ [[MEM_ADDR]], %{{.*}} ]
// CHECK: [[INDIRECT_ARG:%[^ ]+]] = load <32 x i8>*, <32 x i8>** [[VA_ARG_ADDR]]
// CHECK: ret void
// CHECK-VECTOR-LABEL: define void @va_v32i8(<32 x i8>* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK-VECTOR: [[REG_COUNT_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 0
// CHECK-VECTOR: [[REG_COUNT:%[^ ]+]] = load i64, i64* [[REG_COUNT_PTR]]
// CHECK-VECTOR: [[FITS_IN_REGS:%[^ ]+]] = icmp ult i64 [[REG_COUNT]], 5
// CHECK-VECTOR: br i1 [[FITS_IN_REGS]],
// CHECK-VECTOR: [[SCALED_REG_COUNT:%[^ ]+]] = mul i64 [[REG_COUNT]], 8
// CHECK-VECTOR: [[REG_OFFSET:%[^ ]+]] = add i64 [[SCALED_REG_COUNT]], 16
// CHECK-VECTOR: [[REG_SAVE_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 3
// CHECK-VECTOR: [[REG_SAVE_AREA:%[^ ]+]] = load i8*, i8** [[REG_SAVE_AREA_PTR:[^ ]+]]
// CHECK-VECTOR: [[RAW_REG_ADDR:%[^ ]+]] = getelementptr i8, i8* [[REG_SAVE_AREA]], i64 [[REG_OFFSET]]
// CHECK-VECTOR: [[REG_ADDR:%[^ ]+]] = bitcast i8* [[RAW_REG_ADDR]] to <32 x i8>**
// CHECK-VECTOR: [[REG_COUNT1:%[^ ]+]] = add i64 [[REG_COUNT]], 1
// CHECK-VECTOR: store i64 [[REG_COUNT1]], i64* [[REG_COUNT_PTR]]
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[RAW_MEM_ADDR:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 0
// CHECK-VECTOR: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[RAW_MEM_ADDR]] to <32 x i8>**
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA2:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK-VECTOR: store i8* [[OVERFLOW_ARG_AREA2]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[VA_ARG_ADDR:%[^ ]+]] = phi <32 x i8>** [ [[REG_ADDR]], %{{.*}} ], [ [[MEM_ADDR]], %{{.*}} ]
// CHECK-VECTOR: [[INDIRECT_ARG:%[^ ]+]] = load <32 x i8>*, <32 x i8>** [[VA_ARG_ADDR]]
// CHECK-VECTOR: ret void
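//
// A 32-byte vector is too large for the vector ABI as well, so the
// CHECK-VECTOR sequence above matches the same indirect register-save-area /
// overflow-area path as the non-vector case.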

struct agg_v1i8 va_agg_v1i8(__builtin_va_list l) { return __builtin_va_arg(l, struct agg_v1i8); }
// CHECK-LABEL: define void @va_agg_v1i8(%struct.agg_v1i8* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK: [[REG_COUNT_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 0
// CHECK: [[REG_COUNT:%[^ ]+]] = load i64, i64* [[REG_COUNT_PTR]]
// CHECK: [[FITS_IN_REGS:%[^ ]+]] = icmp ult i64 [[REG_COUNT]], 5
// CHECK: br i1 [[FITS_IN_REGS]],
// CHECK: [[SCALED_REG_COUNT:%[^ ]+]] = mul i64 [[REG_COUNT]], 8
// CHECK: [[REG_OFFSET:%[^ ]+]] = add i64 [[SCALED_REG_COUNT]], 23
// CHECK: [[REG_SAVE_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 3
// CHECK: [[REG_SAVE_AREA:%[^ ]+]] = load i8*, i8** [[REG_SAVE_AREA_PTR:[^ ]+]]
// CHECK: [[RAW_REG_ADDR:%[^ ]+]] = getelementptr i8, i8* [[REG_SAVE_AREA]], i64 [[REG_OFFSET]]
// CHECK: [[REG_ADDR:%[^ ]+]] = bitcast i8* [[RAW_REG_ADDR]] to %struct.agg_v1i8*
// CHECK: [[REG_COUNT1:%[^ ]+]] = add i64 [[REG_COUNT]], 1
// CHECK: store i64 [[REG_COUNT1]], i64* [[REG_COUNT_PTR]]
// CHECK: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[RAW_MEM_ADDR:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 7
// CHECK: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[RAW_MEM_ADDR]] to %struct.agg_v1i8*
// CHECK: [[OVERFLOW_ARG_AREA2:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK: store i8* [[OVERFLOW_ARG_AREA2]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[VA_ARG_ADDR:%[^ ]+]] = phi %struct.agg_v1i8* [ [[REG_ADDR]], %{{.*}} ], [ [[MEM_ADDR]], %{{.*}} ]
// CHECK: ret void
// CHECK-VECTOR-LABEL: define void @va_agg_v1i8(%struct.agg_v1i8* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[OVERFLOW_ARG_AREA]] to %struct.agg_v1i8*
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA1:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK-VECTOR: store i8* [[OVERFLOW_ARG_AREA1]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: ret void

struct agg_v2i8 va_agg_v2i8(__builtin_va_list l) { return __builtin_va_arg(l, struct agg_v2i8); }
// CHECK-LABEL: define void @va_agg_v2i8(%struct.agg_v2i8* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK: [[REG_COUNT_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 0
// CHECK: [[REG_COUNT:%[^ ]+]] = load i64, i64* [[REG_COUNT_PTR]]
// CHECK: [[FITS_IN_REGS:%[^ ]+]] = icmp ult i64 [[REG_COUNT]], 5
// CHECK: br i1 [[FITS_IN_REGS]],
// CHECK: [[SCALED_REG_COUNT:%[^ ]+]] = mul i64 [[REG_COUNT]], 8
// CHECK: [[REG_OFFSET:%[^ ]+]] = add i64 [[SCALED_REG_COUNT]], 22
// CHECK: [[REG_SAVE_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 3
// CHECK: [[REG_SAVE_AREA:%[^ ]+]] = load i8*, i8** [[REG_SAVE_AREA_PTR:[^ ]+]]
// CHECK: [[RAW_REG_ADDR:%[^ ]+]] = getelementptr i8, i8* [[REG_SAVE_AREA]], i64 [[REG_OFFSET]]
// CHECK: [[REG_ADDR:%[^ ]+]] = bitcast i8* [[RAW_REG_ADDR]] to %struct.agg_v2i8*
// CHECK: [[REG_COUNT1:%[^ ]+]] = add i64 [[REG_COUNT]], 1
// CHECK: store i64 [[REG_COUNT1]], i64* [[REG_COUNT_PTR]]
// CHECK: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[RAW_MEM_ADDR:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 6
// CHECK: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[RAW_MEM_ADDR]] to %struct.agg_v2i8*
// CHECK: [[OVERFLOW_ARG_AREA2:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK: store i8* [[OVERFLOW_ARG_AREA2]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[VA_ARG_ADDR:%[^ ]+]] = phi %struct.agg_v2i8* [ [[REG_ADDR]], %{{.*}} ], [ [[MEM_ADDR]], %{{.*}} ]
// CHECK: ret void
// CHECK-VECTOR-LABEL: define void @va_agg_v2i8(%struct.agg_v2i8* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[OVERFLOW_ARG_AREA]] to %struct.agg_v2i8*
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA1:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK-VECTOR: store i8* [[OVERFLOW_ARG_AREA1]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: ret void

struct agg_v4i8 va_agg_v4i8(__builtin_va_list l) { return __builtin_va_arg(l, struct agg_v4i8); }
// CHECK-LABEL: define void @va_agg_v4i8(%struct.agg_v4i8* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK: [[REG_COUNT_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 0
// CHECK: [[REG_COUNT:%[^ ]+]] = load i64, i64* [[REG_COUNT_PTR]]
// CHECK: [[FITS_IN_REGS:%[^ ]+]] = icmp ult i64 [[REG_COUNT]], 5
// CHECK: br i1 [[FITS_IN_REGS]],
// CHECK: [[SCALED_REG_COUNT:%[^ ]+]] = mul i64 [[REG_COUNT]], 8
// CHECK: [[REG_OFFSET:%[^ ]+]] = add i64 [[SCALED_REG_COUNT]], 20
// CHECK: [[REG_SAVE_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 3
// CHECK: [[REG_SAVE_AREA:%[^ ]+]] = load i8*, i8** [[REG_SAVE_AREA_PTR:[^ ]+]]
// CHECK: [[RAW_REG_ADDR:%[^ ]+]] = getelementptr i8, i8* [[REG_SAVE_AREA]], i64 [[REG_OFFSET]]
// CHECK: [[REG_ADDR:%[^ ]+]] = bitcast i8* [[RAW_REG_ADDR]] to %struct.agg_v4i8*
// CHECK: [[REG_COUNT1:%[^ ]+]] = add i64 [[REG_COUNT]], 1
// CHECK: store i64 [[REG_COUNT1]], i64* [[REG_COUNT_PTR]]
// CHECK: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[RAW_MEM_ADDR:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 4
// CHECK: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[RAW_MEM_ADDR]] to %struct.agg_v4i8*
// CHECK: [[OVERFLOW_ARG_AREA2:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK: store i8* [[OVERFLOW_ARG_AREA2]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[VA_ARG_ADDR:%[^ ]+]] = phi %struct.agg_v4i8* [ [[REG_ADDR]], %{{.*}} ], [ [[MEM_ADDR]], %{{.*}} ]
// CHECK: ret void
// CHECK-VECTOR-LABEL: define void @va_agg_v4i8(%struct.agg_v4i8* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[OVERFLOW_ARG_AREA]] to %struct.agg_v4i8*
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA1:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK-VECTOR: store i8* [[OVERFLOW_ARG_AREA1]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: ret void

struct agg_v8i8 va_agg_v8i8(__builtin_va_list l) { return __builtin_va_arg(l, struct agg_v8i8); }
// CHECK-LABEL: define void @va_agg_v8i8(%struct.agg_v8i8* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK: [[REG_COUNT_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 0
// CHECK: [[REG_COUNT:%[^ ]+]] = load i64, i64* [[REG_COUNT_PTR]]
// CHECK: [[FITS_IN_REGS:%[^ ]+]] = icmp ult i64 [[REG_COUNT]], 5
// CHECK: br i1 [[FITS_IN_REGS]],
// CHECK: [[SCALED_REG_COUNT:%[^ ]+]] = mul i64 [[REG_COUNT]], 8
// CHECK: [[REG_OFFSET:%[^ ]+]] = add i64 [[SCALED_REG_COUNT]], 16
// CHECK: [[REG_SAVE_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 3
// CHECK: [[REG_SAVE_AREA:%[^ ]+]] = load i8*, i8** [[REG_SAVE_AREA_PTR:[^ ]+]]
// CHECK: [[RAW_REG_ADDR:%[^ ]+]] = getelementptr i8, i8* [[REG_SAVE_AREA]], i64 [[REG_OFFSET]]
// CHECK: [[REG_ADDR:%[^ ]+]] = bitcast i8* [[RAW_REG_ADDR]] to %struct.agg_v8i8*
// CHECK: [[REG_COUNT1:%[^ ]+]] = add i64 [[REG_COUNT]], 1
// CHECK: store i64 [[REG_COUNT1]], i64* [[REG_COUNT_PTR]]
// CHECK: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[RAW_MEM_ADDR:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 0
// CHECK: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[RAW_MEM_ADDR]] to %struct.agg_v8i8*
// CHECK: [[OVERFLOW_ARG_AREA2:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK: store i8* [[OVERFLOW_ARG_AREA2]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[VA_ARG_ADDR:%[^ ]+]] = phi %struct.agg_v8i8* [ [[REG_ADDR]], %{{.*}} ], [ [[MEM_ADDR]], %{{.*}} ]
// CHECK: ret void
// CHECK-VECTOR-LABEL: define void @va_agg_v8i8(%struct.agg_v8i8* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[OVERFLOW_ARG_AREA]] to %struct.agg_v8i8*
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA1:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK-VECTOR: store i8* [[OVERFLOW_ARG_AREA1]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: ret void

struct agg_v16i8 va_agg_v16i8(__builtin_va_list l) { return __builtin_va_arg(l, struct agg_v16i8); }
// CHECK-LABEL: define void @va_agg_v16i8(%struct.agg_v16i8* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK: [[REG_COUNT_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 0
// CHECK: [[REG_COUNT:%[^ ]+]] = load i64, i64* [[REG_COUNT_PTR]]
// CHECK: [[FITS_IN_REGS:%[^ ]+]] = icmp ult i64 [[REG_COUNT]], 5
// CHECK: br i1 [[FITS_IN_REGS]],
// CHECK: [[SCALED_REG_COUNT:%[^ ]+]] = mul i64 [[REG_COUNT]], 8
// CHECK: [[REG_OFFSET:%[^ ]+]] = add i64 [[SCALED_REG_COUNT]], 16
// CHECK: [[REG_SAVE_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 3
// CHECK: [[REG_SAVE_AREA:%[^ ]+]] = load i8*, i8** [[REG_SAVE_AREA_PTR:[^ ]+]]
// CHECK: [[RAW_REG_ADDR:%[^ ]+]] = getelementptr i8, i8* [[REG_SAVE_AREA]], i64 [[REG_OFFSET]]
// CHECK: [[REG_ADDR:%[^ ]+]] = bitcast i8* [[RAW_REG_ADDR]] to %struct.agg_v16i8**
// CHECK: [[REG_COUNT1:%[^ ]+]] = add i64 [[REG_COUNT]], 1
// CHECK: store i64 [[REG_COUNT1]], i64* [[REG_COUNT_PTR]]
// CHECK: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[RAW_MEM_ADDR:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 0
// CHECK: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[RAW_MEM_ADDR]] to %struct.agg_v16i8**
// CHECK: [[OVERFLOW_ARG_AREA2:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK: store i8* [[OVERFLOW_ARG_AREA2]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[VA_ARG_ADDR:%[^ ]+]] = phi %struct.agg_v16i8** [ [[REG_ADDR]], %{{.*}} ], [ [[MEM_ADDR]], %{{.*}} ]
// CHECK: [[INDIRECT_ARG:%[^ ]+]] = load %struct.agg_v16i8*, %struct.agg_v16i8** [[VA_ARG_ADDR]]
// CHECK: ret void
// CHECK-VECTOR-LABEL: define void @va_agg_v16i8(%struct.agg_v16i8* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[OVERFLOW_ARG_AREA]] to %struct.agg_v16i8*
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA1:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 16
// CHECK-VECTOR: store i8* [[OVERFLOW_ARG_AREA1]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: ret void

struct agg_v32i8 va_agg_v32i8(__builtin_va_list l) { return __builtin_va_arg(l, struct agg_v32i8); }
// CHECK-LABEL: define void @va_agg_v32i8(%struct.agg_v32i8* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK: [[REG_COUNT_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 0
// CHECK: [[REG_COUNT:%[^ ]+]] = load i64, i64* [[REG_COUNT_PTR]]
// CHECK: [[FITS_IN_REGS:%[^ ]+]] = icmp ult i64 [[REG_COUNT]], 5
// CHECK: br i1 [[FITS_IN_REGS]],
// CHECK: [[SCALED_REG_COUNT:%[^ ]+]] = mul i64 [[REG_COUNT]], 8
// CHECK: [[REG_OFFSET:%[^ ]+]] = add i64 [[SCALED_REG_COUNT]], 16
// CHECK: [[REG_SAVE_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 3
// CHECK: [[REG_SAVE_AREA:%[^ ]+]] = load i8*, i8** [[REG_SAVE_AREA_PTR:[^ ]+]]
// CHECK: [[RAW_REG_ADDR:%[^ ]+]] = getelementptr i8, i8* [[REG_SAVE_AREA]], i64 [[REG_OFFSET]]
// CHECK: [[REG_ADDR:%[^ ]+]] = bitcast i8* [[RAW_REG_ADDR]] to %struct.agg_v32i8**
// CHECK: [[REG_COUNT1:%[^ ]+]] = add i64 [[REG_COUNT]], 1
// CHECK: store i64 [[REG_COUNT1]], i64* [[REG_COUNT_PTR]]
// CHECK: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[RAW_MEM_ADDR:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 0
// CHECK: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[RAW_MEM_ADDR]] to %struct.agg_v32i8**
// CHECK: [[OVERFLOW_ARG_AREA2:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK: store i8* [[OVERFLOW_ARG_AREA2]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK: [[VA_ARG_ADDR:%[^ ]+]] = phi %struct.agg_v32i8** [ [[REG_ADDR]], %{{.*}} ], [ [[MEM_ADDR]], %{{.*}} ]
// CHECK: [[INDIRECT_ARG:%[^ ]+]] = load %struct.agg_v32i8*, %struct.agg_v32i8** [[VA_ARG_ADDR]]
// CHECK: ret void
// CHECK-VECTOR-LABEL: define void @va_agg_v32i8(%struct.agg_v32i8* noalias sret %{{.*}}, %struct.__va_list_tag* %{{.*}})
// CHECK-VECTOR: [[REG_COUNT_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 0
// CHECK-VECTOR: [[REG_COUNT:%[^ ]+]] = load i64, i64* [[REG_COUNT_PTR]]
// CHECK-VECTOR: [[FITS_IN_REGS:%[^ ]+]] = icmp ult i64 [[REG_COUNT]], 5
// CHECK-VECTOR: br i1 [[FITS_IN_REGS]],
// CHECK-VECTOR: [[SCALED_REG_COUNT:%[^ ]+]] = mul i64 [[REG_COUNT]], 8
// CHECK-VECTOR: [[REG_OFFSET:%[^ ]+]] = add i64 [[SCALED_REG_COUNT]], 16
// CHECK-VECTOR: [[REG_SAVE_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 3
// CHECK-VECTOR: [[REG_SAVE_AREA:%[^ ]+]] = load i8*, i8** [[REG_SAVE_AREA_PTR:[^ ]+]]
// CHECK-VECTOR: [[RAW_REG_ADDR:%[^ ]+]] = getelementptr i8, i8* [[REG_SAVE_AREA]], i64 [[REG_OFFSET]]
// CHECK-VECTOR: [[REG_ADDR:%[^ ]+]] = bitcast i8* [[RAW_REG_ADDR]] to %struct.agg_v32i8**
// CHECK-VECTOR: [[REG_COUNT1:%[^ ]+]] = add i64 [[REG_COUNT]], 1
// CHECK-VECTOR: store i64 [[REG_COUNT1]], i64* [[REG_COUNT_PTR]]
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA_PTR:%[^ ]+]] = getelementptr inbounds %struct.__va_list_tag, %struct.__va_list_tag* %{{.*}}, i32 0, i32 2
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA:%[^ ]+]] = load i8*, i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[RAW_MEM_ADDR:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 0
// CHECK-VECTOR: [[MEM_ADDR:%[^ ]+]] = bitcast i8* [[RAW_MEM_ADDR]] to %struct.agg_v32i8**
// CHECK-VECTOR: [[OVERFLOW_ARG_AREA2:%[^ ]+]] = getelementptr i8, i8* [[OVERFLOW_ARG_AREA]], i64 8
// CHECK-VECTOR: store i8* [[OVERFLOW_ARG_AREA2]], i8** [[OVERFLOW_ARG_AREA_PTR]]
// CHECK-VECTOR: [[VA_ARG_ADDR:%[^ ]+]] = phi %struct.agg_v32i8** [ [[REG_ADDR]], %{{.*}} ], [ [[MEM_ADDR]], %{{.*}} ]
// CHECK-VECTOR: [[INDIRECT_ARG:%[^ ]+]] = load %struct.agg_v32i8*, %struct.agg_v32i8** [[VA_ARG_ADDR]]
// CHECK-VECTOR: ret void