
Lines Matching refs:addrspace
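
The matches below appear to come from a GVN regression test for non-integral pointers: values in address spaces 4 and 5 whose bit representation the optimizer may not inspect, so loads and stores must not be coerced between pointer and integer types. Functions prefixed neg_ expect no forwarding (their CHECK lines keep the load); the forward_ functions expect GVN to fold the load to a constant. The file's preamble did not match and is not shown; a minimal harness for a test of this kind would look roughly like the sketch below (the exact RUN line and datalayout string are assumptions, not lines from this file):

; RUN: opt -passes=gvn -S < %s | FileCheck %s
; The trailing "ni" component is what marks address spaces 4 and 5 as non-integral.
target datalayout = "e-m:e-i64:64-ni:4:5"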

13 ; CHECK-NEXT:    [[LOC_BC:%.*]] = bitcast i64* [[LOC]] to i8 addrspace(4)**
14 ; CHECK-NEXT: [[PTR:%.*]] = load i8 addrspace(4)*, i8 addrspace(4)** [[LOC_BC]], align 8
15 ; CHECK-NEXT: store i8 5, i8 addrspace(4)* [[PTR]], align 1
25 %loc.bc = bitcast i64* %loc to i8 addrspace(4)**
26 %ptr = load i8 addrspace(4)*, i8 addrspace(4)** %loc.bc
27 store i8 5, i8 addrspace(4)* %ptr
34 define i64 @f1(i1 %alwaysFalse, i8 addrspace(4)* %val, i8 addrspace(4)** %loc) {
37 ; CHECK-NEXT: store i8 addrspace(4)* [[VAL:%.*]], i8 addrspace(4)** [[LOC:%.*]], align 8
40 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast i8 addrspace(4)** [[LOC]] to i64*
47 store i8 addrspace(4)* %val, i8 addrspace(4)** %loc
51 %loc.bc = bitcast i8 addrspace(4)** %loc to i64*
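; In both fragments above, the store and load disagree about whether the value is a
; non-integral pointer or a plain i64, and the CHECK lines keep both memory
; operations: reinterpreting the bits in either direction would amount to an
; illegal ptrtoint/inttoptr of an addrspace(4) pointer.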
62 declare void @llvm.memset.p4i8.i64(i8 addrspace(4)* nocapture, i8, i64, i1) nounwind
65 define i8 addrspace(4)* @neg_forward_memset(i8 addrspace(4)* addrspace(4)* %loc) {
68 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast i8 addrspace(4)* addrspace(4)* [[LOC:%.*]] to i8 addrspace(4)*
69 ; CHECK-NEXT: call void @llvm.memset.p4i8.i64(i8 addrspace(4)* align 4 [[LOC_BC]], i8 7, i64 8, i1 false)
70 ; CHECK-NEXT: [[REF:%.*]] = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* [[LOC]], align 8
71 ; CHECK-NEXT: ret i8 addrspace(4)* [[REF]]
74 %loc.bc = bitcast i8 addrspace(4)* addrspace(4)* %loc to i8 addrspace(4)*
75 call void @llvm.memset.p4i8.i64(i8 addrspace(4)* align 4 %loc.bc, i8 7, i64 8, i1 false)
76 %ref = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* %loc
77 ret i8 addrspace(4)* %ref
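; Forwarding the memset pattern to the pointer-typed load would require
; materializing a non-integral pointer from the byte 7, so the load must stay,
; as the CHECK lines confirm.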
80 define <1 x i8 addrspace(4)*> @neg_forward_memset_vload(<1 x i8 addrspace(4)*> addrspace(4)* %loc) {
83 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast <1 x i8 addrspace(4)*> addrspace(4)* [[LOC:%.*]] to i8 addrspace(4)*
84 ; CHECK-NEXT: call void @llvm.memset.p4i8.i64(i8 addrspace(4)* align 4 [[LOC_BC]], i8 7, i64 8, i1 false)
85 ; CHECK-NEXT: [[REF:%.*]] = load <1 x i8 addrspace(4)*>, <1 x i8 addrspace(4)*> addrspace(4)* [[LOC]], align 8
86 ; CHECK-NEXT: ret <1 x i8 addrspace(4)*> [[REF]]
89 %loc.bc = bitcast <1 x i8 addrspace(4)*> addrspace(4)* %loc to i8 addrspace(4)*
90 call void @llvm.memset.p4i8.i64(i8 addrspace(4)* align 4 %loc.bc, i8 7, i64 8, i1 false)
91 %ref = load <1 x i8 addrspace(4)*>, <1 x i8 addrspace(4)*> addrspace(4)* %loc
92 ret <1 x i8 addrspace(4)*> %ref
97 define i8 addrspace(4)* @forward_memset_zero(i8 addrspace(4)* addrspace(4)* %loc) {
100 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast i8 addrspace(4)* addrspace(4)* [[LOC:%.*]] to i8 addrspace(4)*
101 ; CHECK-NEXT: call void @llvm.memset.p4i8.i64(i8 addrspace(4)* align 4 [[LOC_BC]], i8 0, i64 8, i1 false)
102 ; CHECK-NEXT: ret i8 addrspace(4)* null
105 %loc.bc = bitcast i8 addrspace(4)* addrspace(4)* %loc to i8 addrspace(4)*
106 call void @llvm.memset.p4i8.i64(i8 addrspace(4)* align 4 %loc.bc, i8 0, i64 8, i1 false)
107 %ref = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* %loc
108 ret i8 addrspace(4)* %ref
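; Zero is the one bit pattern with a known meaning: memset-to-0 is forwardable,
; and GVN folds the load to null (the ret in the CHECK lines).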
112 define i8 addrspace(4)* @neg_forward_store(i8 addrspace(4)* addrspace(4)* %loc) {
115 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast i8 addrspace(4)* addrspace(4)* [[LOC:%.*]] to i64 addrspace(4)*
116 ; CHECK-NEXT: store i64 5, i64 addrspace(4)* [[LOC_BC]], align 8
117 ; CHECK-NEXT: [[REF:%.*]] = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* [[LOC]], align 8
118 ; CHECK-NEXT: ret i8 addrspace(4)* [[REF]]
121 %loc.bc = bitcast i8 addrspace(4)* addrspace(4)* %loc to i64 addrspace(4)*
122 store i64 5, i64 addrspace(4)* %loc.bc
123 %ref = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* %loc
124 ret i8 addrspace(4)* %ref
127 define <1 x i8 addrspace(4)*> @neg_forward_store_vload(<1 x i8 addrspace(4)*> addrspace(4)* %loc) {
130 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast <1 x i8 addrspace(4)*> addrspace(4)* [[LOC:%.*]] to i64 addrspace(4)*
131 ; CHECK-NEXT: store i64 5, i64 addrspace(4)* [[LOC_BC]], align 8
132 ; CHECK-NEXT: [[REF:%.*]] = load <1 x i8 addrspace(4)*>, <1 x i8 addrspace(4)*> addrspace(4)* [[LOC]], align 8
133 ; CHECK-NEXT: ret <1 x i8 addrspace(4)*> [[REF]]
136 %loc.bc = bitcast <1 x i8 addrspace(4)*> addrspace(4)* %loc to i64 addrspace(4)*
137 store i64 5, i64 addrspace(4)* %loc.bc
138 %ref = load <1 x i8 addrspace(4)*>, <1 x i8 addrspace(4)*> addrspace(4)* %loc
139 ret <1 x i8 addrspace(4)*> %ref
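; A plain i64 store is no better than a memset: the integer 5 cannot be
; reinterpreted as a non-integral pointer, scalar or <1 x ...> vector, so both
; loads survive.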
143 define i8 addrspace(4)* @forward_store_zero(i8 addrspace(4)* addrspace(4)* %loc) {
146 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast i8 addrspace(4)* addrspace(4)* [[LOC:%.*]] to i64 addrspace(4)*
147 ; CHECK-NEXT: store i64 0, i64 addrspace(4)* [[LOC_BC]], align 8
148 ; CHECK-NEXT: ret i8 addrspace(4)* null
151 %loc.bc = bitcast i8 addrspace(4)* addrspace(4)* %loc to i64 addrspace(4)*
152 store i64 0, i64 addrspace(4)* %loc.bc
153 %ref = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* %loc
154 ret i8 addrspace(4)* %ref
158 define i8 addrspace(4)* @forward_store_zero2(i8 addrspace(4)* addrspace(4)* %loc) {
161 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast i8 addrspace(4)* addrspace(4)* [[LOC:%.*]] to <2 x i32> addrspace(4)*
162 ; CHECK-NEXT: store <2 x i32> zeroinitializer, <2 x i32> addrspace(4)* [[LOC_BC]], align 8
163 ; CHECK-NEXT: ret i8 addrspace(4)* null
166 %loc.bc = bitcast i8 addrspace(4)* addrspace(4)* %loc to <2 x i32> addrspace(4)*
167 store <2 x i32> zeroinitializer, <2 x i32> addrspace(4)* %loc.bc
168 %ref = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* %loc
169 ret i8 addrspace(4)* %ref
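; As with memset, storing zero is forwardable: both the i64 0 store and the
; <2 x i32> zeroinitializer store let GVN return null directly.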
175 @NonZeroConstant2 = constant <4 x i64 addrspace(4)*> <
176 i64 addrspace(4)* getelementptr (i64, i64 addrspace(4)* null, i32 3),
177 i64 addrspace(4)* getelementptr (i64, i64 addrspace(4)* null, i32 3),
178 i64 addrspace(4)* getelementptr (i64, i64 addrspace(4)* null, i32 3),
179 i64 addrspace(4)* getelementptr (i64, i64 addrspace(4)* null, i32 3)>
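; A constant vector of identical non-integral pointers, used as the memcpy source
; in the forward_ tests below. The neg_ memcpy tests copy from source constants
; whose defining lines did not match addrspace and so are not shown here.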
184 define i8 addrspace(4)* @neg_forward_memcopy(i8 addrspace(4)* addrspace(4)* %loc) {
187 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast i8 addrspace(4)* addrspace(4)* [[LOC:%.*]] to i8 addrspace(4)*
188 ; CHECK-NEXT: call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 [[LOC_BC]], i8* bitc…
189 ; CHECK-NEXT: [[REF:%.*]] = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* [[LOC]], align 8
190 ; CHECK-NEXT: ret i8 addrspace(4)* [[REF]]
193 %loc.bc = bitcast i8 addrspace(4)* addrspace(4)* %loc to i8 addrspace(4)*
195 call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 %loc.bc, i8* %src.bc, i64 8, i1 false)
196 %ref = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* %loc
197 ret i8 addrspace(4)* %ref
200 define i64 addrspace(4)* @neg_forward_memcopy2(i64 addrspace(4)* addrspace(4)* %loc) {
203 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast i64 addrspace(4)* addrspace(4)* [[LOC:%.*]] to i8 addrspace(4)*
204 ; CHECK-NEXT: call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 [[LOC_BC]], i8* bitc…
205 ; CHECK-NEXT: [[REF:%.*]] = load i64 addrspace(4)*, i64 addrspace(4)* addrspace(4)* [[LOC]], align 8
206 ; CHECK-NEXT: ret i64 addrspace(4)* [[REF]]
209 %loc.bc = bitcast i64 addrspace(4)* addrspace(4)* %loc to i8 addrspace(4)*
211 call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 %loc.bc, i8* %src.bc, i64 8, i1 false)
212 %ref = load i64 addrspace(4)*, i64 addrspace(4)* addrspace(4)* %loc
213 ret i64 addrspace(4)* %ref
217 define i8 addrspace(4)* @forward_memcopy(i8 addrspace(4)* addrspace(4)* %loc) {
220 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast i8 addrspace(4)* addrspace(4)* [[LOC:%.*]] to i8 addrspace(4)*
221 ; CHECK-NEXT: call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 [[LOC_BC]], i8* bitcast (<4 x i64 addrspace(4)*>* @NonZeroConstant2 to i8*), i64 8, i1 false)
222 ; CHECK-NEXT: [[REF:%.*]] = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* [[LOC]], align 8
223 ; CHECK-NEXT: ret i8 addrspace(4)* [[REF]]
226 %loc.bc = bitcast i8 addrspace(4)* addrspace(4)* %loc to i8 addrspace(4)*
227 %src.bc = bitcast <4 x i64 addrspace(4)*>* @NonZeroConstant2 to i8*
228 call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 %loc.bc, i8* %src.bc, i64 8, i1 false)
229 %ref = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* %loc
230 ret i8 addrspace(4)* %ref
233 define i64 addrspace(4)* @forward_memcopy2(i64 addrspace(4)* addrspace(4)* %loc) {
236 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast i64 addrspace(4)* addrspace(4)* [[LOC:%.*]] to i8 addrspace(4)*
237 ; CHECK-NEXT: call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 [[LOC_BC]], i8* bitcast (<4 x i64 addrspace(4)*>* @NonZeroConstant2 to i8*), i64 8, i1 false)
238 ; CHECK-NEXT: ret i64 addrspace(4)* getelementptr (i64, i64 addrspace(4)* null, i32 3)
241 %loc.bc = bitcast i64 addrspace(4)* addrspace(4)* %loc to i8 addrspace(4)*
242 %src.bc = bitcast <4 x i64 addrspace(4)*>* @NonZeroConstant2 to i8*
243 call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 %loc.bc, i8* %src.bc, i64 8, i1 false)
244 %ref = load i64 addrspace(4)*, i64 addrspace(4)* addrspace(4)* %loc
245 ret i64 addrspace(4)* %ref
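; Forwarding from a copied constant works only when the loaded type matches the
; constant's element type: forward_memcopy2 loads i64 addrspace(4)* and folds to
; the getelementptr constant, while forward_memcopy loads i8 addrspace(4)* and
; its load is left in place (compare the two CHECK blocks).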
248 define <1 x i8 addrspace(4)*> @neg_forward_memcpy_vload(<1 x i8 addrspace(4)*> addrspace(4)* %loc) {
251 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast <1 x i8 addrspace(4)*> addrspace(4)* [[LOC:%.*]] to i8 addrspace(4)*
252 ; CHECK-NEXT: call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 [[LOC_BC]], i8* bitc…
253 ; CHECK-NEXT: [[REF:%.*]] = load <1 x i8 addrspace(4)*>, <1 x i8 addrspace(4)*> addrspace(4)* [[LOC]], align 8
254 ; CHECK-NEXT: ret <1 x i8 addrspace(4)*> [[REF]]
257 %loc.bc = bitcast <1 x i8 addrspace(4)*> addrspace(4)* %loc to i8 addrspace(4)*
259 call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 %loc.bc, i8* %src.bc, i64 8, i1 false)
260 %ref = load <1 x i8 addrspace(4)*>, <1 x i8 addrspace(4)*> addrspace(4)* %loc
261 ret <1 x i8 addrspace(4)*> %ref
264 define <4 x i64 addrspace(4)*> @neg_forward_memcpy_vload2(<4 x i64 addrspace(4)*> addrspace(4)* %loc) {
267 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast <4 x i64 addrspace(4)*> addrspace(4)* [[LOC:%.*]] to i8 addrspace(4)*
268 ; CHECK-NEXT: call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 [[LOC_BC]], i8* bitc…
269 ; CHECK-NEXT: [[REF:%.*]] = load <4 x i64 addrspace(4)*>, <4 x i64 addrspace(4)*> addrspace(4)* [[LOC]], align 32
270 ; CHECK-NEXT: ret <4 x i64 addrspace(4)*> [[REF]]
273 %loc.bc = bitcast <4 x i64 addrspace(4)*> addrspace(4)* %loc to i8 addrspace(4)*
275 call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 %loc.bc, i8* %src.bc, i64 32, i1 false)
276 %ref = load <4 x i64 addrspace(4)*>, <4 x i64 addrspace(4)*> addrspace(4)* %loc
277 ret <4 x i64 addrspace(4)*> %ref
280 define <4 x i64> @neg_forward_memcpy_vload3(<4 x i64> addrspace(4)* %loc) {
283 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast <4 x i64> addrspace(4)* [[LOC:%.*]] to i8 addrspace(4)*
284 ; CHECK-NEXT: call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 [[LOC_BC]], i8* bitcast (<4 x i64 addrspace(4)*>* @NonZeroConstant2 to i8*), i64 32, i1 false)
285 ; CHECK-NEXT: [[REF:%.*]] = load <4 x i64>, <4 x i64> addrspace(4)* [[LOC]], align 32
289 %loc.bc = bitcast <4 x i64> addrspace(4)* %loc to i8 addrspace(4)*
290 %src.bc = bitcast <4 x i64 addrspace(4)*>* @NonZeroConstant2 to i8*
291 call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 %loc.bc, i8* %src.bc, i64 32, i1 false)
292 %ref = load <4 x i64>, <4 x i64> addrspace(4)* %loc
296 define <1 x i64 addrspace(4)*> @forward_memcpy_vload3(<4 x i64 addrspace(4)*> addrspace(4)* %loc) {
299 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast <4 x i64 addrspace(4)*> addrspace(4)* [[LOC:%.*]] to i8 addrspace(4)*
300 ; CHECK-NEXT: call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 [[LOC_BC]], i8* bitcast (<4 x i64 addrspace(4)*>* @NonZeroConstant2 to i8*), i64 32, i1 false)
301 ; CHECK-NEXT: ret <1 x i64 addrspace(4)*> <i64 addrspace(4)* getelementptr (i64, i64 addrspace(4)* null, i32 3)>
304 %loc.bc = bitcast <4 x i64 addrspace(4)*> addrspace(4)* %loc to i8 addrspace(4)*
305 %src.bc = bitcast <4 x i64 addrspace(4)*>* @NonZeroConstant2 to i8*
306 call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 %loc.bc, i8* %src.bc, i64 32, i1 false)
307 %ref = load <4 x i64 addrspace(4)*>, <4 x i64 addrspace(4)*> addrspace(4)* %loc
308 %val = extractelement <4 x i64 addrspace(4)*> %ref, i32 0
309 %ret = insertelement <1 x i64 addrspace(4)*> undef, i64 addrspace(4)* %val, i32 0
310 ret <1 x i64 addrspace(4)*> %ret
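; The vector variant folds the same way: element 0 of the copied constant is
; forwarded, so the extractelement/insertelement pair collapses into a constant
; <1 x i64 addrspace(4)*> return.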
314 define i8 addrspace(4)* @forward_memcpy_zero(i8 addrspace(4)* addrspace(4)* %loc) {
317 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast i8 addrspace(4)* addrspace(4)* [[LOC:%.*]] to i8 addrspace(4)*
318 ; CHECK-NEXT: call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 [[LOC_BC]], i8* bitc…
319 ; CHECK-NEXT: ret i8 addrspace(4)* null
322 %loc.bc = bitcast i8 addrspace(4)* addrspace(4)* %loc to i8 addrspace(4)*
324 call void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* align 4 %loc.bc, i8* %src.bc, i64 8, i1 false)
325 %ref = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* %loc
326 ret i8 addrspace(4)* %ref
329 declare void @llvm.memcpy.p4i8.p0i8.i64(i8 addrspace(4)* nocapture, i8* nocapture, i64, i1) nounwind
334 define i8 addrspace(4)* @neg_store_clobber(i8 addrspace(4)* addrspace(4)* %loc) {
337 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast i8 addrspace(4)* addrspace(4)* [[LOC:%.*]] to <2 x i64> addrspace(4)*
338 ; CHECK-NEXT: store <2 x i64> <i64 4, i64 4>, <2 x i64> addrspace(4)* [[LOC_BC]], align 16
339 ; CHECK-NEXT: [[LOC_OFF:%.*]] = getelementptr i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* [[LOC]], i64 1
340 ; CHECK-NEXT: [[REF:%.*]] = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* [[LOC_OFF]], align 8
341 ; CHECK-NEXT: ret i8 addrspace(4)* [[REF]]
344 %loc.bc = bitcast i8 addrspace(4)* addrspace(4)* %loc to <2 x i64> addrspace(4)*
345 store <2 x i64> <i64 4, i64 4>, <2 x i64> addrspace(4)* %loc.bc
346 %loc.off = getelementptr i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* %loc, i64 1
347 %ref = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* %loc.off
348 ret i8 addrspace(4)* %ref
355 define i8 addrspace(4)* @neg_load_clobber(i8 addrspace(4)* addrspace(4)* %loc) {
358 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast i8 addrspace(4)* addrspace(4)* [[LOC:%.*]] to <2 x i64> addrspace(4)*
359 ; CHECK-NEXT: [[V:%.*]] = load <2 x i64>, <2 x i64> addrspace(4)* [[LOC_BC]], align 16
361 ; CHECK-NEXT: [[LOC_OFF:%.*]] = getelementptr i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* [[LOC]], i64 1
362 ; CHECK-NEXT: [[REF:%.*]] = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* [[LOC_OFF]], align 8
363 ; CHECK-NEXT: ret i8 addrspace(4)* [[REF]]
366 %loc.bc = bitcast i8 addrspace(4)* addrspace(4)* %loc to <2 x i64> addrspace(4)*
367 %v = load <2 x i64>, <2 x i64> addrspace(4)* %loc.bc
369 %loc.off = getelementptr i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* %loc, i64 1
370 %ref = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* %loc.off
371 ret i8 addrspace(4)* %ref
374 define i8 addrspace(4)* @store_clobber_zero(i8 addrspace(4)* addrspace(4)* %loc) {
377 ; CHECK-NEXT: [[LOC_BC:%.*]] = bitcast i8 addrspace(4)* addrspace(4)* [[LOC:%.*]] to <2 x i64> addrspace(4)*
378 ; CHECK-NEXT: store <2 x i64> zeroinitializer, <2 x i64> addrspace(4)* [[LOC_BC]], align 16
379 ; CHECK-NEXT: [[LOC_OFF:%.*]] = getelementptr i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* [[LOC]], i64 1
380 ; CHECK-NEXT: ret i8 addrspace(4)* null
383 %loc.bc = bitcast i8 addrspace(4)* addrspace(4)* %loc to <2 x i64> addrspace(4)*
384 store <2 x i64> zeroinitializer, <2 x i64> addrspace(4)* %loc.bc
385 %loc.off = getelementptr i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* %loc, i64 1
386 %ref = load i8 addrspace(4)*, i8 addrspace(4)* addrspace(4)* %loc.off
387 ret i8 addrspace(4)* %ref
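; The clobber tests read the second pointer slot after a 16-byte access that
; covers it. A <2 x i64> store (or load) of arbitrary bits cannot supply that
; non-integral pointer, so neg_store_clobber and neg_load_clobber keep their
; loads; an all-zero wide store can, and store_clobber_zero again returns null.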
394 ; CHECK-NEXT: [[A:%.*]] = bitcast i8* [[P:%.*]] to <4 x i64 addrspace(4)*>*
395 ; CHECK-NEXT: [[B:%.*]] = bitcast i8* [[P]] to <2 x i64 addrspace(4)*>*
396 ; CHECK-NEXT: [[V4:%.*]] = load <4 x i64 addrspace(4)*>, <4 x i64 addrspace(4)*>* [[A]], align 32
397 ; CHECK-NEXT: [[V2:%.*]] = load <2 x i64 addrspace(4)*>, <2 x i64 addrspace(4)*>* [[B]], align 32
398 ; CHECK-NEXT: call void @use.v2(<2 x i64 addrspace(4)*> [[V2]])
399 ; CHECK-NEXT: call void @use.v4(<4 x i64 addrspace(4)*> [[V4]])
403 %a = bitcast i8* %p to <4 x i64 addrspace(4)*>*
404 %b = bitcast i8* %p to <2 x i64 addrspace(4)*>*
405 %v4 = load <4 x i64 addrspace(4)*>, <4 x i64 addrspace(4)*>* %a, align 32
406 %v2 = load <2 x i64 addrspace(4)*>, <2 x i64 addrspace(4)*>* %b, align 32
407 call void @use.v2(<2 x i64 addrspace(4)*> %v2)
408 call void @use.v4(<4 x i64 addrspace(4)*> %v4)
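; Both loads of %p are kept: GVN does not synthesize the narrower <2 x ...>
; value out of the wider <4 x ...> load when the elements are non-integral
; pointers.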
412 define i64 addrspace(4)* @vector_extract(i8* %p) {
415 ; CHECK-NEXT: [[A:%.*]] = bitcast i8* [[P:%.*]] to <4 x i64 addrspace(4)*>*
416 ; CHECK-NEXT: [[B:%.*]] = bitcast i8* [[P]] to i64 addrspace(4)**
417 ; CHECK-NEXT: [[V4:%.*]] = load <4 x i64 addrspace(4)*>, <4 x i64 addrspace(4)*>* [[A]], align 32
418 ; CHECK-NEXT: [[RES:%.*]] = load i64 addrspace(4)*, i64 addrspace(4)** [[B]], align 32
419 ; CHECK-NEXT: call void @use.v4(<4 x i64 addrspace(4)*> [[V4]])
420 ; CHECK-NEXT: ret i64 addrspace(4)* [[RES]]
423 %a = bitcast i8* %p to <4 x i64 addrspace(4)*>*
424 %b = bitcast i8* %p to i64 addrspace(4)**
425 %v4 = load <4 x i64 addrspace(4)*>, <4 x i64 addrspace(4)*>* %a, align 32
426 %res = load i64 addrspace(4)*, i64 addrspace(4)** %b, align 32
427 call void @use.v4(<4 x i64 addrspace(4)*> %v4)
428 ret i64 addrspace(4)* %res
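; Likewise for the scalar case: the i64 addrspace(4)* load is not rewritten as
; an extractelement of [[V4]].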
431 declare void @use.v2(<2 x i64 addrspace(4)*>)
432 declare void @use.v4(<4 x i64 addrspace(4)*>)
433 define i8 addrspace(5)* @multini(i1 %alwaysFalse, i8 addrspace(4)* %val, i8 addrspace(4)** %loc) {
439 store i8 addrspace(4)* %val, i8 addrspace(4)** %loc
443 %loc.bc = bitcast i8 addrspace(4)** %loc to i8 addrspace(5)**
444 %differentas = load i8 addrspace(5)*, i8 addrspace(5)** %loc.bc
445 ret i8 addrspace(5)* %differentas
448 ret i8 addrspace(5)* null
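; multini stores an addrspace(4) pointer and reloads the same bits at
; addrspace(5); its branch structure and CHECK lines sit on lines that did not
; match. Forwarding across address spaces would need an addrspacecast that is
; not known to be legal for non-integral pointers, so the stored value must not
; reach the addrspace(5) load.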