• Home
  • Raw
  • Download

Lines Matching refs:addrspace

6 define void @atomic_add_i32_offset(i32 addrspace(4)* %out, i32 %in) {
8 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
9 %val = atomicrmw volatile add i32 addrspace(4)* %gep, i32 %in seq_cst
16 define void @atomic_add_i32_ret_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
18 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
19 %val = atomicrmw volatile add i32 addrspace(4)* %gep, i32 %in seq_cst
20 store i32 %val, i32 addrspace(4)* %out2
26 define void @atomic_add_i32_addr64_offset(i32 addrspace(4)* %out, i32 %in, i64 %index) {
28 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
29 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
30 %val = atomicrmw volatile add i32 addrspace(4)* %gep, i32 %in seq_cst
37 define void @atomic_add_i32_ret_addr64_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
39 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
40 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
41 %val = atomicrmw volatile add i32 addrspace(4)* %gep, i32 %in seq_cst
42 store i32 %val, i32 addrspace(4)* %out2
48 define void @atomic_add_i32(i32 addrspace(4)* %out, i32 %in) {
50 %val = atomicrmw volatile add i32 addrspace(4)* %out, i32 %in seq_cst
57 define void @atomic_add_i32_ret(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
59 %val = atomicrmw volatile add i32 addrspace(4)* %out, i32 %in seq_cst
60 store i32 %val, i32 addrspace(4)* %out2
66 define void @atomic_add_i32_addr64(i32 addrspace(4)* %out, i32 %in, i64 %index) {
68 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
69 %val = atomicrmw volatile add i32 addrspace(4)* %ptr, i32 %in seq_cst
76 define void @atomic_add_i32_ret_addr64(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
78 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
79 %val = atomicrmw volatile add i32 addrspace(4)* %ptr, i32 %in seq_cst
80 store i32 %val, i32 addrspace(4)* %out2
86 define void @atomic_and_i32_offset(i32 addrspace(4)* %out, i32 %in) {
88 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
89 %val = atomicrmw volatile and i32 addrspace(4)* %gep, i32 %in seq_cst
96 define void @atomic_and_i32_ret_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
98 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
99 %val = atomicrmw volatile and i32 addrspace(4)* %gep, i32 %in seq_cst
100 store i32 %val, i32 addrspace(4)* %out2
106 define void @atomic_and_i32_addr64_offset(i32 addrspace(4)* %out, i32 %in, i64 %index) {
108 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
109 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
110 %val = atomicrmw volatile and i32 addrspace(4)* %gep, i32 %in seq_cst
117 define void @atomic_and_i32_ret_addr64_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
119 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
120 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
121 %val = atomicrmw volatile and i32 addrspace(4)* %gep, i32 %in seq_cst
122 store i32 %val, i32 addrspace(4)* %out2
128 define void @atomic_and_i32(i32 addrspace(4)* %out, i32 %in) {
130 %val = atomicrmw volatile and i32 addrspace(4)* %out, i32 %in seq_cst
137 define void @atomic_and_i32_ret(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
139 %val = atomicrmw volatile and i32 addrspace(4)* %out, i32 %in seq_cst
140 store i32 %val, i32 addrspace(4)* %out2
146 define void @atomic_and_i32_addr64(i32 addrspace(4)* %out, i32 %in, i64 %index) {
148 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
149 %val = atomicrmw volatile and i32 addrspace(4)* %ptr, i32 %in seq_cst
156 define void @atomic_and_i32_ret_addr64(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
158 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
159 %val = atomicrmw volatile and i32 addrspace(4)* %ptr, i32 %in seq_cst
160 store i32 %val, i32 addrspace(4)* %out2
166 define void @atomic_sub_i32_offset(i32 addrspace(4)* %out, i32 %in) {
168 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
169 %val = atomicrmw volatile sub i32 addrspace(4)* %gep, i32 %in seq_cst
176 define void @atomic_sub_i32_ret_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
178 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
179 %val = atomicrmw volatile sub i32 addrspace(4)* %gep, i32 %in seq_cst
180 store i32 %val, i32 addrspace(4)* %out2
186 define void @atomic_sub_i32_addr64_offset(i32 addrspace(4)* %out, i32 %in, i64 %index) {
188 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
189 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
190 %val = atomicrmw volatile sub i32 addrspace(4)* %gep, i32 %in seq_cst
197 define void @atomic_sub_i32_ret_addr64_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
199 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
200 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
201 %val = atomicrmw volatile sub i32 addrspace(4)* %gep, i32 %in seq_cst
202 store i32 %val, i32 addrspace(4)* %out2
208 define void @atomic_sub_i32(i32 addrspace(4)* %out, i32 %in) {
210 %val = atomicrmw volatile sub i32 addrspace(4)* %out, i32 %in seq_cst
217 define void @atomic_sub_i32_ret(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
219 %val = atomicrmw volatile sub i32 addrspace(4)* %out, i32 %in seq_cst
220 store i32 %val, i32 addrspace(4)* %out2
226 define void @atomic_sub_i32_addr64(i32 addrspace(4)* %out, i32 %in, i64 %index) {
228 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
229 %val = atomicrmw volatile sub i32 addrspace(4)* %ptr, i32 %in seq_cst
236 define void @atomic_sub_i32_ret_addr64(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
238 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
239 %val = atomicrmw volatile sub i32 addrspace(4)* %ptr, i32 %in seq_cst
240 store i32 %val, i32 addrspace(4)* %out2
246 define void @atomic_max_i32_offset(i32 addrspace(4)* %out, i32 %in) {
248 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
249 %val = atomicrmw volatile max i32 addrspace(4)* %gep, i32 %in seq_cst
256 define void @atomic_max_i32_ret_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
258 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
259 %val = atomicrmw volatile max i32 addrspace(4)* %gep, i32 %in seq_cst
260 store i32 %val, i32 addrspace(4)* %out2
266 define void @atomic_max_i32_addr64_offset(i32 addrspace(4)* %out, i32 %in, i64 %index) {
268 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
269 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
270 %val = atomicrmw volatile max i32 addrspace(4)* %gep, i32 %in seq_cst
277 define void @atomic_max_i32_ret_addr64_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
279 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
280 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
281 %val = atomicrmw volatile max i32 addrspace(4)* %gep, i32 %in seq_cst
282 store i32 %val, i32 addrspace(4)* %out2
288 define void @atomic_max_i32(i32 addrspace(4)* %out, i32 %in) {
290 %val = atomicrmw volatile max i32 addrspace(4)* %out, i32 %in seq_cst
297 define void @atomic_max_i32_ret(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
299 %val = atomicrmw volatile max i32 addrspace(4)* %out, i32 %in seq_cst
300 store i32 %val, i32 addrspace(4)* %out2
306 define void @atomic_max_i32_addr64(i32 addrspace(4)* %out, i32 %in, i64 %index) {
308 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
309 %val = atomicrmw volatile max i32 addrspace(4)* %ptr, i32 %in seq_cst
316 define void @atomic_max_i32_ret_addr64(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
318 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
319 %val = atomicrmw volatile max i32 addrspace(4)* %ptr, i32 %in seq_cst
320 store i32 %val, i32 addrspace(4)* %out2
326 define void @atomic_umax_i32_offset(i32 addrspace(4)* %out, i32 %in) {
328 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
329 %val = atomicrmw volatile umax i32 addrspace(4)* %gep, i32 %in seq_cst
336 define void @atomic_umax_i32_ret_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
338 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
339 %val = atomicrmw volatile umax i32 addrspace(4)* %gep, i32 %in seq_cst
340 store i32 %val, i32 addrspace(4)* %out2
346 define void @atomic_umax_i32_addr64_offset(i32 addrspace(4)* %out, i32 %in, i64 %index) {
348 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
349 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
350 %val = atomicrmw volatile umax i32 addrspace(4)* %gep, i32 %in seq_cst
357 define void @atomic_umax_i32_ret_addr64_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
359 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
360 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
361 %val = atomicrmw volatile umax i32 addrspace(4)* %gep, i32 %in seq_cst
362 store i32 %val, i32 addrspace(4)* %out2
368 define void @atomic_umax_i32(i32 addrspace(4)* %out, i32 %in) {
370 %val = atomicrmw volatile umax i32 addrspace(4)* %out, i32 %in seq_cst
377 define void @atomic_umax_i32_ret(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
379 %val = atomicrmw volatile umax i32 addrspace(4)* %out, i32 %in seq_cst
380 store i32 %val, i32 addrspace(4)* %out2
386 define void @atomic_umax_i32_addr64(i32 addrspace(4)* %out, i32 %in, i64 %index) {
388 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
389 %val = atomicrmw volatile umax i32 addrspace(4)* %ptr, i32 %in seq_cst
396 define void @atomic_umax_i32_ret_addr64(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
398 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
399 %val = atomicrmw volatile umax i32 addrspace(4)* %ptr, i32 %in seq_cst
400 store i32 %val, i32 addrspace(4)* %out2
406 define void @atomic_min_i32_offset(i32 addrspace(4)* %out, i32 %in) {
408 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
409 %val = atomicrmw volatile min i32 addrspace(4)* %gep, i32 %in seq_cst
416 define void @atomic_min_i32_ret_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
418 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
419 %val = atomicrmw volatile min i32 addrspace(4)* %gep, i32 %in seq_cst
420 store i32 %val, i32 addrspace(4)* %out2
426 define void @atomic_min_i32_addr64_offset(i32 addrspace(4)* %out, i32 %in, i64 %index) {
428 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
429 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
430 %val = atomicrmw volatile min i32 addrspace(4)* %gep, i32 %in seq_cst
437 define void @atomic_min_i32_ret_addr64_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
439 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
440 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
441 %val = atomicrmw volatile min i32 addrspace(4)* %gep, i32 %in seq_cst
442 store i32 %val, i32 addrspace(4)* %out2
448 define void @atomic_min_i32(i32 addrspace(4)* %out, i32 %in) {
450 %val = atomicrmw volatile min i32 addrspace(4)* %out, i32 %in seq_cst
457 define void @atomic_min_i32_ret(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
459 %val = atomicrmw volatile min i32 addrspace(4)* %out, i32 %in seq_cst
460 store i32 %val, i32 addrspace(4)* %out2
466 define void @atomic_min_i32_addr64(i32 addrspace(4)* %out, i32 %in, i64 %index) {
468 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
469 %val = atomicrmw volatile min i32 addrspace(4)* %ptr, i32 %in seq_cst
476 define void @atomic_min_i32_ret_addr64(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
478 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
479 %val = atomicrmw volatile min i32 addrspace(4)* %ptr, i32 %in seq_cst
480 store i32 %val, i32 addrspace(4)* %out2
486 define void @atomic_umin_i32_offset(i32 addrspace(4)* %out, i32 %in) {
488 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
489 %val = atomicrmw volatile umin i32 addrspace(4)* %gep, i32 %in seq_cst
496 define void @atomic_umin_i32_ret_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
498 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
499 %val = atomicrmw volatile umin i32 addrspace(4)* %gep, i32 %in seq_cst
500 store i32 %val, i32 addrspace(4)* %out2
506 define void @atomic_umin_i32_addr64_offset(i32 addrspace(4)* %out, i32 %in, i64 %index) {
508 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
509 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
510 %val = atomicrmw volatile umin i32 addrspace(4)* %gep, i32 %in seq_cst
517 define void @atomic_umin_i32_ret_addr64_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
519 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
520 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
521 %val = atomicrmw volatile umin i32 addrspace(4)* %gep, i32 %in seq_cst
522 store i32 %val, i32 addrspace(4)* %out2
528 define void @atomic_umin_i32(i32 addrspace(4)* %out, i32 %in) {
530 %val = atomicrmw volatile umin i32 addrspace(4)* %out, i32 %in seq_cst
537 define void @atomic_umin_i32_ret(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
539 %val = atomicrmw volatile umin i32 addrspace(4)* %out, i32 %in seq_cst
540 store i32 %val, i32 addrspace(4)* %out2
546 define void @atomic_umin_i32_addr64(i32 addrspace(4)* %out, i32 %in, i64 %index) {
548 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
549 %val = atomicrmw volatile umin i32 addrspace(4)* %ptr, i32 %in seq_cst
556 define void @atomic_umin_i32_ret_addr64(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
558 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
559 %val = atomicrmw volatile umin i32 addrspace(4)* %ptr, i32 %in seq_cst
560 store i32 %val, i32 addrspace(4)* %out2
566 define void @atomic_or_i32_offset(i32 addrspace(4)* %out, i32 %in) {
568 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
569 %val = atomicrmw volatile or i32 addrspace(4)* %gep, i32 %in seq_cst
576 define void @atomic_or_i32_ret_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
578 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
579 %val = atomicrmw volatile or i32 addrspace(4)* %gep, i32 %in seq_cst
580 store i32 %val, i32 addrspace(4)* %out2
586 define void @atomic_or_i32_addr64_offset(i32 addrspace(4)* %out, i32 %in, i64 %index) {
588 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
589 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
590 %val = atomicrmw volatile or i32 addrspace(4)* %gep, i32 %in seq_cst
597 define void @atomic_or_i32_ret_addr64_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
599 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
600 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
601 %val = atomicrmw volatile or i32 addrspace(4)* %gep, i32 %in seq_cst
602 store i32 %val, i32 addrspace(4)* %out2
608 define void @atomic_or_i32(i32 addrspace(4)* %out, i32 %in) {
610 %val = atomicrmw volatile or i32 addrspace(4)* %out, i32 %in seq_cst
617 define void @atomic_or_i32_ret(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
619 %val = atomicrmw volatile or i32 addrspace(4)* %out, i32 %in seq_cst
620 store i32 %val, i32 addrspace(4)* %out2
626 define void @atomic_or_i32_addr64(i32 addrspace(4)* %out, i32 %in, i64 %index) {
628 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
629 %val = atomicrmw volatile or i32 addrspace(4)* %ptr, i32 %in seq_cst
636 define void @atomic_or_i32_ret_addr64(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
638 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
639 %val = atomicrmw volatile or i32 addrspace(4)* %ptr, i32 %in seq_cst
640 store i32 %val, i32 addrspace(4)* %out2
646 define void @atomic_xchg_i32_offset(i32 addrspace(4)* %out, i32 %in) {
648 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
649 %val = atomicrmw volatile xchg i32 addrspace(4)* %gep, i32 %in seq_cst
656 define void @atomic_xchg_i32_ret_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
658 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
659 %val = atomicrmw volatile xchg i32 addrspace(4)* %gep, i32 %in seq_cst
660 store i32 %val, i32 addrspace(4)* %out2
666 define void @atomic_xchg_i32_addr64_offset(i32 addrspace(4)* %out, i32 %in, i64 %index) {
668 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
669 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
670 %val = atomicrmw volatile xchg i32 addrspace(4)* %gep, i32 %in seq_cst
677 define void @atomic_xchg_i32_ret_addr64_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
679 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
680 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
681 %val = atomicrmw volatile xchg i32 addrspace(4)* %gep, i32 %in seq_cst
682 store i32 %val, i32 addrspace(4)* %out2
688 define void @atomic_xchg_i32(i32 addrspace(4)* %out, i32 %in) {
690 %val = atomicrmw volatile xchg i32 addrspace(4)* %out, i32 %in seq_cst
697 define void @atomic_xchg_i32_ret(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
699 %val = atomicrmw volatile xchg i32 addrspace(4)* %out, i32 %in seq_cst
700 store i32 %val, i32 addrspace(4)* %out2
706 define void @atomic_xchg_i32_addr64(i32 addrspace(4)* %out, i32 %in, i64 %index) {
708 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
709 %val = atomicrmw volatile xchg i32 addrspace(4)* %ptr, i32 %in seq_cst
716 define void @atomic_xchg_i32_ret_addr64(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
718 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
719 %val = atomicrmw volatile xchg i32 addrspace(4)* %ptr, i32 %in seq_cst
720 store i32 %val, i32 addrspace(4)* %out2
728 define void @atomic_cmpxchg_i32_offset(i32 addrspace(4)* %out, i32 %in, i32 %old) {
730 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
731 %val = cmpxchg volatile i32 addrspace(4)* %gep, i32 %old, i32 %in seq_cst seq_cst
738 define void @atomic_cmpxchg_i32_ret_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i32 %old) {
740 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
741 %val = cmpxchg volatile i32 addrspace(4)* %gep, i32 %old, i32 %in seq_cst seq_cst
743 store i32 %flag, i32 addrspace(4)* %out2
749 define void @atomic_cmpxchg_i32_addr64_offset(i32 addrspace(4)* %out, i32 %in, i64 %index, i32 %old) {
751 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
752 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
753 %val = cmpxchg volatile i32 addrspace(4)* %gep, i32 %old, i32 %in seq_cst seq_cst
760 define void @atomic_cmpxchg_i32_ret_addr64_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index, i32 %old) {
762 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
763 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
764 %val = cmpxchg volatile i32 addrspace(4)* %gep, i32 %old, i32 %in seq_cst seq_cst
766 store i32 %flag, i32 addrspace(4)* %out2
772 define void @atomic_cmpxchg_i32(i32 addrspace(4)* %out, i32 %in, i32 %old) {
774 %val = cmpxchg volatile i32 addrspace(4)* %out, i32 %old, i32 %in seq_cst seq_cst
781 define void @atomic_cmpxchg_i32_ret(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i32 %old) {
783 %val = cmpxchg volatile i32 addrspace(4)* %out, i32 %old, i32 %in seq_cst seq_cst
785 store i32 %flag, i32 addrspace(4)* %out2
791 define void @atomic_cmpxchg_i32_addr64(i32 addrspace(4)* %out, i32 %in, i64 %index, i32 %old) {
793 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
794 %val = cmpxchg volatile i32 addrspace(4)* %ptr, i32 %old, i32 %in seq_cst seq_cst
801 define void @atomic_cmpxchg_i32_ret_addr64(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index, i32 %old) {
803 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
804 %val = cmpxchg volatile i32 addrspace(4)* %ptr, i32 %old, i32 %in seq_cst seq_cst
806 store i32 %flag, i32 addrspace(4)* %out2
812 define void @atomic_xor_i32_offset(i32 addrspace(4)* %out, i32 %in) {
814 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
815 %val = atomicrmw volatile xor i32 addrspace(4)* %gep, i32 %in seq_cst
822 define void @atomic_xor_i32_ret_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
824 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
825 %val = atomicrmw volatile xor i32 addrspace(4)* %gep, i32 %in seq_cst
826 store i32 %val, i32 addrspace(4)* %out2
832 define void @atomic_xor_i32_addr64_offset(i32 addrspace(4)* %out, i32 %in, i64 %index) {
834 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
835 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
836 %val = atomicrmw volatile xor i32 addrspace(4)* %gep, i32 %in seq_cst
843 define void @atomic_xor_i32_ret_addr64_offset(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
845 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
846 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
847 %val = atomicrmw volatile xor i32 addrspace(4)* %gep, i32 %in seq_cst
848 store i32 %val, i32 addrspace(4)* %out2
854 define void @atomic_xor_i32(i32 addrspace(4)* %out, i32 %in) {
856 %val = atomicrmw volatile xor i32 addrspace(4)* %out, i32 %in seq_cst
863 define void @atomic_xor_i32_ret(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in) {
865 %val = atomicrmw volatile xor i32 addrspace(4)* %out, i32 %in seq_cst
866 store i32 %val, i32 addrspace(4)* %out2
872 define void @atomic_xor_i32_addr64(i32 addrspace(4)* %out, i32 %in, i64 %index) {
874 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
875 %val = atomicrmw volatile xor i32 addrspace(4)* %ptr, i32 %in seq_cst
882 define void @atomic_xor_i32_ret_addr64(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %in, i64 %index) {
884 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
885 %val = atomicrmw volatile xor i32 addrspace(4)* %ptr, i32 %in seq_cst
886 store i32 %val, i32 addrspace(4)* %out2
893 define void @atomic_load_i32_offset(i32 addrspace(4)* %in, i32 addrspace(4)* %out) {
895 %gep = getelementptr i32, i32 addrspace(4)* %in, i32 4
896 %val = load atomic i32, i32 addrspace(4)* %gep seq_cst, align 4
897 store i32 %val, i32 addrspace(4)* %out
904 define void @atomic_load_i32(i32 addrspace(4)* %in, i32 addrspace(4)* %out) {
906 %val = load atomic i32, i32 addrspace(4)* %in seq_cst, align 4
907 store i32 %val, i32 addrspace(4)* %out
914 define void @atomic_load_i32_addr64_offset(i32 addrspace(4)* %in, i32 addrspace(4)* %out, i64 %index) {
916 %ptr = getelementptr i32, i32 addrspace(4)* %in, i64 %index
917 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
918 %val = load atomic i32, i32 addrspace(4)* %gep seq_cst, align 4
919 store i32 %val, i32 addrspace(4)* %out
926 define void @atomic_load_i32_addr64(i32 addrspace(4)* %in, i32 addrspace(4)* %out, i64 %index) {
928 %ptr = getelementptr i32, i32 addrspace(4)* %in, i64 %index
929 %val = load atomic i32, i32 addrspace(4)* %ptr seq_cst, align 4
930 store i32 %val, i32 addrspace(4)* %out
936 define void @atomic_store_i32_offset(i32 %in, i32 addrspace(4)* %out) {
938 %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
939 store atomic i32 %in, i32 addrspace(4)* %gep seq_cst, align 4
945 define void @atomic_store_i32(i32 %in, i32 addrspace(4)* %out) {
947 store atomic i32 %in, i32 addrspace(4)* %out seq_cst, align 4
953 define void @atomic_store_i32_addr64_offset(i32 %in, i32 addrspace(4)* %out, i64 %index) {
955 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
956 %gep = getelementptr i32, i32 addrspace(4)* %ptr, i32 4
957 store atomic i32 %in, i32 addrspace(4)* %gep seq_cst, align 4
963 define void @atomic_store_i32_addr64(i32 %in, i32 addrspace(4)* %out, i64 %index) {
965 %ptr = getelementptr i32, i32 addrspace(4)* %out, i64 %index
966 store atomic i32 %in, i32 addrspace(4)* %ptr seq_cst, align 4