Lines Matching refs:addrspace
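The numbers below are the line positions of the matches in the underlying LLVM IR test file, which exercises 64-bit atomic operations (atomicrmw, cmpxchg, and atomic load/store) on addrspace(4) pointers. Lines that do not mention addrspace, such as the entry label, the ret void terminator, and each function's closing brace, are omitted by the filter. As a minimal sketch of what one complete function plausibly looks like in the full file (the entry block and terminator are assumed here, since they do not appear in the matches):

define void @atomic_add_i64_offset(i64 addrspace(4)* %out, i64 %in) {
entry:
  ; address the element four i64s (32 bytes) past %out
  %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
  ; sequentially consistent read-modify-write add; the old value is discarded
  %tmp0 = atomicrmw volatile add i64 addrspace(4)* %gep, i64 %in seq_cst
  ret void
}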

6 define void @atomic_add_i64_offset(i64 addrspace(4)* %out, i64 %in) {
8 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
9 %tmp0 = atomicrmw volatile add i64 addrspace(4)* %gep, i64 %in seq_cst
16 define void @atomic_add_i64_ret_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
18 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
19 %tmp0 = atomicrmw volatile add i64 addrspace(4)* %gep, i64 %in seq_cst
20 store i64 %tmp0, i64 addrspace(4)* %out2
26 define void @atomic_add_i64_addr64_offset(i64 addrspace(4)* %out, i64 %in, i64 %index) {
28 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
29 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
30 %tmp0 = atomicrmw volatile add i64 addrspace(4)* %gep, i64 %in seq_cst
37 define void @atomic_add_i64_ret_addr64_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
39 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
40 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
41 %tmp0 = atomicrmw volatile add i64 addrspace(4)* %gep, i64 %in seq_cst
42 store i64 %tmp0, i64 addrspace(4)* %out2
48 define void @atomic_add_i64(i64 addrspace(4)* %out, i64 %in) {
50 %tmp0 = atomicrmw volatile add i64 addrspace(4)* %out, i64 %in seq_cst
57 define void @atomic_add_i64_ret(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
59 %tmp0 = atomicrmw volatile add i64 addrspace(4)* %out, i64 %in seq_cst
60 store i64 %tmp0, i64 addrspace(4)* %out2
66 define void @atomic_add_i64_addr64(i64 addrspace(4)* %out, i64 %in, i64 %index) {
68 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
69 %tmp0 = atomicrmw volatile add i64 addrspace(4)* %ptr, i64 %in seq_cst
76 define void @atomic_add_i64_ret_addr64(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
78 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
79 %tmp0 = atomicrmw volatile add i64 addrspace(4)* %ptr, i64 %in seq_cst
80 store i64 %tmp0, i64 addrspace(4)* %out2
86 define void @atomic_and_i64_offset(i64 addrspace(4)* %out, i64 %in) {
88 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
89 %tmp0 = atomicrmw volatile and i64 addrspace(4)* %gep, i64 %in seq_cst
96 define void @atomic_and_i64_ret_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
98 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
99 %tmp0 = atomicrmw volatile and i64 addrspace(4)* %gep, i64 %in seq_cst
100 store i64 %tmp0, i64 addrspace(4)* %out2
106 define void @atomic_and_i64_addr64_offset(i64 addrspace(4)* %out, i64 %in, i64 %index) {
108 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
109 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
110 %tmp0 = atomicrmw volatile and i64 addrspace(4)* %gep, i64 %in seq_cst
117 define void @atomic_and_i64_ret_addr64_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
119 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
120 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
121 %tmp0 = atomicrmw volatile and i64 addrspace(4)* %gep, i64 %in seq_cst
122 store i64 %tmp0, i64 addrspace(4)* %out2
128 define void @atomic_and_i64(i64 addrspace(4)* %out, i64 %in) {
130 %tmp0 = atomicrmw volatile and i64 addrspace(4)* %out, i64 %in seq_cst
137 define void @atomic_and_i64_ret(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
139 %tmp0 = atomicrmw volatile and i64 addrspace(4)* %out, i64 %in seq_cst
140 store i64 %tmp0, i64 addrspace(4)* %out2
146 define void @atomic_and_i64_addr64(i64 addrspace(4)* %out, i64 %in, i64 %index) {
148 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
149 %tmp0 = atomicrmw volatile and i64 addrspace(4)* %ptr, i64 %in seq_cst
156 define void @atomic_and_i64_ret_addr64(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
158 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
159 %tmp0 = atomicrmw volatile and i64 addrspace(4)* %ptr, i64 %in seq_cst
160 store i64 %tmp0, i64 addrspace(4)* %out2
166 define void @atomic_sub_i64_offset(i64 addrspace(4)* %out, i64 %in) {
168 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
169 %tmp0 = atomicrmw volatile sub i64 addrspace(4)* %gep, i64 %in seq_cst
176 define void @atomic_sub_i64_ret_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
178 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
179 %tmp0 = atomicrmw volatile sub i64 addrspace(4)* %gep, i64 %in seq_cst
180 store i64 %tmp0, i64 addrspace(4)* %out2
186 define void @atomic_sub_i64_addr64_offset(i64 addrspace(4)* %out, i64 %in, i64 %index) {
188 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
189 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
190 %tmp0 = atomicrmw volatile sub i64 addrspace(4)* %gep, i64 %in seq_cst
197 define void @atomic_sub_i64_ret_addr64_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
199 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
200 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
201 %tmp0 = atomicrmw volatile sub i64 addrspace(4)* %gep, i64 %in seq_cst
202 store i64 %tmp0, i64 addrspace(4)* %out2
208 define void @atomic_sub_i64(i64 addrspace(4)* %out, i64 %in) {
210 %tmp0 = atomicrmw volatile sub i64 addrspace(4)* %out, i64 %in seq_cst
217 define void @atomic_sub_i64_ret(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
219 %tmp0 = atomicrmw volatile sub i64 addrspace(4)* %out, i64 %in seq_cst
220 store i64 %tmp0, i64 addrspace(4)* %out2
226 define void @atomic_sub_i64_addr64(i64 addrspace(4)* %out, i64 %in, i64 %index) {
228 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
229 %tmp0 = atomicrmw volatile sub i64 addrspace(4)* %ptr, i64 %in seq_cst
236 define void @atomic_sub_i64_ret_addr64(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
238 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
239 %tmp0 = atomicrmw volatile sub i64 addrspace(4)* %ptr, i64 %in seq_cst
240 store i64 %tmp0, i64 addrspace(4)* %out2
246 define void @atomic_max_i64_offset(i64 addrspace(4)* %out, i64 %in) {
248 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
249 %tmp0 = atomicrmw volatile max i64 addrspace(4)* %gep, i64 %in seq_cst
256 define void @atomic_max_i64_ret_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
258 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
259 %tmp0 = atomicrmw volatile max i64 addrspace(4)* %gep, i64 %in seq_cst
260 store i64 %tmp0, i64 addrspace(4)* %out2
266 define void @atomic_max_i64_addr64_offset(i64 addrspace(4)* %out, i64 %in, i64 %index) {
268 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
269 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
270 %tmp0 = atomicrmw volatile max i64 addrspace(4)* %gep, i64 %in seq_cst
277 define void @atomic_max_i64_ret_addr64_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
279 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
280 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
281 %tmp0 = atomicrmw volatile max i64 addrspace(4)* %gep, i64 %in seq_cst
282 store i64 %tmp0, i64 addrspace(4)* %out2
288 define void @atomic_max_i64(i64 addrspace(4)* %out, i64 %in) {
290 %tmp0 = atomicrmw volatile max i64 addrspace(4)* %out, i64 %in seq_cst
297 define void @atomic_max_i64_ret(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
299 %tmp0 = atomicrmw volatile max i64 addrspace(4)* %out, i64 %in seq_cst
300 store i64 %tmp0, i64 addrspace(4)* %out2
306 define void @atomic_max_i64_addr64(i64 addrspace(4)* %out, i64 %in, i64 %index) {
308 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
309 %tmp0 = atomicrmw volatile max i64 addrspace(4)* %ptr, i64 %in seq_cst
316 define void @atomic_max_i64_ret_addr64(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
318 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
319 %tmp0 = atomicrmw volatile max i64 addrspace(4)* %ptr, i64 %in seq_cst
320 store i64 %tmp0, i64 addrspace(4)* %out2
326 define void @atomic_umax_i64_offset(i64 addrspace(4)* %out, i64 %in) {
328 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
329 %tmp0 = atomicrmw volatile umax i64 addrspace(4)* %gep, i64 %in seq_cst
336 define void @atomic_umax_i64_ret_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
338 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
339 %tmp0 = atomicrmw volatile umax i64 addrspace(4)* %gep, i64 %in seq_cst
340 store i64 %tmp0, i64 addrspace(4)* %out2
346 define void @atomic_umax_i64_addr64_offset(i64 addrspace(4)* %out, i64 %in, i64 %index) {
348 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
349 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
350 %tmp0 = atomicrmw volatile umax i64 addrspace(4)* %gep, i64 %in seq_cst
357 define void @atomic_umax_i64_ret_addr64_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
359 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
360 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
361 %tmp0 = atomicrmw volatile umax i64 addrspace(4)* %gep, i64 %in seq_cst
362 store i64 %tmp0, i64 addrspace(4)* %out2
368 define void @atomic_umax_i64(i64 addrspace(4)* %out, i64 %in) {
370 %tmp0 = atomicrmw volatile umax i64 addrspace(4)* %out, i64 %in seq_cst
377 define void @atomic_umax_i64_ret(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
379 %tmp0 = atomicrmw volatile umax i64 addrspace(4)* %out, i64 %in seq_cst
380 store i64 %tmp0, i64 addrspace(4)* %out2
386 define void @atomic_umax_i64_addr64(i64 addrspace(4)* %out, i64 %in, i64 %index) {
388 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
389 %tmp0 = atomicrmw volatile umax i64 addrspace(4)* %ptr, i64 %in seq_cst
396 define void @atomic_umax_i64_ret_addr64(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
398 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
399 %tmp0 = atomicrmw volatile umax i64 addrspace(4)* %ptr, i64 %in seq_cst
400 store i64 %tmp0, i64 addrspace(4)* %out2
406 define void @atomic_min_i64_offset(i64 addrspace(4)* %out, i64 %in) {
408 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
409 %tmp0 = atomicrmw volatile min i64 addrspace(4)* %gep, i64 %in seq_cst
416 define void @atomic_min_i64_ret_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
418 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
419 %tmp0 = atomicrmw volatile min i64 addrspace(4)* %gep, i64 %in seq_cst
420 store i64 %tmp0, i64 addrspace(4)* %out2
426 define void @atomic_min_i64_addr64_offset(i64 addrspace(4)* %out, i64 %in, i64 %index) {
428 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
429 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
430 %tmp0 = atomicrmw volatile min i64 addrspace(4)* %gep, i64 %in seq_cst
437 define void @atomic_min_i64_ret_addr64_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
439 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
440 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
441 %tmp0 = atomicrmw volatile min i64 addrspace(4)* %gep, i64 %in seq_cst
442 store i64 %tmp0, i64 addrspace(4)* %out2
448 define void @atomic_min_i64(i64 addrspace(4)* %out, i64 %in) {
450 %tmp0 = atomicrmw volatile min i64 addrspace(4)* %out, i64 %in seq_cst
457 define void @atomic_min_i64_ret(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
459 %tmp0 = atomicrmw volatile min i64 addrspace(4)* %out, i64 %in seq_cst
460 store i64 %tmp0, i64 addrspace(4)* %out2
466 define void @atomic_min_i64_addr64(i64 addrspace(4)* %out, i64 %in, i64 %index) {
468 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
469 %tmp0 = atomicrmw volatile min i64 addrspace(4)* %ptr, i64 %in seq_cst
476 define void @atomic_min_i64_ret_addr64(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
478 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
479 %tmp0 = atomicrmw volatile min i64 addrspace(4)* %ptr, i64 %in seq_cst
480 store i64 %tmp0, i64 addrspace(4)* %out2
486 define void @atomic_umin_i64_offset(i64 addrspace(4)* %out, i64 %in) {
488 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
489 %tmp0 = atomicrmw volatile umin i64 addrspace(4)* %gep, i64 %in seq_cst
496 define void @atomic_umin_i64_ret_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
498 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
499 %tmp0 = atomicrmw volatile umin i64 addrspace(4)* %gep, i64 %in seq_cst
500 store i64 %tmp0, i64 addrspace(4)* %out2
506 define void @atomic_umin_i64_addr64_offset(i64 addrspace(4)* %out, i64 %in, i64 %index) {
508 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
509 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
510 %tmp0 = atomicrmw volatile umin i64 addrspace(4)* %gep, i64 %in seq_cst
517 define void @atomic_umin_i64_ret_addr64_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
519 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
520 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
521 %tmp0 = atomicrmw volatile umin i64 addrspace(4)* %gep, i64 %in seq_cst
522 store i64 %tmp0, i64 addrspace(4)* %out2
528 define void @atomic_umin_i64(i64 addrspace(4)* %out, i64 %in) {
530 %tmp0 = atomicrmw volatile umin i64 addrspace(4)* %out, i64 %in seq_cst
537 define void @atomic_umin_i64_ret(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
539 %tmp0 = atomicrmw volatile umin i64 addrspace(4)* %out, i64 %in seq_cst
540 store i64 %tmp0, i64 addrspace(4)* %out2
546 define void @atomic_umin_i64_addr64(i64 addrspace(4)* %out, i64 %in, i64 %index) {
548 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
549 %tmp0 = atomicrmw volatile umin i64 addrspace(4)* %ptr, i64 %in seq_cst
556 define void @atomic_umin_i64_ret_addr64(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
558 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
559 %tmp0 = atomicrmw volatile umin i64 addrspace(4)* %ptr, i64 %in seq_cst
560 store i64 %tmp0, i64 addrspace(4)* %out2
566 define void @atomic_or_i64_offset(i64 addrspace(4)* %out, i64 %in) {
568 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
569 %tmp0 = atomicrmw volatile or i64 addrspace(4)* %gep, i64 %in seq_cst
576 define void @atomic_or_i64_ret_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
578 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
579 %tmp0 = atomicrmw volatile or i64 addrspace(4)* %gep, i64 %in seq_cst
580 store i64 %tmp0, i64 addrspace(4)* %out2
586 define void @atomic_or_i64_addr64_offset(i64 addrspace(4)* %out, i64 %in, i64 %index) {
588 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
589 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
590 %tmp0 = atomicrmw volatile or i64 addrspace(4)* %gep, i64 %in seq_cst
597 define void @atomic_or_i64_ret_addr64_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
599 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
600 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
601 %tmp0 = atomicrmw volatile or i64 addrspace(4)* %gep, i64 %in seq_cst
602 store i64 %tmp0, i64 addrspace(4)* %out2
608 define void @atomic_or_i64(i64 addrspace(4)* %out, i64 %in) {
610 %tmp0 = atomicrmw volatile or i64 addrspace(4)* %out, i64 %in seq_cst
617 define void @atomic_or_i64_ret(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
619 %tmp0 = atomicrmw volatile or i64 addrspace(4)* %out, i64 %in seq_cst
620 store i64 %tmp0, i64 addrspace(4)* %out2
626 define void @atomic_or_i64_addr64(i64 addrspace(4)* %out, i64 %in, i64 %index) {
628 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
629 %tmp0 = atomicrmw volatile or i64 addrspace(4)* %ptr, i64 %in seq_cst
636 define void @atomic_or_i64_ret_addr64(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
638 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
639 %tmp0 = atomicrmw volatile or i64 addrspace(4)* %ptr, i64 %in seq_cst
640 store i64 %tmp0, i64 addrspace(4)* %out2
646 define void @atomic_xchg_i64_offset(i64 addrspace(4)* %out, i64 %in) {
648 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
649 %tmp0 = atomicrmw volatile xchg i64 addrspace(4)* %gep, i64 %in seq_cst
656 define void @atomic_xchg_i64_ret_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
658 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
659 %tmp0 = atomicrmw volatile xchg i64 addrspace(4)* %gep, i64 %in seq_cst
660 store i64 %tmp0, i64 addrspace(4)* %out2
666 define void @atomic_xchg_i64_addr64_offset(i64 addrspace(4)* %out, i64 %in, i64 %index) {
668 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
669 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
670 %tmp0 = atomicrmw volatile xchg i64 addrspace(4)* %gep, i64 %in seq_cst
677 define void @atomic_xchg_i64_ret_addr64_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
679 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
680 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
681 %tmp0 = atomicrmw volatile xchg i64 addrspace(4)* %gep, i64 %in seq_cst
682 store i64 %tmp0, i64 addrspace(4)* %out2
688 define void @atomic_xchg_i64(i64 addrspace(4)* %out, i64 %in) {
690 %tmp0 = atomicrmw volatile xchg i64 addrspace(4)* %out, i64 %in seq_cst
697 define void @atomic_xchg_i64_ret(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
699 %tmp0 = atomicrmw volatile xchg i64 addrspace(4)* %out, i64 %in seq_cst
700 store i64 %tmp0, i64 addrspace(4)* %out2
706 define void @atomic_xchg_i64_addr64(i64 addrspace(4)* %out, i64 %in, i64 %index) {
708 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
709 %tmp0 = atomicrmw volatile xchg i64 addrspace(4)* %ptr, i64 %in seq_cst
716 define void @atomic_xchg_i64_ret_addr64(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
718 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
719 %tmp0 = atomicrmw volatile xchg i64 addrspace(4)* %ptr, i64 %in seq_cst
720 store i64 %tmp0, i64 addrspace(4)* %out2
726 define void @atomic_xor_i64_offset(i64 addrspace(4)* %out, i64 %in) {
728 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
729 %tmp0 = atomicrmw volatile xor i64 addrspace(4)* %gep, i64 %in seq_cst
736 define void @atomic_xor_i64_ret_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
738 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
739 %tmp0 = atomicrmw volatile xor i64 addrspace(4)* %gep, i64 %in seq_cst
740 store i64 %tmp0, i64 addrspace(4)* %out2
746 define void @atomic_xor_i64_addr64_offset(i64 addrspace(4)* %out, i64 %in, i64 %index) {
748 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
749 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
750 %tmp0 = atomicrmw volatile xor i64 addrspace(4)* %gep, i64 %in seq_cst
757 define void @atomic_xor_i64_ret_addr64_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
759 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
760 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
761 %tmp0 = atomicrmw volatile xor i64 addrspace(4)* %gep, i64 %in seq_cst
762 store i64 %tmp0, i64 addrspace(4)* %out2
768 define void @atomic_xor_i64(i64 addrspace(4)* %out, i64 %in) {
770 %tmp0 = atomicrmw volatile xor i64 addrspace(4)* %out, i64 %in seq_cst
777 define void @atomic_xor_i64_ret(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in) {
779 %tmp0 = atomicrmw volatile xor i64 addrspace(4)* %out, i64 %in seq_cst
780 store i64 %tmp0, i64 addrspace(4)* %out2
786 define void @atomic_xor_i64_addr64(i64 addrspace(4)* %out, i64 %in, i64 %index) {
788 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
789 %tmp0 = atomicrmw volatile xor i64 addrspace(4)* %ptr, i64 %in seq_cst
796 define void @atomic_xor_i64_ret_addr64(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index) {
798 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
799 %tmp0 = atomicrmw volatile xor i64 addrspace(4)* %ptr, i64 %in seq_cst
800 store i64 %tmp0, i64 addrspace(4)* %out2
807 define void @atomic_load_i64_offset(i64 addrspace(4)* %in, i64 addrspace(4)* %out) {
809 %gep = getelementptr i64, i64 addrspace(4)* %in, i64 4
810 %val = load atomic i64, i64 addrspace(4)* %gep seq_cst, align 8
811 store i64 %val, i64 addrspace(4)* %out
818 define void @atomic_load_i64(i64 addrspace(4)* %in, i64 addrspace(4)* %out) {
820 %val = load atomic i64, i64 addrspace(4)* %in seq_cst, align 8
821 store i64 %val, i64 addrspace(4)* %out
828 define void @atomic_load_i64_addr64_offset(i64 addrspace(4)* %in, i64 addrspace(4)* %out, i64 %index) {
830 %ptr = getelementptr i64, i64 addrspace(4)* %in, i64 %index
831 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
832 %val = load atomic i64, i64 addrspace(4)* %gep seq_cst, align 8
833 store i64 %val, i64 addrspace(4)* %out
840 define void @atomic_load_i64_addr64(i64 addrspace(4)* %in, i64 addrspace(4)* %out, i64 %index) {
842 %ptr = getelementptr i64, i64 addrspace(4)* %in, i64 %index
843 %val = load atomic i64, i64 addrspace(4)* %ptr seq_cst, align 8
844 store i64 %val, i64 addrspace(4)* %out
850 define void @atomic_store_i64_offset(i64 %in, i64 addrspace(4)* %out) {
852 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
853 store atomic i64 %in, i64 addrspace(4)* %gep seq_cst, align 8
859 define void @atomic_store_i64(i64 %in, i64 addrspace(4)* %out) {
861 store atomic i64 %in, i64 addrspace(4)* %out seq_cst, align 8
867 define void @atomic_store_i64_addr64_offset(i64 %in, i64 addrspace(4)* %out, i64 %index) {
869 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
870 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
871 store atomic i64 %in, i64 addrspace(4)* %gep seq_cst, align 8
877 define void @atomic_store_i64_addr64(i64 %in, i64 addrspace(4)* %out, i64 %index) {
879 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
880 store atomic i64 %in, i64 addrspace(4)* %ptr seq_cst, align 8
886 define void @atomic_cmpxchg_i64_offset(i64 addrspace(4)* %out, i64 %in, i64 %old) {
888 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
889 %val = cmpxchg volatile i64 addrspace(4)* %gep, i64 %old, i64 %in seq_cst seq_cst
895 define void @atomic_cmpxchg_i64_soffset(i64 addrspace(4)* %out, i64 %in, i64 %old) {
897 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 9000
898 %val = cmpxchg volatile i64 addrspace(4)* %gep, i64 %old, i64 %in seq_cst seq_cst
905 define void @atomic_cmpxchg_i64_ret_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %old) {
907 %gep = getelementptr i64, i64 addrspace(4)* %out, i64 4
908 %val = cmpxchg volatile i64 addrspace(4)* %gep, i64 %old, i64 %in seq_cst seq_cst
910 store i64 %extract0, i64 addrspace(4)* %out2
916 define void @atomic_cmpxchg_i64_addr64_offset(i64 addrspace(4)* %out, i64 %in, i64 %index, i64 %old) {
918 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
919 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
920 %val = cmpxchg volatile i64 addrspace(4)* %gep, i64 %old, i64 %in seq_cst seq_cst
927 define void @atomic_cmpxchg_i64_ret_addr64_offset(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index, i64 %old) {
929 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
930 %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 4
931 %val = cmpxchg volatile i64 addrspace(4)* %gep, i64 %old, i64 %in seq_cst seq_cst
933 store i64 %extract0, i64 addrspace(4)* %out2
939 define void @atomic_cmpxchg_i64(i64 addrspace(4)* %out, i64 %in, i64 %old) {
941 %val = cmpxchg volatile i64 addrspace(4)* %out, i64 %old, i64 %in seq_cst seq_cst
948 define void @atomic_cmpxchg_i64_ret(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %old) {
950 %val = cmpxchg volatile i64 addrspace(4)* %out, i64 %old, i64 %in seq_cst seq_cst
952 store i64 %extract0, i64 addrspace(4)* %out2
958 define void @atomic_cmpxchg_i64_addr64(i64 addrspace(4)* %out, i64 %in, i64 %index, i64 %old) {
960 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
961 %val = cmpxchg volatile i64 addrspace(4)* %ptr, i64 %old, i64 %in seq_cst seq_cst
968 define void @atomic_cmpxchg_i64_ret_addr64(i64 addrspace(4)* %out, i64 addrspace(4)* %out2, i64 %in, i64 %index, i64 %old) {
970 %ptr = getelementptr i64, i64 addrspace(4)* %out, i64 %index
971 %val = cmpxchg volatile i64 addrspace(4)* %ptr, i64 %old, i64 %in seq_cst seq_cst
973 store i64 %extract0, i64 addrspace(4)* %out2
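The _ret cmpxchg variants above store %extract0, but its defining instruction contains no addrspace reference and so does not appear in the match list. A plausible reconstruction of the elided step, assuming the conventional pattern (cmpxchg yields a { i64, i1 } pair and the loaded value is what gets stored):

  %val = cmpxchg volatile i64 addrspace(4)* %ptr, i64 %old, i64 %in seq_cst seq_cst
  %extract0 = extractvalue { i64, i1 } %val, 0   ; the value read from memory
  store i64 %extract0, i64 addrspace(4)* %out2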