Lines matching refs:addrspace
5 define void @atomic_add_i32_offset(i32 addrspace(1)* %out, i32 %in) {
7 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
8 %0 = atomicrmw volatile add i32 addrspace(1)* %gep, i32 %in seq_cst
15 define void @atomic_add_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
17 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
18 %0 = atomicrmw volatile add i32 addrspace(1)* %gep, i32 %in seq_cst
19 store i32 %0, i32 addrspace(1)* %out2
25 define void @atomic_add_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
27 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
28 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
29 %0 = atomicrmw volatile add i32 addrspace(1)* %gep, i32 %in seq_cst
36 define void @atomic_add_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
38 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
39 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
40 %0 = atomicrmw volatile add i32 addrspace(1)* %gep, i32 %in seq_cst
41 store i32 %0, i32 addrspace(1)* %out2
47 define void @atomic_add_i32(i32 addrspace(1)* %out, i32 %in) {
49 %0 = atomicrmw volatile add i32 addrspace(1)* %out, i32 %in seq_cst
56 define void @atomic_add_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
58 %0 = atomicrmw volatile add i32 addrspace(1)* %out, i32 %in seq_cst
59 store i32 %0, i32 addrspace(1)* %out2
65 define void @atomic_add_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
67 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
68 %0 = atomicrmw volatile add i32 addrspace(1)* %ptr, i32 %in seq_cst
75 define void @atomic_add_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
77 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
78 %0 = atomicrmw volatile add i32 addrspace(1)* %ptr, i32 %in seq_cst
79 store i32 %0, i32 addrspace(1)* %out2
85 define void @atomic_and_i32_offset(i32 addrspace(1)* %out, i32 %in) {
87 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
88 %0 = atomicrmw volatile and i32 addrspace(1)* %gep, i32 %in seq_cst
95 define void @atomic_and_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
97 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
98 %0 = atomicrmw volatile and i32 addrspace(1)* %gep, i32 %in seq_cst
99 store i32 %0, i32 addrspace(1)* %out2
105 define void @atomic_and_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
107 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
108 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
109 %0 = atomicrmw volatile and i32 addrspace(1)* %gep, i32 %in seq_cst
116 define void @atomic_and_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
118 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
119 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
120 %0 = atomicrmw volatile and i32 addrspace(1)* %gep, i32 %in seq_cst
121 store i32 %0, i32 addrspace(1)* %out2
127 define void @atomic_and_i32(i32 addrspace(1)* %out, i32 %in) {
129 %0 = atomicrmw volatile and i32 addrspace(1)* %out, i32 %in seq_cst
136 define void @atomic_and_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
138 %0 = atomicrmw volatile and i32 addrspace(1)* %out, i32 %in seq_cst
139 store i32 %0, i32 addrspace(1)* %out2
145 define void @atomic_and_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
147 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
148 %0 = atomicrmw volatile and i32 addrspace(1)* %ptr, i32 %in seq_cst
155 define void @atomic_and_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
157 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
158 %0 = atomicrmw volatile and i32 addrspace(1)* %ptr, i32 %in seq_cst
159 store i32 %0, i32 addrspace(1)* %out2
165 define void @atomic_sub_i32_offset(i32 addrspace(1)* %out, i32 %in) {
167 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
168 %0 = atomicrmw volatile sub i32 addrspace(1)* %gep, i32 %in seq_cst
175 define void @atomic_sub_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
177 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
178 %0 = atomicrmw volatile sub i32 addrspace(1)* %gep, i32 %in seq_cst
179 store i32 %0, i32 addrspace(1)* %out2
185 define void @atomic_sub_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
187 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
188 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
189 %0 = atomicrmw volatile sub i32 addrspace(1)* %gep, i32 %in seq_cst
196 define void @atomic_sub_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
198 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
199 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
200 %0 = atomicrmw volatile sub i32 addrspace(1)* %gep, i32 %in seq_cst
201 store i32 %0, i32 addrspace(1)* %out2
207 define void @atomic_sub_i32(i32 addrspace(1)* %out, i32 %in) {
209 %0 = atomicrmw volatile sub i32 addrspace(1)* %out, i32 %in seq_cst
216 define void @atomic_sub_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
218 %0 = atomicrmw volatile sub i32 addrspace(1)* %out, i32 %in seq_cst
219 store i32 %0, i32 addrspace(1)* %out2
225 define void @atomic_sub_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
227 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
228 %0 = atomicrmw volatile sub i32 addrspace(1)* %ptr, i32 %in seq_cst
235 define void @atomic_sub_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
237 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
238 %0 = atomicrmw volatile sub i32 addrspace(1)* %ptr, i32 %in seq_cst
239 store i32 %0, i32 addrspace(1)* %out2
245 define void @atomic_max_i32_offset(i32 addrspace(1)* %out, i32 %in) {
247 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
248 %0 = atomicrmw volatile max i32 addrspace(1)* %gep, i32 %in seq_cst
255 define void @atomic_max_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
257 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
258 %0 = atomicrmw volatile max i32 addrspace(1)* %gep, i32 %in seq_cst
259 store i32 %0, i32 addrspace(1)* %out2
265 define void @atomic_max_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
267 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
268 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
269 %0 = atomicrmw volatile max i32 addrspace(1)* %gep, i32 %in seq_cst
276 define void @atomic_max_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
278 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
279 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
280 %0 = atomicrmw volatile max i32 addrspace(1)* %gep, i32 %in seq_cst
281 store i32 %0, i32 addrspace(1)* %out2
287 define void @atomic_max_i32(i32 addrspace(1)* %out, i32 %in) {
289 %0 = atomicrmw volatile max i32 addrspace(1)* %out, i32 %in seq_cst
296 define void @atomic_max_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
298 %0 = atomicrmw volatile max i32 addrspace(1)* %out, i32 %in seq_cst
299 store i32 %0, i32 addrspace(1)* %out2
305 define void @atomic_max_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
307 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
308 %0 = atomicrmw volatile max i32 addrspace(1)* %ptr, i32 %in seq_cst
315 define void @atomic_max_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
317 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
318 %0 = atomicrmw volatile max i32 addrspace(1)* %ptr, i32 %in seq_cst
319 store i32 %0, i32 addrspace(1)* %out2
325 define void @atomic_umax_i32_offset(i32 addrspace(1)* %out, i32 %in) {
327 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
328 %0 = atomicrmw volatile umax i32 addrspace(1)* %gep, i32 %in seq_cst
335 define void @atomic_umax_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
337 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
338 %0 = atomicrmw volatile umax i32 addrspace(1)* %gep, i32 %in seq_cst
339 store i32 %0, i32 addrspace(1)* %out2
345 define void @atomic_umax_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
347 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
348 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
349 %0 = atomicrmw volatile umax i32 addrspace(1)* %gep, i32 %in seq_cst
356 define void @atomic_umax_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
358 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
359 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
360 %0 = atomicrmw volatile umax i32 addrspace(1)* %gep, i32 %in seq_cst
361 store i32 %0, i32 addrspace(1)* %out2
367 define void @atomic_umax_i32(i32 addrspace(1)* %out, i32 %in) {
369 %0 = atomicrmw volatile umax i32 addrspace(1)* %out, i32 %in seq_cst
376 define void @atomic_umax_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
378 %0 = atomicrmw volatile umax i32 addrspace(1)* %out, i32 %in seq_cst
379 store i32 %0, i32 addrspace(1)* %out2
385 define void @atomic_umax_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
387 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
388 %0 = atomicrmw volatile umax i32 addrspace(1)* %ptr, i32 %in seq_cst
395 define void @atomic_umax_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
397 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
398 %0 = atomicrmw volatile umax i32 addrspace(1)* %ptr, i32 %in seq_cst
399 store i32 %0, i32 addrspace(1)* %out2
405 define void @atomic_min_i32_offset(i32 addrspace(1)* %out, i32 %in) {
407 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
408 %0 = atomicrmw volatile min i32 addrspace(1)* %gep, i32 %in seq_cst
415 define void @atomic_min_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
417 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
418 %0 = atomicrmw volatile min i32 addrspace(1)* %gep, i32 %in seq_cst
419 store i32 %0, i32 addrspace(1)* %out2
425 define void @atomic_min_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
427 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
428 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
429 %0 = atomicrmw volatile min i32 addrspace(1)* %gep, i32 %in seq_cst
436 define void @atomic_min_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
438 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
439 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
440 %0 = atomicrmw volatile min i32 addrspace(1)* %gep, i32 %in seq_cst
441 store i32 %0, i32 addrspace(1)* %out2
447 define void @atomic_min_i32(i32 addrspace(1)* %out, i32 %in) {
449 %0 = atomicrmw volatile min i32 addrspace(1)* %out, i32 %in seq_cst
456 define void @atomic_min_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
458 %0 = atomicrmw volatile min i32 addrspace(1)* %out, i32 %in seq_cst
459 store i32 %0, i32 addrspace(1)* %out2
465 define void @atomic_min_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
467 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
468 %0 = atomicrmw volatile min i32 addrspace(1)* %ptr, i32 %in seq_cst
475 define void @atomic_min_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
477 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
478 %0 = atomicrmw volatile min i32 addrspace(1)* %ptr, i32 %in seq_cst
479 store i32 %0, i32 addrspace(1)* %out2
485 define void @atomic_umin_i32_offset(i32 addrspace(1)* %out, i32 %in) {
487 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
488 %0 = atomicrmw volatile umin i32 addrspace(1)* %gep, i32 %in seq_cst
495 define void @atomic_umin_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
497 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
498 %0 = atomicrmw volatile umin i32 addrspace(1)* %gep, i32 %in seq_cst
499 store i32 %0, i32 addrspace(1)* %out2
505 define void @atomic_umin_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
507 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
508 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
509 %0 = atomicrmw volatile umin i32 addrspace(1)* %gep, i32 %in seq_cst
516 define void @atomic_umin_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
518 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
519 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
520 %0 = atomicrmw volatile umin i32 addrspace(1)* %gep, i32 %in seq_cst
521 store i32 %0, i32 addrspace(1)* %out2
527 define void @atomic_umin_i32(i32 addrspace(1)* %out, i32 %in) {
529 %0 = atomicrmw volatile umin i32 addrspace(1)* %out, i32 %in seq_cst
536 define void @atomic_umin_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
538 %0 = atomicrmw volatile umin i32 addrspace(1)* %out, i32 %in seq_cst
539 store i32 %0, i32 addrspace(1)* %out2
545 define void @atomic_umin_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
547 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
548 %0 = atomicrmw volatile umin i32 addrspace(1)* %ptr, i32 %in seq_cst
555 define void @atomic_umin_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
557 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
558 %0 = atomicrmw volatile umin i32 addrspace(1)* %ptr, i32 %in seq_cst
559 store i32 %0, i32 addrspace(1)* %out2
565 define void @atomic_or_i32_offset(i32 addrspace(1)* %out, i32 %in) {
567 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
568 %0 = atomicrmw volatile or i32 addrspace(1)* %gep, i32 %in seq_cst
575 define void @atomic_or_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
577 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
578 %0 = atomicrmw volatile or i32 addrspace(1)* %gep, i32 %in seq_cst
579 store i32 %0, i32 addrspace(1)* %out2
585 define void @atomic_or_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
587 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
588 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
589 %0 = atomicrmw volatile or i32 addrspace(1)* %gep, i32 %in seq_cst
596 define void @atomic_or_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
598 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
599 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
600 %0 = atomicrmw volatile or i32 addrspace(1)* %gep, i32 %in seq_cst
601 store i32 %0, i32 addrspace(1)* %out2
607 define void @atomic_or_i32(i32 addrspace(1)* %out, i32 %in) {
609 %0 = atomicrmw volatile or i32 addrspace(1)* %out, i32 %in seq_cst
616 define void @atomic_or_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
618 %0 = atomicrmw volatile or i32 addrspace(1)* %out, i32 %in seq_cst
619 store i32 %0, i32 addrspace(1)* %out2
625 define void @atomic_or_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
627 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
628 %0 = atomicrmw volatile or i32 addrspace(1)* %ptr, i32 %in seq_cst
635 define void @atomic_or_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
637 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
638 %0 = atomicrmw volatile or i32 addrspace(1)* %ptr, i32 %in seq_cst
639 store i32 %0, i32 addrspace(1)* %out2
645 define void @atomic_xchg_i32_offset(i32 addrspace(1)* %out, i32 %in) {
647 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
648 %0 = atomicrmw volatile xchg i32 addrspace(1)* %gep, i32 %in seq_cst
655 define void @atomic_xchg_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
657 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
658 %0 = atomicrmw volatile xchg i32 addrspace(1)* %gep, i32 %in seq_cst
659 store i32 %0, i32 addrspace(1)* %out2
665 define void @atomic_xchg_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
667 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
668 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
669 %0 = atomicrmw volatile xchg i32 addrspace(1)* %gep, i32 %in seq_cst
676 define void @atomic_xchg_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
678 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
679 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
680 %0 = atomicrmw volatile xchg i32 addrspace(1)* %gep, i32 %in seq_cst
681 store i32 %0, i32 addrspace(1)* %out2
687 define void @atomic_xchg_i32(i32 addrspace(1)* %out, i32 %in) {
689 %0 = atomicrmw volatile xchg i32 addrspace(1)* %out, i32 %in seq_cst
696 define void @atomic_xchg_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
698 %0 = atomicrmw volatile xchg i32 addrspace(1)* %out, i32 %in seq_cst
699 store i32 %0, i32 addrspace(1)* %out2
705 define void @atomic_xchg_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
707 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
708 %0 = atomicrmw volatile xchg i32 addrspace(1)* %ptr, i32 %in seq_cst
715 define void @atomic_xchg_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
717 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
718 %0 = atomicrmw volatile xchg i32 addrspace(1)* %ptr, i32 %in seq_cst
719 store i32 %0, i32 addrspace(1)* %out2
725 define void @atomic_xor_i32_offset(i32 addrspace(1)* %out, i32 %in) {
727 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
728 %0 = atomicrmw volatile xor i32 addrspace(1)* %gep, i32 %in seq_cst
735 define void @atomic_xor_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
737 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
738 %0 = atomicrmw volatile xor i32 addrspace(1)* %gep, i32 %in seq_cst
739 store i32 %0, i32 addrspace(1)* %out2
745 define void @atomic_xor_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
747 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
748 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
749 %0 = atomicrmw volatile xor i32 addrspace(1)* %gep, i32 %in seq_cst
756 define void @atomic_xor_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
758 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
759 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
760 %0 = atomicrmw volatile xor i32 addrspace(1)* %gep, i32 %in seq_cst
761 store i32 %0, i32 addrspace(1)* %out2
767 define void @atomic_xor_i32(i32 addrspace(1)* %out, i32 %in) {
769 %0 = atomicrmw volatile xor i32 addrspace(1)* %out, i32 %in seq_cst
776 define void @atomic_xor_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
778 %0 = atomicrmw volatile xor i32 addrspace(1)* %out, i32 %in seq_cst
779 store i32 %0, i32 addrspace(1)* %out2
785 define void @atomic_xor_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
787 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
788 %0 = atomicrmw volatile xor i32 addrspace(1)* %ptr, i32 %in seq_cst
795 define void @atomic_xor_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
797 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
798 %0 = atomicrmw volatile xor i32 addrspace(1)* %ptr, i32 %in seq_cst
799 store i32 %0, i32 addrspace(1)* %out2
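
Note that the search reports only the lines containing "addrspace", so each function's non-matching lines are omitted. A minimal sketch of one complete definition, assuming the standard test layout (the "entry:" label, "ret void", and closing brace are assumptions, not part of the matched output):

define void @atomic_add_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
entry:
  ; index 4 elements (16 bytes) past %out in the global address space
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  ; read-modify-write the value at %gep, returning the old value
  %0 = atomicrmw volatile add i32 addrspace(1)* %gep, i32 %in seq_cst
  ; the _ret variants store the returned old value to %out2
  store i32 %0, i32 addrspace(1)* %out2
  ret void
}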