Lines Matching full:dbg
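
These matches come from an LLVM ThreadSanitizer instrumentation test: each group below is the body of one small test function plus the FileCheck directive verifying that the pass rewrites the atomic operation into a __tsan_* runtime call, that the memory order is passed as a trailing i32 (0 for unordered/monotonic, 2 for acquire, 3 for release, 4 for acq_rel, 5 for seq_cst, as the groups show), and that the !dbg location is kept on the inserted call. A minimal sketch of one such case, with an illustrative function name and RUN line, typed-pointer syntax matching the listing, and the debug metadata omitted for brevity:

; RUN: opt < %s -passes=tsan -S | FileCheck %s
define i8 @atomic8_load_acquire(i8* %a) {
entry:
  %0 = load atomic i8, i8* %a acquire, align 1
  ret i8 %0
}
; CHECK-LABEL: atomic8_load_acquire
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 2)
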

7   %0 = load atomic i8, i8* %a unordered, align 1, !dbg !7
8 ret i8 %0, !dbg !7
11 ; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 0), !dbg
15 %0 = load atomic i8, i8* %a monotonic, align 1, !dbg !7
16 ret i8 %0, !dbg !7
19 ; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 0), !dbg
23 %0 = load atomic i8, i8* %a acquire, align 1, !dbg !7
24 ret i8 %0, !dbg !7
27 ; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 2), !dbg
31 %0 = load atomic i8, i8* %a seq_cst, align 1, !dbg !7
32 ret i8 %0, !dbg !7
35 ; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 5), !dbg
39 store atomic i8 0, i8* %a unordered, align 1, !dbg !7
40 ret void, !dbg !7
43 ; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 0), !dbg
47 store atomic i8 0, i8* %a monotonic, align 1, !dbg !7
48 ret void, !dbg !7
51 ; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 0), !dbg
55 store atomic i8 0, i8* %a release, align 1, !dbg !7
56 ret void, !dbg !7
59 ; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 3), !dbg
63 store atomic i8 0, i8* %a seq_cst, align 1, !dbg !7
64 ret void, !dbg !7
67 ; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 5), !dbg
71 atomicrmw xchg i8* %a, i8 0 monotonic, !dbg !7
72 ret void, !dbg !7
75 ; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 0), !dbg
79 atomicrmw add i8* %a, i8 0 monotonic, !dbg !7
80 ret void, !dbg !7
83 ; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 0), !dbg
87 atomicrmw sub i8* %a, i8 0 monotonic, !dbg !7
88 ret void, !dbg !7
91 ; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 0), !dbg
95 atomicrmw and i8* %a, i8 0 monotonic, !dbg !7
96 ret void, !dbg !7
99 ; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 0), !dbg
103 atomicrmw or i8* %a, i8 0 monotonic, !dbg !7
104 ret void, !dbg !7
107 ; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 0), !dbg
111 atomicrmw xor i8* %a, i8 0 monotonic, !dbg !7
112 ret void, !dbg !7
115 ; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 0), !dbg
119 atomicrmw nand i8* %a, i8 0 monotonic, !dbg !7
120 ret void, !dbg !7
123 ; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 0), !dbg
127 atomicrmw xchg i8* %a, i8 0 acquire, !dbg !7
128 ret void, !dbg !7
131 ; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 2), !dbg
135 atomicrmw add i8* %a, i8 0 acquire, !dbg !7
136 ret void, !dbg !7
139 ; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 2), !dbg
143 atomicrmw sub i8* %a, i8 0 acquire, !dbg !7
144 ret void, !dbg !7
147 ; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 2), !dbg
151 atomicrmw and i8* %a, i8 0 acquire, !dbg !7
152 ret void, !dbg !7
155 ; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 2), !dbg
159 atomicrmw or i8* %a, i8 0 acquire, !dbg !7
160 ret void, !dbg !7
163 ; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 2), !dbg
167 atomicrmw xor i8* %a, i8 0 acquire, !dbg !7
168 ret void, !dbg !7
171 ; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 2), !dbg
175 atomicrmw nand i8* %a, i8 0 acquire, !dbg !7
176 ret void, !dbg !7
179 ; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 2), !dbg
183 atomicrmw xchg i8* %a, i8 0 release, !dbg !7
184 ret void, !dbg !7
187 ; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 3), !dbg
191 atomicrmw add i8* %a, i8 0 release, !dbg !7
192 ret void, !dbg !7
195 ; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 3), !dbg
199 atomicrmw sub i8* %a, i8 0 release, !dbg !7
200 ret void, !dbg !7
203 ; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 3), !dbg
207 atomicrmw and i8* %a, i8 0 release, !dbg !7
208 ret void, !dbg !7
211 ; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 3), !dbg
215 atomicrmw or i8* %a, i8 0 release, !dbg !7
216 ret void, !dbg !7
219 ; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 3), !dbg
223 atomicrmw xor i8* %a, i8 0 release, !dbg !7
224 ret void, !dbg !7
227 ; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 3), !dbg
231 atomicrmw nand i8* %a, i8 0 release, !dbg !7
232 ret void, !dbg !7
235 ; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 3), !dbg
239 atomicrmw xchg i8* %a, i8 0 acq_rel, !dbg !7
240 ret void, !dbg !7
243 ; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 4), !dbg
247 atomicrmw add i8* %a, i8 0 acq_rel, !dbg !7
248 ret void, !dbg !7
251 ; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 4), !dbg
255 atomicrmw sub i8* %a, i8 0 acq_rel, !dbg !7
256 ret void, !dbg !7
259 ; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 4), !dbg
263 atomicrmw and i8* %a, i8 0 acq_rel, !dbg !7
264 ret void, !dbg !7
267 ; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 4), !dbg
271 atomicrmw or i8* %a, i8 0 acq_rel, !dbg !7
272 ret void, !dbg !7
275 ; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 4), !dbg
279 atomicrmw xor i8* %a, i8 0 acq_rel, !dbg !7
280 ret void, !dbg !7
283 ; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 4), !dbg
287 atomicrmw nand i8* %a, i8 0 acq_rel, !dbg !7
288 ret void, !dbg !7
291 ; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 4), !dbg
295 atomicrmw xchg i8* %a, i8 0 seq_cst, !dbg !7
296 ret void, !dbg !7
299 ; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 5), !dbg
303 atomicrmw add i8* %a, i8 0 seq_cst, !dbg !7
304 ret void, !dbg !7
307 ; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 5), !dbg
311 atomicrmw sub i8* %a, i8 0 seq_cst, !dbg !7
312 ret void, !dbg !7
315 ; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 5), !dbg
319 atomicrmw and i8* %a, i8 0 seq_cst, !dbg !7
320 ret void, !dbg !7
323 ; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 5), !dbg
327 atomicrmw or i8* %a, i8 0 seq_cst, !dbg !7
328 ret void, !dbg !7
331 ; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 5), !dbg
335 atomicrmw xor i8* %a, i8 0 seq_cst, !dbg !7
336 ret void, !dbg !7
339 ; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 5), !dbg
343 atomicrmw nand i8* %a, i8 0 seq_cst, !dbg !7
344 ret void, !dbg !7
347 ; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 5), !dbg
351 cmpxchg i8* %a, i8 0, i8 1 monotonic monotonic, !dbg !7
352 ret void, !dbg !7
355 ; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 0, i32 0), !dbg
359 cmpxchg i8* %a, i8 0, i8 1 acquire acquire, !dbg !7
360 ret void, !dbg !7
363 ; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 2, i32 2), !dbg
367 cmpxchg i8* %a, i8 0, i8 1 release monotonic, !dbg !7
368 ret void, !dbg !7
371 ; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 3, i32 0), !dbg
375 cmpxchg i8* %a, i8 0, i8 1 acq_rel acquire, !dbg !7
376 ret void, !dbg !7
379 ; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 4, i32 2), !dbg
383 cmpxchg i8* %a, i8 0, i8 1 seq_cst seq_cst, !dbg !7
384 ret void, !dbg !7
387 ; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 5, i32 5), !dbg
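
For cmpxchg the pass forwards both orderings: the success and failure orderings become the last two i32 arguments, as the pairs above show (release/monotonic becomes i32 3, i32 0). A sketch of one such case, again with an illustrative name and without the !dbg metadata:

define void @atomic8_cas_release(i8* %a) {
entry:
  cmpxchg i8* %a, i8 0, i8 1 release monotonic
  ret void
}
; CHECK-LABEL: atomic8_cas_release
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 3, i32 0)
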
391 %0 = load atomic i16, i16* %a unordered, align 2, !dbg !7
392 ret i16 %0, !dbg !7
395 ; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 0), !dbg
399 %0 = load atomic i16, i16* %a monotonic, align 2, !dbg !7
400 ret i16 %0, !dbg !7
403 ; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 0), !dbg
407 %0 = load atomic i16, i16* %a acquire, align 2, !dbg !7
408 ret i16 %0, !dbg !7
411 ; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 2), !dbg
415 %0 = load atomic i16, i16* %a seq_cst, align 2, !dbg !7
416 ret i16 %0, !dbg !7
419 ; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 5), !dbg
423 store atomic i16 0, i16* %a unordered, align 2, !dbg !7
424 ret void, !dbg !7
427 ; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 0), !dbg
431 store atomic i16 0, i16* %a monotonic, align 2, !dbg !7
432 ret void, !dbg !7
435 ; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 0), !dbg
439 store atomic i16 0, i16* %a release, align 2, !dbg !7
440 ret void, !dbg !7
443 ; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 3), !dbg
447 store atomic i16 0, i16* %a seq_cst, align 2, !dbg !7
448 ret void, !dbg !7
451 ; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 5), !dbg
455 atomicrmw xchg i16* %a, i16 0 monotonic, !dbg !7
456 ret void, !dbg !7
459 ; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 0), !dbg
463 atomicrmw add i16* %a, i16 0 monotonic, !dbg !7
464 ret void, !dbg !7
467 ; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 0), !dbg
471 atomicrmw sub i16* %a, i16 0 monotonic, !dbg !7
472 ret void, !dbg !7
475 ; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 0), !dbg
479 atomicrmw and i16* %a, i16 0 monotonic, !dbg !7
480 ret void, !dbg !7
483 ; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 0), !dbg
487 atomicrmw or i16* %a, i16 0 monotonic, !dbg !7
488 ret void, !dbg !7
491 ; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 0), !dbg
495 atomicrmw xor i16* %a, i16 0 monotonic, !dbg !7
496 ret void, !dbg !7
499 ; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 0), !dbg
503 atomicrmw nand i16* %a, i16 0 monotonic, !dbg !7
504 ret void, !dbg !7
507 ; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 0), !dbg
511 atomicrmw xchg i16* %a, i16 0 acquire, !dbg !7
512 ret void, !dbg !7
515 ; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 2), !dbg
519 atomicrmw add i16* %a, i16 0 acquire, !dbg !7
520 ret void, !dbg !7
523 ; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 2), !dbg
527 atomicrmw sub i16* %a, i16 0 acquire, !dbg !7
528 ret void, !dbg !7
531 ; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 2), !dbg
535 atomicrmw and i16* %a, i16 0 acquire, !dbg !7
536 ret void, !dbg !7
539 ; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 2), !dbg
543 atomicrmw or i16* %a, i16 0 acquire, !dbg !7
544 ret void, !dbg !7
547 ; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 2), !dbg
551 atomicrmw xor i16* %a, i16 0 acquire, !dbg !7
552 ret void, !dbg !7
555 ; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 2), !dbg
559 atomicrmw nand i16* %a, i16 0 acquire, !dbg !7
560 ret void, !dbg !7
563 ; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 2), !dbg
567 atomicrmw xchg i16* %a, i16 0 release, !dbg !7
568 ret void, !dbg !7
571 ; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 3), !dbg
575 atomicrmw add i16* %a, i16 0 release, !dbg !7
576 ret void, !dbg !7
579 ; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 3), !dbg
583 atomicrmw sub i16* %a, i16 0 release, !dbg !7
584 ret void, !dbg !7
587 ; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 3), !dbg
591 atomicrmw and i16* %a, i16 0 release, !dbg !7
592 ret void, !dbg !7
595 ; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 3), !dbg
599 atomicrmw or i16* %a, i16 0 release, !dbg !7
600 ret void, !dbg !7
603 ; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 3), !dbg
607 atomicrmw xor i16* %a, i16 0 release, !dbg !7
608 ret void, !dbg !7
611 ; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 3), !dbg
615 atomicrmw nand i16* %a, i16 0 release, !dbg !7
616 ret void, !dbg !7
619 ; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 3), !dbg
623 atomicrmw xchg i16* %a, i16 0 acq_rel, !dbg !7
624 ret void, !dbg !7
627 ; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 4), !dbg
631 atomicrmw add i16* %a, i16 0 acq_rel, !dbg !7
632 ret void, !dbg !7
635 ; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 4), !dbg
639 atomicrmw sub i16* %a, i16 0 acq_rel, !dbg !7
640 ret void, !dbg !7
643 ; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 4), !dbg
647 atomicrmw and i16* %a, i16 0 acq_rel, !dbg !7
648 ret void, !dbg !7
651 ; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 4), !dbg
655 atomicrmw or i16* %a, i16 0 acq_rel, !dbg !7
656 ret void, !dbg !7
659 ; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 4), !dbg
663 atomicrmw xor i16* %a, i16 0 acq_rel, !dbg !7
664 ret void, !dbg !7
667 ; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 4), !dbg
671 atomicrmw nand i16* %a, i16 0 acq_rel, !dbg !7
672 ret void, !dbg !7
675 ; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 4), !dbg
679 atomicrmw xchg i16* %a, i16 0 seq_cst, !dbg !7
680 ret void, !dbg !7
683 ; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 5), !dbg
687 atomicrmw add i16* %a, i16 0 seq_cst, !dbg !7
688 ret void, !dbg !7
691 ; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 5), !dbg
695 atomicrmw sub i16* %a, i16 0 seq_cst, !dbg !7
696 ret void, !dbg !7
699 ; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 5), !dbg
703 atomicrmw and i16* %a, i16 0 seq_cst, !dbg !7
704 ret void, !dbg !7
707 ; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 5), !dbg
711 atomicrmw or i16* %a, i16 0 seq_cst, !dbg !7
712 ret void, !dbg !7
715 ; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 5), !dbg
719 atomicrmw xor i16* %a, i16 0 seq_cst, !dbg !7
720 ret void, !dbg !7
723 ; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 5), !dbg
727 atomicrmw nand i16* %a, i16 0 seq_cst, !dbg !7
728 ret void, !dbg !7
731 ; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 5), !dbg
735 cmpxchg i16* %a, i16 0, i16 1 monotonic monotonic, !dbg !7
736 ret void, !dbg !7
739 ; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 0, i32 0), !dbg
743 cmpxchg i16* %a, i16 0, i16 1 acquire acquire, !dbg !7
744 ret void, !dbg !7
747 ; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 2, i32 2), !dbg
751 cmpxchg i16* %a, i16 0, i16 1 release monotonic, !dbg !7
752 ret void, !dbg !7
755 ; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 3, i32 0), !dbg
759 cmpxchg i16* %a, i16 0, i16 1 acq_rel acquire, !dbg !7
760 ret void, !dbg !7
763 ; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 4, i32 2), !dbg
767 cmpxchg i16* %a, i16 0, i16 1 seq_cst seq_cst, !dbg !7
768 ret void, !dbg !7
771 ; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 5, i32 5), !dbg
775 %0 = load atomic i32, i32* %a unordered, align 4, !dbg !7
776 ret i32 %0, !dbg !7
779 ; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 0), !dbg
783 %0 = load atomic i32, i32* %a monotonic, align 4, !dbg !7
784 ret i32 %0, !dbg !7
787 ; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 0), !dbg
791 %0 = load atomic i32, i32* %a acquire, align 4, !dbg !7
792 ret i32 %0, !dbg !7
795 ; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 2), !dbg
799 %0 = load atomic i32, i32* %a seq_cst, align 4, !dbg !7
800 ret i32 %0, !dbg !7
803 ; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 5), !dbg
807 store atomic i32 0, i32* %a unordered, align 4, !dbg !7
808 ret void, !dbg !7
811 ; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 0), !dbg
815 store atomic i32 0, i32* %a monotonic, align 4, !dbg !7
816 ret void, !dbg !7
819 ; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 0), !dbg
823 store atomic i32 0, i32* %a release, align 4, !dbg !7
824 ret void, !dbg !7
827 ; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 3), !dbg
831 store atomic i32 0, i32* %a seq_cst, align 4, !dbg !7
832 ret void, !dbg !7
835 ; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 5), !dbg
839 atomicrmw xchg i32* %a, i32 0 monotonic, !dbg !7
840 ret void, !dbg !7
843 ; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 0), !dbg
847 atomicrmw add i32* %a, i32 0 monotonic, !dbg !7
848 ret void, !dbg !7
851 ; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 0), !dbg
855 atomicrmw sub i32* %a, i32 0 monotonic, !dbg !7
856 ret void, !dbg !7
859 ; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 0), !dbg
863 atomicrmw and i32* %a, i32 0 monotonic, !dbg !7
864 ret void, !dbg !7
867 ; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 0), !dbg
871 atomicrmw or i32* %a, i32 0 monotonic, !dbg !7
872 ret void, !dbg !7
875 ; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 0), !dbg
879 atomicrmw xor i32* %a, i32 0 monotonic, !dbg !7
880 ret void, !dbg !7
883 ; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 0), !dbg
887 atomicrmw nand i32* %a, i32 0 monotonic, !dbg !7
888 ret void, !dbg !7
891 ; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 0), !dbg
895 atomicrmw xchg i32* %a, i32 0 acquire, !dbg !7
896 ret void, !dbg !7
899 ; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 2), !dbg
903 atomicrmw add i32* %a, i32 0 acquire, !dbg !7
904 ret void, !dbg !7
907 ; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 2), !dbg
911 atomicrmw sub i32* %a, i32 0 acquire, !dbg !7
912 ret void, !dbg !7
915 ; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 2), !dbg
919 atomicrmw and i32* %a, i32 0 acquire, !dbg !7
920 ret void, !dbg !7
923 ; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 2), !dbg
927 atomicrmw or i32* %a, i32 0 acquire, !dbg !7
928 ret void, !dbg !7
931 ; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 2), !dbg
935 atomicrmw xor i32* %a, i32 0 acquire, !dbg !7
936 ret void, !dbg !7
939 ; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 2), !dbg
943 atomicrmw nand i32* %a, i32 0 acquire, !dbg !7
944 ret void, !dbg !7
947 ; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 2), !dbg
951 atomicrmw xchg i32* %a, i32 0 release, !dbg !7
952 ret void, !dbg !7
955 ; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 3), !dbg
959 atomicrmw add i32* %a, i32 0 release, !dbg !7
960 ret void, !dbg !7
963 ; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 3), !dbg
967 atomicrmw sub i32* %a, i32 0 release, !dbg !7
968 ret void, !dbg !7
971 ; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 3), !dbg
975 atomicrmw and i32* %a, i32 0 release, !dbg !7
976 ret void, !dbg !7
979 ; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 3), !dbg
983 atomicrmw or i32* %a, i32 0 release, !dbg !7
984 ret void, !dbg !7
987 ; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 3), !dbg
991 atomicrmw xor i32* %a, i32 0 release, !dbg !7
992 ret void, !dbg !7
995 ; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 3), !dbg
999 atomicrmw nand i32* %a, i32 0 release, !dbg !7
1000 ret void, !dbg !7
1003 ; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 3), !dbg
1007 atomicrmw xchg i32* %a, i32 0 acq_rel, !dbg !7
1008 ret void, !dbg !7
1011 ; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 4), !dbg
1015 atomicrmw add i32* %a, i32 0 acq_rel, !dbg !7
1016 ret void, !dbg !7
1019 ; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 4), !dbg
1023 atomicrmw sub i32* %a, i32 0 acq_rel, !dbg !7
1024 ret void, !dbg !7
1027 ; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 4), !dbg
1031 atomicrmw and i32* %a, i32 0 acq_rel, !dbg !7
1032 ret void, !dbg !7
1035 ; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 4), !dbg
1039 atomicrmw or i32* %a, i32 0 acq_rel, !dbg !7
1040 ret void, !dbg !7
1043 ; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 4), !dbg
1047 atomicrmw xor i32* %a, i32 0 acq_rel, !dbg !7
1048 ret void, !dbg !7
1051 ; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 4), !dbg
1055 atomicrmw nand i32* %a, i32 0 acq_rel, !dbg !7
1056 ret void, !dbg !7
1059 ; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 4), !dbg
1063 atomicrmw xchg i32* %a, i32 0 seq_cst, !dbg !7
1064 ret void, !dbg !7
1067 ; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 5), !dbg
1071 atomicrmw add i32* %a, i32 0 seq_cst, !dbg !7
1072 ret void, !dbg !7
1075 ; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 5), !dbg
1079 atomicrmw sub i32* %a, i32 0 seq_cst, !dbg !7
1080 ret void, !dbg !7
1083 ; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 5), !dbg
1087 atomicrmw and i32* %a, i32 0 seq_cst, !dbg !7
1088 ret void, !dbg !7
1091 ; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 5), !dbg
1095 atomicrmw or i32* %a, i32 0 seq_cst, !dbg !7
1096 ret void, !dbg !7
1099 ; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 5), !dbg
1103 atomicrmw xor i32* %a, i32 0 seq_cst, !dbg !7
1104 ret void, !dbg !7
1107 ; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 5), !dbg
1111 atomicrmw nand i32* %a, i32 0 seq_cst, !dbg !7
1112 ret void, !dbg !7
1115 ; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 5), !dbg
1119 cmpxchg i32* %a, i32 0, i32 1 monotonic monotonic, !dbg !7
1120 ret void, !dbg !7
1123 ; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 0, i32 0), !dbg
1127 cmpxchg i32* %a, i32 0, i32 1 acquire acquire, !dbg !7
1128 ret void, !dbg !7
1131 ; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 2, i32 2), !dbg
1135 cmpxchg i32* %a, i32 0, i32 1 release monotonic, !dbg !7
1136 ret void, !dbg !7
1139 ; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 3, i32 0), !dbg
1143 cmpxchg i32* %a, i32 0, i32 1 acq_rel acquire, !dbg !7
1144 ret void, !dbg !7
1147 ; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 4, i32 2), !dbg
1151 cmpxchg i32* %a, i32 0, i32 1 seq_cst seq_cst, !dbg !7
1152 ret void, !dbg !7
1155 ; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 5, i32 5), !dbg
1159 %0 = load atomic i64, i64* %a unordered, align 8, !dbg !7
1160 ret i64 %0, !dbg !7
1163 ; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 0), !dbg
1167 %0 = load atomic i64, i64* %a monotonic, align 8, !dbg !7
1168 ret i64 %0, !dbg !7
1171 ; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 0), !dbg
1175 %0 = load atomic i64, i64* %a acquire, align 8, !dbg !7
1176 ret i64 %0, !dbg !7
1179 ; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 2), !dbg
1183 %0 = load atomic i64, i64* %a seq_cst, align 8, !dbg !7
1184 ret i64 %0, !dbg !7
1187 ; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 5), !dbg
1191 %0 = load atomic i8*, i8** %a seq_cst, align 8, !dbg !7
1192 ret i8* %0, !dbg !7
1196 ; CHECK-NEXT: call i64 @__tsan_atomic64_load(i64* %{{.+}}, i32 5), !dbg
1201 store atomic i64 0, i64* %a unordered, align 8, !dbg !7
1202 ret void, !dbg !7
1205 ; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 0), !dbg
1209 store atomic i64 0, i64* %a monotonic, align 8, !dbg !7
1210 ret void, !dbg !7
1213 ; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 0), !dbg
1217 store atomic i64 0, i64* %a release, align 8, !dbg !7
1218 ret void, !dbg !7
1221 ; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 3), !dbg
1225 store atomic i64 0, i64* %a seq_cst, align 8, !dbg !7
1226 ret void, !dbg !7
1229 ; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 5), !dbg
1233 store atomic i8* %v, i8** %a seq_cst, align 8, !dbg !7
1234 ret void, !dbg !7
1239 ; CHECK-NEXT: call void @__tsan_atomic64_store(i64* %{{.*}}, i64 %{{.*}}, i32 5), !dbg
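
Pointer-typed atomics (the i8* load, store, and cmpxchg cases in this listing) have no dedicated hooks; judging by the %{{.*}} patterns in those checks, the operands are cast to i64/i64* and the 64-bit hooks are reused, which is why the checks cannot name %a directly. A sketch under that assumption (illustrative name, no !dbg metadata):

define void @atomic_store_ptr(i8** %a, i8* %v) {
entry:
  store atomic i8* %v, i8** %a seq_cst, align 8
  ret void
}
; CHECK-LABEL: atomic_store_ptr
; CHECK: call void @__tsan_atomic64_store(i64* %{{.*}}, i64 %{{.*}}, i32 5)
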
1243 atomicrmw xchg i64* %a, i64 0 monotonic, !dbg !7
1244 ret void, !dbg !7
1247 ; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 0), !dbg
1251 atomicrmw add i64* %a, i64 0 monotonic, !dbg !7
1252 ret void, !dbg !7
1255 ; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 0), !dbg
1259 atomicrmw sub i64* %a, i64 0 monotonic, !dbg !7
1260 ret void, !dbg !7
1263 ; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 0), !dbg
1267 atomicrmw and i64* %a, i64 0 monotonic, !dbg !7
1268 ret void, !dbg !7
1271 ; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 0), !dbg
1275 atomicrmw or i64* %a, i64 0 monotonic, !dbg !7
1276 ret void, !dbg !7
1279 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 0), !dbg
1283 atomicrmw xor i64* %a, i64 0 monotonic, !dbg !7
1284 ret void, !dbg !7
1287 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 0), !dbg
1291 atomicrmw nand i64* %a, i64 0 monotonic, !dbg !7
1292 ret void, !dbg !7
1295 ; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 0), !dbg
1299 atomicrmw xchg i64* %a, i64 0 acquire, !dbg !7
1300 ret void, !dbg !7
1303 ; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 2), !dbg
1307 atomicrmw add i64* %a, i64 0 acquire, !dbg !7
1308 ret void, !dbg !7
1311 ; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 2), !dbg
1315 atomicrmw sub i64* %a, i64 0 acquire, !dbg !7
1316 ret void, !dbg !7
1319 ; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 2), !dbg
1323 atomicrmw and i64* %a, i64 0 acquire, !dbg !7
1324 ret void, !dbg !7
1327 ; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 2), !dbg
1331 atomicrmw or i64* %a, i64 0 acquire, !dbg !7
1332 ret void, !dbg !7
1335 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 2), !dbg
1339 atomicrmw xor i64* %a, i64 0 acquire, !dbg !7
1340 ret void, !dbg !7
1343 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 2), !dbg
1347 atomicrmw nand i64* %a, i64 0 acquire, !dbg !7
1348 ret void, !dbg !7
1351 ; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 2), !dbg
1355 atomicrmw xchg i64* %a, i64 0 release, !dbg !7
1356 ret void, !dbg !7
1359 ; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 3), !dbg
1363 atomicrmw add i64* %a, i64 0 release, !dbg !7
1364 ret void, !dbg !7
1367 ; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 3), !dbg
1371 atomicrmw sub i64* %a, i64 0 release, !dbg !7
1372 ret void, !dbg !7
1375 ; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 3), !dbg
1379 atomicrmw and i64* %a, i64 0 release, !dbg !7
1380 ret void, !dbg !7
1383 ; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 3), !dbg
1387 atomicrmw or i64* %a, i64 0 release, !dbg !7
1388 ret void, !dbg !7
1391 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 3), !dbg
1395 atomicrmw xor i64* %a, i64 0 release, !dbg !7
1396 ret void, !dbg !7
1399 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 3), !dbg
1403 atomicrmw nand i64* %a, i64 0 release, !dbg !7
1404 ret void, !dbg !7
1407 ; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 3), !dbg
1411 atomicrmw xchg i64* %a, i64 0 acq_rel, !dbg !7
1412 ret void, !dbg !7
1415 ; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 4), !dbg
1419 atomicrmw add i64* %a, i64 0 acq_rel, !dbg !7
1420 ret void, !dbg !7
1423 ; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 4), !dbg
1427 atomicrmw sub i64* %a, i64 0 acq_rel, !dbg !7
1428 ret void, !dbg !7
1431 ; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 4), !dbg
1435 atomicrmw and i64* %a, i64 0 acq_rel, !dbg !7
1436 ret void, !dbg !7
1439 ; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 4), !dbg
1443 atomicrmw or i64* %a, i64 0 acq_rel, !dbg !7
1444 ret void, !dbg !7
1447 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 4), !dbg
1451 atomicrmw xor i64* %a, i64 0 acq_rel, !dbg !7
1452 ret void, !dbg !7
1455 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 4), !dbg
1459 atomicrmw nand i64* %a, i64 0 acq_rel, !dbg !7
1460 ret void, !dbg !7
1463 ; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 4), !dbg
1467 atomicrmw xchg i64* %a, i64 0 seq_cst, !dbg !7
1468 ret void, !dbg !7
1471 ; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 5), !dbg
1475 atomicrmw add i64* %a, i64 0 seq_cst, !dbg !7
1476 ret void, !dbg !7
1479 ; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 5), !dbg
1483 atomicrmw sub i64* %a, i64 0 seq_cst, !dbg !7
1484 ret void, !dbg !7
1487 ; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 5), !dbg
1491 atomicrmw and i64* %a, i64 0 seq_cst, !dbg !7
1492 ret void, !dbg !7
1495 ; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 5), !dbg
1499 atomicrmw or i64* %a, i64 0 seq_cst, !dbg !7
1500 ret void, !dbg !7
1503 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 5), !dbg
1507 atomicrmw xor i64* %a, i64 0 seq_cst, !dbg !7
1508 ret void, !dbg !7
1511 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 5), !dbg
1515 atomicrmw nand i64* %a, i64 0 seq_cst, !dbg !7
1516 ret void, !dbg !7
1519 ; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 5), !dbg
1523 cmpxchg i64* %a, i64 0, i64 1 monotonic monotonic, !dbg !7
1524 ret void, !dbg !7
1527 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 0, i32 0), !dbg
1531 cmpxchg i64* %a, i64 0, i64 1 acquire acquire, !dbg !7
1532 ret void, !dbg !7
1535 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 2, i32 2), !dbg
1539 cmpxchg i64* %a, i64 0, i64 1 release monotonic, !dbg !7
1540 ret void, !dbg !7
1543 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 3, i32 0), !dbg
1547 cmpxchg i64* %a, i64 0, i64 1 acq_rel acquire, !dbg !7
1548 ret void, !dbg !7
1551 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 4, i32 2), !dbg
1555 cmpxchg i64* %a, i64 0, i64 1 seq_cst seq_cst, !dbg !7
1556 ret void, !dbg !7
1559 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 5, i32 5), !dbg
1563 cmpxchg i8** %a, i8* %v1, i8* %v2 seq_cst seq_cst, !dbg !7
1570 … i64 @__tsan_atomic64_compare_exchange_val(i64* {{.*}}, i64 {{.*}}, i64 {{.*}}, i32 5, i32 5), !dbg
1578 %0 = load atomic i128, i128* %a unordered, align 16, !dbg !7
1579 ret i128 %0, !dbg !7
1582 ; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 0), !dbg
1586 %0 = load atomic i128, i128* %a monotonic, align 16, !dbg !7
1587 ret i128 %0, !dbg !7
1590 ; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 0), !dbg
1594 %0 = load atomic i128, i128* %a acquire, align 16, !dbg !7
1595 ret i128 %0, !dbg !7
1598 ; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 2), !dbg
1602 %0 = load atomic i128, i128* %a seq_cst, align 16, !dbg !7
1603 ret i128 %0, !dbg !7
1606 ; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 5), !dbg
1610 store atomic i128 0, i128* %a unordered, align 16, !dbg !7
1611 ret void, !dbg !7
1614 ; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 0), !dbg
1618 store atomic i128 0, i128* %a monotonic, align 16, !dbg !7
1619 ret void, !dbg !7
1622 ; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 0), !dbg
1626 store atomic i128 0, i128* %a release, align 16, !dbg !7
1627 ret void, !dbg !7
1630 ; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 3), !dbg
1634 store atomic i128 0, i128* %a seq_cst, align 16, !dbg !7
1635 ret void, !dbg !7
1638 ; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 5), !dbg
1642 atomicrmw xchg i128* %a, i128 0 monotonic, !dbg !7
1643 ret void, !dbg !7
1646 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 0), !dbg
1650 atomicrmw add i128* %a, i128 0 monotonic, !dbg !7
1651 ret void, !dbg !7
1654 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 0), !dbg
1658 atomicrmw sub i128* %a, i128 0 monotonic, !dbg !7
1659 ret void, !dbg !7
1662 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 0), !dbg
1666 atomicrmw and i128* %a, i128 0 monotonic, !dbg !7
1667 ret void, !dbg !7
1670 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 0), !dbg
1674 atomicrmw or i128* %a, i128 0 monotonic, !dbg !7
1675 ret void, !dbg !7
1678 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 0), !dbg
1682 atomicrmw xor i128* %a, i128 0 monotonic, !dbg !7
1683 ret void, !dbg !7
1686 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 0), !dbg
1690 atomicrmw nand i128* %a, i128 0 monotonic, !dbg !7
1691 ret void, !dbg !7
1694 ; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 0), !dbg
1698 atomicrmw xchg i128* %a, i128 0 acquire, !dbg !7
1699 ret void, !dbg !7
1702 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 2), !dbg
1706 atomicrmw add i128* %a, i128 0 acquire, !dbg !7
1707 ret void, !dbg !7
1710 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 2), !dbg
1714 atomicrmw sub i128* %a, i128 0 acquire, !dbg !7
1715 ret void, !dbg !7
1718 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 2), !dbg
1722 atomicrmw and i128* %a, i128 0 acquire, !dbg !7
1723 ret void, !dbg !7
1726 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 2), !dbg
1730 atomicrmw or i128* %a, i128 0 acquire, !dbg !7
1731 ret void, !dbg !7
1734 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 2), !dbg
1738 atomicrmw xor i128* %a, i128 0 acquire, !dbg !7
1739 ret void, !dbg !7
1742 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 2), !dbg
1746 atomicrmw nand i128* %a, i128 0 acquire, !dbg !7
1747 ret void, !dbg !7
1750 ; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 2), !dbg
1754 atomicrmw xchg i128* %a, i128 0 release, !dbg !7
1755 ret void, !dbg !7
1758 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 3), !dbg
1762 atomicrmw add i128* %a, i128 0 release, !dbg !7
1763 ret void, !dbg !7
1766 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 3), !dbg
1770 atomicrmw sub i128* %a, i128 0 release, !dbg !7
1771 ret void, !dbg !7
1774 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 3), !dbg
1778 atomicrmw and i128* %a, i128 0 release, !dbg !7
1779 ret void, !dbg !7
1782 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 3), !dbg
1786 atomicrmw or i128* %a, i128 0 release, !dbg !7
1787 ret void, !dbg !7
1790 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 3), !dbg
1794 atomicrmw xor i128* %a, i128 0 release, !dbg !7
1795 ret void, !dbg !7
1798 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 3), !dbg
1802 atomicrmw nand i128* %a, i128 0 release, !dbg !7
1803 ret void, !dbg !7
1806 ; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 3), !dbg
1810 atomicrmw xchg i128* %a, i128 0 acq_rel, !dbg !7
1811 ret void, !dbg !7
1814 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 4), !dbg
1818 atomicrmw add i128* %a, i128 0 acq_rel, !dbg !7
1819 ret void, !dbg !7
1822 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 4), !dbg
1826 atomicrmw sub i128* %a, i128 0 acq_rel, !dbg !7
1827 ret void, !dbg !7
1830 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 4), !dbg
1834 atomicrmw and i128* %a, i128 0 acq_rel, !dbg !7
1835 ret void, !dbg !7
1838 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 4), !dbg
1842 atomicrmw or i128* %a, i128 0 acq_rel, !dbg !7
1843 ret void, !dbg !7
1846 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 4), !dbg
1850 atomicrmw xor i128* %a, i128 0 acq_rel, !dbg !7
1851 ret void, !dbg !7
1854 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 4), !dbg
1858 atomicrmw nand i128* %a, i128 0 acq_rel, !dbg !7
1859 ret void, !dbg !7
1862 ; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 4), !dbg
1866 atomicrmw xchg i128* %a, i128 0 seq_cst, !dbg !7
1867 ret void, !dbg !7
1870 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 5), !dbg
1874 atomicrmw add i128* %a, i128 0 seq_cst, !dbg !7
1875 ret void, !dbg !7
1878 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 5), !dbg
1882 atomicrmw sub i128* %a, i128 0 seq_cst, !dbg !7
1883 ret void, !dbg !7
1886 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 5), !dbg
1890 atomicrmw and i128* %a, i128 0 seq_cst, !dbg !7
1891 ret void, !dbg !7
1894 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 5), !dbg
1898 atomicrmw or i128* %a, i128 0 seq_cst, !dbg !7
1899 ret void, !dbg !7
1902 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 5), !dbg
1906 atomicrmw xor i128* %a, i128 0 seq_cst, !dbg !7
1907 ret void, !dbg !7
1910 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 5), !dbg
1914 atomicrmw nand i128* %a, i128 0 seq_cst, !dbg !7
1915 ret void, !dbg !7
1918 ; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 5), !dbg
1922 cmpxchg i128* %a, i128 0, i128 1 monotonic monotonic, !dbg !7
1923 ret void, !dbg !7
1926 ; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 0, i32 0), !dbg
1930 cmpxchg i128* %a, i128 0, i128 1 acquire acquire, !dbg !7
1931 ret void, !dbg !7
1934 ; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 2, i32 2), !dbg
1938 cmpxchg i128* %a, i128 0, i128 1 release monotonic, !dbg !7
1939 ret void, !dbg !7
1942 ; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 3, i32 0), !dbg
1946 cmpxchg i128* %a, i128 0, i128 1 acq_rel acquire, !dbg !7
1947 ret void, !dbg !7
1950 ; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 4, i32 2), !dbg
1954 cmpxchg i128* %a, i128 0, i128 1 seq_cst seq_cst, !dbg !7
1955 ret void, !dbg !7
1958 ; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 5, i32 5), !dbg
1962 fence singlethread acquire, !dbg !7
1963 ret void, !dbg !7
1966 ; CHECK: call void @__tsan_atomic_signal_fence(i32 2), !dbg
1970 fence acquire, !dbg !7
1971 ret void, !dbg !7
1974 ; CHECK: call void @__tsan_atomic_thread_fence(i32 2), !dbg
1978 fence singlethread release, !dbg !7
1979 ret void, !dbg !7
1982 ; CHECK: call void @__tsan_atomic_signal_fence(i32 3), !dbg
1986 fence release, !dbg !7
1987 ret void, !dbg !7
1990 ; CHECK: call void @__tsan_atomic_thread_fence(i32 3), !dbg
1994 fence singlethread acq_rel, !dbg !7
1995 ret void, !dbg !7
1998 ; CHECK: call void @__tsan_atomic_signal_fence(i32 4), !dbg
2002 fence acq_rel, !dbg !7
2003 ret void, !dbg !7
2006 ; CHECK: call void @__tsan_atomic_thread_fence(i32 4), !dbg
2010 fence singlethread seq_cst, !dbg !7
2011 ret void, !dbg !7
2014 ; CHECK: call void @__tsan_atomic_signal_fence(i32 5), !dbg
2018 fence seq_cst, !dbg !7
2019 ret void, !dbg !7
2022 ; CHECK: call void @__tsan_atomic_thread_fence(i32 5), !dbg
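
Fences follow the same scheme: a singlethread-scoped fence maps to __tsan_atomic_signal_fence and a cross-thread fence to __tsan_atomic_thread_fence, each taking the same ordering constant. A sketch mirroring the last group (illustrative name, no !dbg metadata):

define void @fence_seq_cst() {
entry:
  fence seq_cst
  ret void
}
; CHECK-LABEL: fence_seq_cst
; CHECK: call void @__tsan_atomic_thread_fence(i32 5)
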
2025 !llvm.dbg.cu = !{!8}