; RUN: llc < %s -O0 -march=x86-64 -mcpu=corei7 -verify-machineinstrs | FileCheck %s --check-prefix X64

; 64-bit atomicrmw / cmpxchg / atomic load-store lowering tests for x86-64 at -O0.
;
; NOTE(review): only the X64 prefix is exercised by the RUN line above; every
; X32 check line below is currently dead. Add a 32-bit RUN line with the X32
; prefix (or remove those lines) so they are actually verified.

@sc64 = external global i64

define void @atomic_fetch_add64() nounwind {
; X64-LABEL: atomic_fetch_add64:
; X32-LABEL: atomic_fetch_add64:
entry:
  %t1 = atomicrmw add i64* @sc64, i64 1 acquire
; X64: lock
; X64: incq
  %t2 = atomicrmw add i64* @sc64, i64 3 acquire
; X64: lock
; X64: addq $3
  %t3 = atomicrmw add i64* @sc64, i64 5 acquire
; X64: lock
; X64: xaddq
  %t4 = atomicrmw add i64* @sc64, i64 %t3 acquire
; X64: lock
; X64: addq
  ret void
; X64: ret
}

define void @atomic_fetch_sub64() nounwind {
; X64-LABEL: atomic_fetch_sub64:
; X32-LABEL: atomic_fetch_sub64:
  %t1 = atomicrmw sub i64* @sc64, i64 1 acquire
; X64: lock
; X64: decq
  %t2 = atomicrmw sub i64* @sc64, i64 3 acquire
; X64: lock
; X64: subq $3
  %t3 = atomicrmw sub i64* @sc64, i64 5 acquire
; X64: lock
; X64: xaddq
  %t4 = atomicrmw sub i64* @sc64, i64 %t3 acquire
; X64: lock
; X64: subq
  ret void
; X64: ret
}

define void @atomic_fetch_and64() nounwind {
; X64-LABEL: atomic_fetch_and64:
; X32-LABEL: atomic_fetch_and64:
  %t1 = atomicrmw and i64* @sc64, i64 3 acquire
; X64: lock
; X64: andq $3
  %t2 = atomicrmw and i64* @sc64, i64 5 acquire
; X64: andl
; X64: lock
; X64: cmpxchgq
  %t3 = atomicrmw and i64* @sc64, i64 %t2 acquire
; X64: lock
; X64: andq
  ret void
; X64: ret
}

define void @atomic_fetch_or64() nounwind {
; X64-LABEL: atomic_fetch_or64:
; X32-LABEL: atomic_fetch_or64:
  %t1 = atomicrmw or i64* @sc64, i64 3 acquire
; X64: lock
; X64: orq $3
  %t2 = atomicrmw or i64* @sc64, i64 5 acquire
; X64: orq
; X64: lock
; X64: cmpxchgq
  %t3 = atomicrmw or i64* @sc64, i64 %t2 acquire
; X64: lock
; X64: orq
  ret void
; X64: ret
}

define void @atomic_fetch_xor64() nounwind {
; X64-LABEL: atomic_fetch_xor64:
; X32-LABEL: atomic_fetch_xor64:
  %t1 = atomicrmw xor i64* @sc64, i64 3 acquire
; X64: lock
; X64: xorq $3
  %t2 = atomicrmw xor i64* @sc64, i64 5 acquire
; X64: xorq
; X64: lock
; X64: cmpxchgq
  %t3 = atomicrmw xor i64* @sc64, i64 %t2 acquire
; X64: lock
; X64: xorq
  ret void
; X64: ret
}

define void @atomic_fetch_nand64(i64 %x) nounwind {
; X64-LABEL: atomic_fetch_nand64:
; X32-LABEL: atomic_fetch_nand64:
; nand has no single-instruction form: and + not inside a cmpxchg loop.
  %t1 = atomicrmw nand i64* @sc64, i64 %x acquire
; X64: andq
; X64: notq
; X64: lock
; X64: cmpxchgq
; X32: andl
; X32: andl
; X32: notl
; X32: notl
; X32: lock
; X32: cmpxchg8b
  ret void
; X64: ret
; X32: ret
}

define void @atomic_fetch_max64(i64 %x) nounwind {
; X64-LABEL: atomic_fetch_max64:
; X32-LABEL: atomic_fetch_max64:
; min/max lower to compare + cmov inside a cmpxchg loop.
  %t1 = atomicrmw max i64* @sc64, i64 %x acquire
; X64: subq
; X64: cmov
; X64: lock
; X64: cmpxchgq

; X32: cmpl
; X32: cmpl
; X32: cmov
; X32: cmov
; X32: cmov
; X32: lock
; X32: cmpxchg8b
  ret void
; X64: ret
; X32: ret
}

define void @atomic_fetch_min64(i64 %x) nounwind {
; X64-LABEL: atomic_fetch_min64:
; X32-LABEL: atomic_fetch_min64:
  %t1 = atomicrmw min i64* @sc64, i64 %x acquire
; X64: subq
; X64: cmov
; X64: lock
; X64: cmpxchgq

; X32: cmpl
; X32: cmpl
; X32: cmov
; X32: cmov
; X32: cmov
; X32: lock
; X32: cmpxchg8b
  ret void
; X64: ret
; X32: ret
}

define void @atomic_fetch_umax64(i64 %x) nounwind {
; X64-LABEL: atomic_fetch_umax64:
; X32-LABEL: atomic_fetch_umax64:
  %t1 = atomicrmw umax i64* @sc64, i64 %x acquire
; X64: subq
; X64: cmov
; X64: lock
; X64: cmpxchgq

; X32: cmpl
; X32: cmpl
; X32: cmov
; X32: cmov
; X32: cmov
; X32: lock
; X32: cmpxchg8b
  ret void
; X64: ret
; X32: ret
}

define void @atomic_fetch_umin64(i64 %x) nounwind {
; X64-LABEL: atomic_fetch_umin64:
; X32-LABEL: atomic_fetch_umin64:
  %t1 = atomicrmw umin i64* @sc64, i64 %x acquire
; X64: subq
; X64: cmov
; X64: lock
; X64: cmpxchgq

; X32: cmpl
; X32: cmpl
; X32: cmov
; X32: cmov
; X32: cmov
; X32: lock
; X32: cmpxchg8b
  ret void
; X64: ret
; X32: ret
}

define void @atomic_fetch_cmpxchg64() nounwind {
; X64-LABEL: atomic_fetch_cmpxchg64:
; X32-LABEL: atomic_fetch_cmpxchg64:
  %t1 = cmpxchg i64* @sc64, i64 0, i64 1 acquire acquire
; X64: lock
; X64: cmpxchgq
; X32: lock
; X32: cmpxchg8b
  ret void
; X64: ret
; X32: ret
}

define void @atomic_fetch_store64(i64 %x) nounwind {
; X64-LABEL: atomic_fetch_store64:
; X32-LABEL: atomic_fetch_store64:
; A release store on x86-64 is a plain mov — no lock prefix needed.
  store atomic i64 %x, i64* @sc64 release, align 8
; X64-NOT: lock
; X64: movq
; X32: lock
; X32: cmpxchg8b
  ret void
; X64: ret
; X32: ret
}

define void @atomic_fetch_swap64(i64 %x) nounwind {
; X64-LABEL: atomic_fetch_swap64:
; X32-LABEL: atomic_fetch_swap64:
; xchg with a memory operand is implicitly locked, hence the X64-NOT below.
; Fixed dead X32 check: there is no `xchg8b` instruction; a 64-bit xchg on a
; 32-bit target is lowered to a lock cmpxchg8b loop.
  %t1 = atomicrmw xchg i64* @sc64, i64 %x acquire
; X64-NOT: lock
; X64: xchgq
; X32: lock
; X32: cmpxchg8b
  ret void
; X64: ret
; X32: ret
}