; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -fast-isel -mtriple=i686-unknown-unknown -mattr=+tbm | FileCheck %s --check-prefix=X86
; RUN: llc < %s -fast-isel -mtriple=x86_64-unknown-unknown -mattr=+tbm | FileCheck %s --check-prefix=X64

; NOTE: This should use IR equivalent to what is generated by clang/test/CodeGen/tbm-builtins.c

define i32 @test__bextri_u32(i32 %a0) {
; X86-LABEL: test__bextri_u32:
; X86:       # %bb.0:
; X86-NEXT:    bextrl $3841, {{[0-9]+}}(%esp), %eax # imm = 0xF01
; X86-NEXT:    retl
;
; X64-LABEL: test__bextri_u32:
; X64:       # %bb.0:
; X64-NEXT:    bextrl $3841, %edi, %eax # imm = 0xF01
; X64-NEXT:    retq
  %1 = call i32 @llvm.x86.tbm.bextri.u32(i32 %a0, i32 3841)
  ret i32 %1
}

define i32 @test__blcfill_u32(i32 %a0) {
; X86-LABEL: test__blcfill_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    leal 1(%ecx), %eax
; X86-NEXT:    andl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__blcfill_u32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal 1(%rdi), %eax
; X64-NEXT:    andl %edi, %eax
; X64-NEXT:    retq
  %1 = add i32 %a0, 1
  %2 = and i32 %a0, %1
  ret i32 %2
}

define i32 @test__blci_u32(i32 %a0) {
; X86-LABEL: test__blci_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    leal 1(%ecx), %eax
; X86-NEXT:    xorl $-1, %eax
; X86-NEXT:    orl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__blci_u32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal 1(%rdi), %eax
; X64-NEXT:    xorl $-1, %eax
; X64-NEXT:    orl %edi, %eax
; X64-NEXT:    retq
  %1 = add i32 %a0, 1
  %2 = xor i32 %1, -1
  %3 = or i32 %a0, %2
  ret i32 %3
}

define i32 @test__blcic_u32(i32 %a0) {
; X86-LABEL: test__blcic_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl %eax, %ecx
; X86-NEXT:    xorl $-1, %ecx
; X86-NEXT:    addl $1, %eax
; X86-NEXT:    andl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__blcic_u32:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    movl %edi, %ecx
; X64-NEXT:    xorl $-1, %ecx
; X64-NEXT:    addl $1, %eax
; X64-NEXT:    andl %ecx, %eax
; X64-NEXT:    retq
  %1 = xor i32 %a0, -1
  %2 = add i32 %a0, 1
  %3 = and i32 %1, %2
  ret i32 %3
}

define i32 @test__blcmsk_u32(i32 %a0) {
; X86-LABEL: test__blcmsk_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    leal 1(%ecx), %eax
; X86-NEXT:    xorl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__blcmsk_u32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal 1(%rdi), %eax
; X64-NEXT:    xorl %edi, %eax
; X64-NEXT:    retq
  %1 = add i32 %a0, 1
  %2 = xor i32 %a0, %1
  ret i32 %2
}

define i32 @test__blcs_u32(i32 %a0) {
; X86-LABEL: test__blcs_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    leal 1(%ecx), %eax
; X86-NEXT:    orl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__blcs_u32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal 1(%rdi), %eax
; X64-NEXT:    orl %edi, %eax
; X64-NEXT:    retq
  %1 = add i32 %a0, 1
  %2 = or i32 %a0, %1
  ret i32 %2
}

define i32 @test__blsfill_u32(i32 %a0) {
; X86-LABEL: test__blsfill_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    leal -1(%ecx), %eax
; X86-NEXT:    orl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__blsfill_u32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal -1(%rdi), %eax
; X64-NEXT:    orl %edi, %eax
; X64-NEXT:    retq
  %1 = sub i32 %a0, 1
  %2 = or i32 %a0, %1
  ret i32 %2
}

define i32 @test__blsic_u32(i32 %a0) {
; X86-LABEL: test__blsic_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl %eax, %ecx
; X86-NEXT:    xorl $-1, %ecx
; X86-NEXT:    subl $1, %eax
; X86-NEXT:    orl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__blsic_u32:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    movl %edi, %ecx
; X64-NEXT:    xorl $-1, %ecx
; X64-NEXT:    subl $1, %eax
; X64-NEXT:    orl %ecx, %eax
; X64-NEXT:    retq
  %1 = xor i32 %a0, -1
  %2 = sub i32 %a0, 1
  %3 = or i32 %1, %2
  ret i32 %3
}

define i32 @test__t1mskc_u32(i32 %a0) {
; X86-LABEL: test__t1mskc_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl %eax, %ecx
; X86-NEXT:    xorl $-1, %ecx
; X86-NEXT:    addl $1, %eax
; X86-NEXT:    orl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__t1mskc_u32:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    movl %edi, %ecx
; X64-NEXT:    xorl $-1, %ecx
; X64-NEXT:    addl $1, %eax
; X64-NEXT:    orl %ecx, %eax
; X64-NEXT:    retq
  %1 = xor i32 %a0, -1
  %2 = add i32 %a0, 1
  %3 = or i32 %1, %2
  ret i32 %3
}

define i32 @test__tzmsk_u32(i32 %a0) {
; X86-LABEL: test__tzmsk_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl %eax, %ecx
; X86-NEXT:    xorl $-1, %ecx
; X86-NEXT:    subl $1, %eax
; X86-NEXT:    andl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__tzmsk_u32:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    movl %edi, %ecx
; X64-NEXT:    xorl $-1, %ecx
; X64-NEXT:    subl $1, %eax
; X64-NEXT:    andl %ecx, %eax
; X64-NEXT:    retq
  %1 = xor i32 %a0, -1
  %2 = sub i32 %a0, 1
  %3 = and i32 %1, %2
  ret i32 %3
}

declare i32 @llvm.x86.tbm.bextri.u32(i32, i32)
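
; Informal reference, not part of the autogenerated checks: each test's IR
; mirrors the bit manipulation performed by the corresponding TBM instruction
; on a 32-bit value x (only BEXTRI is actually selected here under fast-isel;
; the rest check the generic lowering of the equivalent IR):
;   BEXTRI  x, 0xF01  ->  (x >> 1) & 0x7FFF   (imm bits 7:0 = start, 15:8 = length)
;   BLCFILL x         ->  x & (x + 1)
;   BLCI    x         ->  x | ~(x + 1)
;   BLCIC   x         ->  ~x & (x + 1)
;   BLCMSK  x         ->  x ^ (x + 1)
;   BLCS    x         ->  x | (x + 1)
;   BLSFILL x         ->  x | (x - 1)
;   BLSIC   x         ->  ~x | (x - 1)
;   T1MSKC  x         ->  ~x | (x + 1)
;   TZMSK   x         ->  ~x & (x - 1)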