; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=-bmi | FileCheck %s --check-prefix=ALL --check-prefix=NO_BMI
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi | FileCheck %s --check-prefix=ALL --check-prefix=BMI

; Clear high bits via shift, set them with xor (not), then mask them off.

; After lshr by 31 only bit 0 can be set, so the not+and pair must shrink to a
; single 'xorl $1' of the shifted value (the CHECK lines verify no separate
; not/and instructions are emitted).
define i32 @shrink_xor_constant1(i32 %x) {
; ALL-LABEL: shrink_xor_constant1:
; ALL:       # %bb.0:
; ALL-NEXT:    shrl $31, %edi
; ALL-NEXT:    xorl $1, %edi
; ALL-NEXT:    movl %edi, %eax
; ALL-NEXT:    retq
  %sh = lshr i32 %x, 31
  %not = xor i32 %sh, -1
  %and = and i32 %not, 1
  ret i32 %and
}
; Vector splat version of the case above: the CHECK lines show the not+and
; lowering to a single pandn against a memory constant after the psrld.
define <4 x i32> @shrink_xor_constant1_splat(<4 x i32> %x) {
; ALL-LABEL: shrink_xor_constant1_splat:
; ALL:       # %bb.0:
; ALL-NEXT:    psrld $31, %xmm0
; ALL-NEXT:    pandn {{.*}}(%rip), %xmm0
; ALL-NEXT:    retq
  %sh = lshr <4 x i32> %x, <i32 31, i32 31, i32 31, i32 31>
  %not = xor <4 x i32> %sh, <i32 -1, i32 -1, i32 -1, i32 -1>
  %and = and <4 x i32> %not, <i32 1, i32 1, i32 1, i32 1>
  ret <4 x i32> %and
}

; Clear low bits via shift, set them with xor (not), then mask them off.

; shl by 5 zeroes the low 5 bits, so 'not' then 'and 0xE0' must shrink to a
; single 'xorb $-32' (-32 == 0xE0) of the shifted value, dropping the and.
define i8 @shrink_xor_constant2(i8 %x) {
; ALL-LABEL: shrink_xor_constant2:
; ALL:       # %bb.0:
; ALL-NEXT:    shlb $5, %dil
; ALL-NEXT:    xorb $-32, %dil
; ALL-NEXT:    movl %edi, %eax
; ALL-NEXT:    retq
  %sh = shl i8 %x, 5
  %not = xor i8 %sh, -1
  %and = and i8 %not, 224 ; 0xE0
  ret i8 %and
}
; Fully constant-folds: shl by 5 clears bit 0, 'not' sets it, and 'and 1'
; keeps only that bit — so every lane is 1 and the CHECK lines verify the
; whole function becomes a load of the splat constant.
define <16 x i8> @shrink_xor_constant2_splat(<16 x i8> %x) {
; ALL-LABEL: shrink_xor_constant2_splat:
; ALL:       # %bb.0:
; ALL-NEXT:    movaps {{.*#+}} xmm0 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
; ALL-NEXT:    retq
  %sh = shl <16 x i8> %x, <i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5>
  %not = xor <16 x i8> %sh, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %and = and <16 x i8> %not, <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>
  ret <16 x i8> %and
}