// RUN: %clang_cc1 %s -O3 -triple=x86_64-unknown-unknown -target-feature +tbm -emit-llvm -o - | FileCheck %s
// FIXME: The code generation checks for the add/sub and and/or patterns depend on the optimizer.
// The REQUIRES keyword will be removed when the FIXME is complete.
// REQUIRES: x86-registered-target

// Don't include mm_malloc.h; it's system specific.
#define __MM_MALLOC_H

#include <x86intrin.h>

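// BEXTRI extracts a contiguous bit field; in the TBM immediate encoding, the
// low byte of the control selects the start bit and the next byte the field
// length. The three tests below should lower directly to the
// llvm.x86.tbm.bextri intrinsics; the bigint variant checks that an
// over-wide immediate constant is still accepted.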
unsigned int test__bextri_u32(unsigned int a) {
  // CHECK: call i32 @llvm.x86.tbm.bextri.u32
  return __bextri_u32(a, 1);
}

unsigned long long test__bextri_u64(unsigned long long a) {
  // CHECK: call i64 @llvm.x86.tbm.bextri.u64
  return __bextri_u64(a, 2);
}

unsigned long long test__bextri_u64_bigint(unsigned long long a) {
  // CHECK: call i64 @llvm.x86.tbm.bextri.u64
  return __bextri_u64(a, 0x7fffffffffLL);
}

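// BLCFILL computes a & (a + 1), clearing all trailing set bits. At -O3 the
// builtin is expected to show up as the add/and pair checked below.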
unsigned int test__blcfill_u32(unsigned int a) {
  // CHECK: [[TMP:%.*]] = add i32 [[SRC:%.*]], 1
  // CHECK-NEXT: %{{.*}} = and i32 [[TMP]], [[SRC]]
  return __blcfill_u32(a);
}

unsigned long long test__blcfill_u64(unsigned long long a) {
  // CHECK: [[TMP:%.*]] = add i64 [[SRC:%.*]], 1
  // CHECK-NEXT: %{{.*}} = and i64 [[TMP]], [[SRC]]
  return __blcfill_u64(a);
}

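// BLCI computes a | ~(a + 1), setting every bit except the lowest clear bit.
// ~(a + 1) is equivalent to (-2 - a), which is the "sub -2" form the
// optimizer is expected to emit below.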
unsigned int test__blci_u32(unsigned int a) {
  // CHECK: [[TMP:%.*]] = sub i32 -2, [[SRC:%.*]]
  // CHECK-NEXT: %{{.*}} = or i32 [[TMP]], [[SRC]]
  return __blci_u32(a);
}

unsigned long long test__blci_u64(unsigned long long a) {
  // CHECK: [[TMP:%.*]] = sub i64 -2, [[SRC:%.*]]
  // CHECK-NEXT: %{{.*}} = or i64 [[TMP]], [[SRC]]
  return __blci_u64(a);
}

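// BLCIC isolates the lowest clear bit as a set bit: ~a & (a + 1), i.e. the
// xor/add/and sequence checked below.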
unsigned int test__blcic_u32(unsigned int a) {
  // CHECK: [[TMP1:%.*]] = xor i32 [[SRC:%.*]], -1
  // CHECK-NEXT: [[TMP2:%.*]] = add i32 [[SRC]], 1
  // CHECK-NEXT: {{.*}} = and i32 [[TMP2]], [[TMP1]]
  return __blcic_u32(a);
}

unsigned long long test__blcic_u64(unsigned long long a) {
  // CHECK: [[TMP1:%.*]] = xor i64 [[SRC:%.*]], -1
  // CHECK-NEXT: [[TMP2:%.*]] = add i64 [[SRC]], 1
  // CHECK-NEXT: {{.*}} = and i64 [[TMP2]], [[TMP1]]
  return __blcic_u64(a);
}

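// BLCMSK masks up to and including the lowest clear bit: a ^ (a + 1).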
unsigned int test__blcmsk_u32(unsigned int a) {
  // CHECK: [[TMP:%.*]] = add i32 [[SRC:%.*]], 1
  // CHECK-NEXT: {{.*}} = xor i32 [[TMP]], [[SRC]]
  return __blcmsk_u32(a);
}

unsigned long long test__blcmsk_u64(unsigned long long a) {
  // CHECK: [[TMP:%.*]] = add i64 [[SRC:%.*]], 1
  // CHECK-NEXT: {{.*}} = xor i64 [[TMP]], [[SRC]]
  return __blcmsk_u64(a);
}

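// BLCS sets the lowest clear bit: a | (a + 1).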
unsigned int test__blcs_u32(unsigned int a) {
  // CHECK: [[TMP:%.*]] = add i32 [[SRC:%.*]], 1
  // CHECK-NEXT: {{.*}} = or i32 [[TMP]], [[SRC]]
  return __blcs_u32(a);
}

unsigned long long test__blcs_u64(unsigned long long a) {
  // CHECK: [[TMP:%.*]] = add i64 [[SRC:%.*]], 1
  // CHECK-NEXT: {{.*}} = or i64 [[TMP]], [[SRC]]
  return __blcs_u64(a);
}

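// BLSFILL sets all trailing zero bits (fills from the lowest set bit):
// a | (a - 1).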
unsigned int test__blsfill_u32(unsigned int a) {
  // CHECK: [[TMP:%.*]] = add i32 [[SRC:%.*]], -1
  // CHECK-NEXT: {{.*}} = or i32 [[TMP]], [[SRC]]
  return __blsfill_u32(a);
}

unsigned long long test__blsfill_u64(unsigned long long a) {
  // CHECK: [[TMP:%.*]] = add i64 [[SRC:%.*]], -1
  // CHECK-NEXT: {{.*}} = or i64 [[TMP]], [[SRC]]
  return __blsfill_u64(a);
}

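// BLSIC computes ~a | (a - 1), the complement of the isolated lowest set
// bit.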
unsigned int test__blsic_u32(unsigned int a) {
  // CHECK: [[TMP1:%.*]] = xor i32 [[SRC:%.*]], -1
  // CHECK-NEXT: [[TMP2:%.*]] = add i32 [[SRC]], -1
  // CHECK-NEXT: {{.*}} = or i32 [[TMP2]], [[TMP1]]
  return __blsic_u32(a);
}

unsigned long long test__blsic_u64(unsigned long long a) {
  // CHECK: [[TMP1:%.*]] = xor i64 [[SRC:%.*]], -1
  // CHECK-NEXT: [[TMP2:%.*]] = add i64 [[SRC]], -1
  // CHECK-NEXT: {{.*}} = or i64 [[TMP2]], [[TMP1]]
  return __blsic_u64(a);
}

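// T1MSKC computes ~a | (a + 1), the complement of the trailing-ones mask.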
unsigned int test__t1mskc_u32(unsigned int a) {
  // CHECK: [[TMP1:%.*]] = xor i32 [[SRC:%.*]], -1
  // CHECK-NEXT: [[TMP2:%.*]] = add i32 [[SRC]], 1
  // CHECK-NEXT: {{.*}} = or i32 [[TMP2]], [[TMP1]]
  return __t1mskc_u32(a);
}

unsigned long long test__t1mskc_u64(unsigned long long a) {
  // CHECK: [[TMP1:%.*]] = xor i64 [[SRC:%.*]], -1
  // CHECK-NEXT: [[TMP2:%.*]] = add i64 [[SRC]], 1
  // CHECK-NEXT: {{.*}} = or i64 [[TMP2]], [[TMP1]]
  return __t1mskc_u64(a);
}

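// TZMSK computes ~a & (a - 1), a mask covering the trailing zero bits.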
unsigned int test__tzmsk_u32(unsigned int a) {
  // CHECK: [[TMP1:%.*]] = xor i32 [[SRC:%.*]], -1
  // CHECK-NEXT: [[TMP2:%.*]] = add i32 [[SRC]], -1
  // CHECK-NEXT: {{.*}} = and i32 [[TMP2]], [[TMP1]]
  return __tzmsk_u32(a);
}

unsigned long long test__tzmsk_u64(unsigned long long a) {
  // CHECK: [[TMP1:%.*]] = xor i64 [[SRC:%.*]], -1
  // CHECK-NEXT: [[TMP2:%.*]] = add i64 [[SRC]], -1
  // CHECK-NEXT: {{.*}} = and i64 [[TMP2]], [[TMP1]]
  return __tzmsk_u64(a);
}