; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown -mattr=+avx512cd | FileCheck %s --check-prefixes=CHECK,X86
; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+avx512cd | FileCheck %s --check-prefixes=CHECK,X64

declare <16 x i32> @llvm.x86.avx512.mask.conflict.d.512(<16 x i32>, <16 x i32>, i16) nounwind readonly

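; An all-ones mask with a zero passthrough should fold the masking away
; entirely and select the plain vpconflictq form on both targets.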
define <8 x i64> @test_conflict_q(<8 x i64> %a) {
; CHECK-LABEL: test_conflict_q:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vpconflictq %zmm0, %zmm0
; CHECK-NEXT:    ret{{[l|q]}}
  %res = call <8 x i64> @llvm.x86.avx512.mask.conflict.q.512(<8 x i64> %a, <8 x i64> zeroinitializer, i8 -1)
  ret <8 x i64> %res
}

declare <8 x i64> @llvm.x86.avx512.mask.conflict.q.512(<8 x i64>, <8 x i64>, i8) nounwind readonly

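; A zero passthrough with a live mask should select the zero-masking {z}
; form of vpconflictd; on i686 the i16 mask is reloaded from the stack.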
define <16 x i32> @test_maskz_conflict_d(<16 x i32> %a, i16 %mask) {
; X86-LABEL: test_maskz_conflict_d:
; X86:       # %bb.0:
; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1
; X86-NEXT:    vpconflictd %zmm0, %zmm0 {%k1} {z}
; X86-NEXT:    retl
;
; X64-LABEL: test_maskz_conflict_d:
; X64:       # %bb.0:
; X64-NEXT:    kmovw %edi, %k1
; X64-NEXT:    vpconflictd %zmm0, %zmm0 {%k1} {z}
; X64-NEXT:    retq
  %res = call <16 x i32> @llvm.x86.avx512.mask.conflict.d.512(<16 x i32> %a, <16 x i32> zeroinitializer, i16 %mask)
  ret <16 x i32> %res
}

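; A non-zero passthrough should select merge-masking into the passthrough
; register; on i686 the i8 mask is zero-extended with movzbl before kmovw.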
define <8 x i64> @test_mask_conflict_q(<8 x i64> %a, <8 x i64> %b, i8 %mask) {
; X86-LABEL: test_mask_conflict_q:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    kmovw %eax, %k1
; X86-NEXT:    vpconflictq %zmm0, %zmm1 {%k1}
; X86-NEXT:    vmovdqa64 %zmm1, %zmm0
; X86-NEXT:    retl
;
; X64-LABEL: test_mask_conflict_q:
; X64:       # %bb.0:
; X64-NEXT:    kmovw %edi, %k1
; X64-NEXT:    vpconflictq %zmm0, %zmm1 {%k1}
; X64-NEXT:    vmovdqa64 %zmm1, %zmm0
; X64-NEXT:    retq
  %res = call <8 x i64> @llvm.x86.avx512.mask.conflict.q.512(<8 x i64> %a, <8 x i64> %b, i8 %mask)
  ret <8 x i64> %res
}

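; The generic ctlz intrinsic with defined-at-zero semantics (is_zero_poison
; = false) maps directly onto vplzcntd, which returns the element width (32)
; for zero inputs, matching ctlz.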
define <16 x i32> @test_lzcnt_d(<16 x i32> %a) {
; CHECK-LABEL: test_lzcnt_d:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vplzcntd %zmm0, %zmm0
; CHECK-NEXT:    ret{{[l|q]}}
  %1 = call <16 x i32> @llvm.ctlz.v16i32(<16 x i32> %a, i1 false)
  ret <16 x i32> %1
}
declare <16 x i32> @llvm.ctlz.v16i32(<16 x i32>, i1) #0

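; Same lowering for 64-bit elements: ctlz on <8 x i64> becomes vplzcntq.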
define <8 x i64> @test_lzcnt_q(<8 x i64> %a) {
; CHECK-LABEL: test_lzcnt_q:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vplzcntq %zmm0, %zmm0
; CHECK-NEXT:    ret{{[l|q]}}
  %1 = call <8 x i64> @llvm.ctlz.v8i64(<8 x i64> %a, i1 false)
  ret <8 x i64> %1
}
declare <8 x i64> @llvm.ctlz.v8i64(<8 x i64>, i1) #0

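; ctlz followed by a select on the bitcast mask should fold into a single
; merge-masked vplzcntd with %b as the passthrough.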
define <16 x i32> @test_mask_lzcnt_d(<16 x i32> %a, <16 x i32> %b, i16 %mask) {
; X86-LABEL: test_mask_lzcnt_d:
; X86:       # %bb.0:
; X86-NEXT:    kmovw {{[0-9]+}}(%esp), %k1
; X86-NEXT:    vplzcntd %zmm0, %zmm1 {%k1}
; X86-NEXT:    vmovdqa64 %zmm1, %zmm0
; X86-NEXT:    retl
;
; X64-LABEL: test_mask_lzcnt_d:
; X64:       # %bb.0:
; X64-NEXT:    kmovw %edi, %k1
; X64-NEXT:    vplzcntd %zmm0, %zmm1 {%k1}
; X64-NEXT:    vmovdqa64 %zmm1, %zmm0
; X64-NEXT:    retq
  %1 = call <16 x i32> @llvm.ctlz.v16i32(<16 x i32> %a, i1 false)
  %2 = bitcast i16 %mask to <16 x i1>
  %3 = select <16 x i1> %2, <16 x i32> %1, <16 x i32> %b
  ret <16 x i32> %3
}

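; The qword variant of the same fold: the select becomes a merge-masked
; vplzcntq.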
define <8 x i64> @test_mask_lzcnt_q(<8 x i64> %a, <8 x i64> %b, i8 %mask) {
; X86-LABEL: test_mask_lzcnt_q:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    kmovw %eax, %k1
; X86-NEXT:    vplzcntq %zmm0, %zmm1 {%k1}
; X86-NEXT:    vmovdqa64 %zmm1, %zmm0
; X86-NEXT:    retl
;
; X64-LABEL: test_mask_lzcnt_q:
; X64:       # %bb.0:
; X64-NEXT:    kmovw %edi, %k1
; X64-NEXT:    vplzcntq %zmm0, %zmm1 {%k1}
; X64-NEXT:    vmovdqa64 %zmm1, %zmm0
; X64-NEXT:    retq
  %1 = call <8 x i64> @llvm.ctlz.v8i64(<8 x i64> %a, i1 false)
  %2 = bitcast i8 %mask to <8 x i1>
  %3 = select <8 x i1> %2, <8 x i64> %1, <8 x i64> %b
  ret <8 x i64> %3
}

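; Assumed: the ctlz declarations above reference attribute group #0, so a
; minimal, conventional definition is supplied here so the file parses.
attributes #0 = { nounwind readnone }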