; (Web-viewer navigation chrome removed — not part of the original test file.)
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f,+avx512bw,+avx512vl,+avx512dq -O2 | FileCheck %s --check-prefix=AVX512-ALL
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f -O2 | FileCheck %s --check-prefix=AVX512-ONLY

5define void @load_v1i2_trunc_v1i1_store(<1 x i2>* %a0,<1 x i1>* %a1) {
6; AVX512-ALL-LABEL: load_v1i2_trunc_v1i1_store:
7; AVX512-ALL:       # %bb.0:
8; AVX512-ALL-NEXT:    kmovb (%rdi), %k0
9; AVX512-ALL-NEXT:    kshiftlb $7, %k0, %k0
10; AVX512-ALL-NEXT:    kshiftrb $7, %k0, %k0
11; AVX512-ALL-NEXT:    kmovb %k0, (%rsi)
12; AVX512-ALL-NEXT:    retq
13;
14; AVX512-ONLY-LABEL: load_v1i2_trunc_v1i1_store:
15; AVX512-ONLY:       # %bb.0:
16; AVX512-ONLY-NEXT:    movb (%rdi), %al
17; AVX512-ONLY-NEXT:    andl $1, %eax
18; AVX512-ONLY-NEXT:    kmovw %eax, %k0
19; AVX512-ONLY-NEXT:    kmovw %k0, %eax
20; AVX512-ONLY-NEXT:    movb %al, (%rsi)
21; AVX512-ONLY-NEXT:    retq
22    %d0 = load <1 x i2>, <1 x i2>* %a0
23    %d1 = trunc <1 x i2> %d0 to <1 x i1>
24    store <1 x i1> %d1, <1 x i1>* %a1
25    ret void
26}
27define void @load_v1i3_trunc_v1i1_store(<1 x i3>* %a0,<1 x i1>* %a1) {
28; AVX512-ALL-LABEL: load_v1i3_trunc_v1i1_store:
29; AVX512-ALL:       # %bb.0:
30; AVX512-ALL-NEXT:    kmovb (%rdi), %k0
31; AVX512-ALL-NEXT:    kshiftlb $7, %k0, %k0
32; AVX512-ALL-NEXT:    kshiftrb $7, %k0, %k0
33; AVX512-ALL-NEXT:    kmovb %k0, (%rsi)
34; AVX512-ALL-NEXT:    retq
35;
36; AVX512-ONLY-LABEL: load_v1i3_trunc_v1i1_store:
37; AVX512-ONLY:       # %bb.0:
38; AVX512-ONLY-NEXT:    movb (%rdi), %al
39; AVX512-ONLY-NEXT:    andl $1, %eax
40; AVX512-ONLY-NEXT:    kmovw %eax, %k0
41; AVX512-ONLY-NEXT:    kmovw %k0, %eax
42; AVX512-ONLY-NEXT:    movb %al, (%rsi)
43; AVX512-ONLY-NEXT:    retq
44    %d0 = load <1 x i3>, <1 x i3>* %a0
45    %d1 = trunc <1 x i3> %d0 to <1 x i1>
46    store <1 x i1> %d1, <1 x i1>* %a1
47    ret void
48}
49define void @load_v1i4_trunc_v1i1_store(<1 x i4>* %a0,<1 x i1>* %a1) {
50; AVX512-ALL-LABEL: load_v1i4_trunc_v1i1_store:
51; AVX512-ALL:       # %bb.0:
52; AVX512-ALL-NEXT:    kmovb (%rdi), %k0
53; AVX512-ALL-NEXT:    kshiftlb $7, %k0, %k0
54; AVX512-ALL-NEXT:    kshiftrb $7, %k0, %k0
55; AVX512-ALL-NEXT:    kmovb %k0, (%rsi)
56; AVX512-ALL-NEXT:    retq
57;
58; AVX512-ONLY-LABEL: load_v1i4_trunc_v1i1_store:
59; AVX512-ONLY:       # %bb.0:
60; AVX512-ONLY-NEXT:    movb (%rdi), %al
61; AVX512-ONLY-NEXT:    andl $1, %eax
62; AVX512-ONLY-NEXT:    kmovw %eax, %k0
63; AVX512-ONLY-NEXT:    kmovw %k0, %eax
64; AVX512-ONLY-NEXT:    movb %al, (%rsi)
65; AVX512-ONLY-NEXT:    retq
66    %d0 = load <1 x i4>, <1 x i4>* %a0
67    %d1 = trunc <1 x i4> %d0 to <1 x i1>
68    store <1 x i1> %d1, <1 x i1>* %a1
69    ret void
70}
71define void @load_v1i8_trunc_v1i1_store(<1 x i8>* %a0,<1 x i1>* %a1) {
72; AVX512-ALL-LABEL: load_v1i8_trunc_v1i1_store:
73; AVX512-ALL:       # %bb.0:
74; AVX512-ALL-NEXT:    kmovb (%rdi), %k0
75; AVX512-ALL-NEXT:    kshiftlb $7, %k0, %k0
76; AVX512-ALL-NEXT:    kshiftrb $7, %k0, %k0
77; AVX512-ALL-NEXT:    kmovb %k0, (%rsi)
78; AVX512-ALL-NEXT:    retq
79;
80; AVX512-ONLY-LABEL: load_v1i8_trunc_v1i1_store:
81; AVX512-ONLY:       # %bb.0:
82; AVX512-ONLY-NEXT:    movb (%rdi), %al
83; AVX512-ONLY-NEXT:    andl $1, %eax
84; AVX512-ONLY-NEXT:    kmovw %eax, %k0
85; AVX512-ONLY-NEXT:    kmovw %k0, %eax
86; AVX512-ONLY-NEXT:    movb %al, (%rsi)
87; AVX512-ONLY-NEXT:    retq
88    %d0 = load <1 x i8>, <1 x i8>* %a0
89    %d1 = trunc <1 x i8> %d0 to <1 x i1>
90    store <1 x i1> %d1, <1 x i1>* %a1
91    ret void
92}
93define void @load_v1i16_trunc_v1i1_store(<1 x i16>* %a0,<1 x i1>* %a1) {
94; AVX512-ALL-LABEL: load_v1i16_trunc_v1i1_store:
95; AVX512-ALL:       # %bb.0:
96; AVX512-ALL-NEXT:    kmovb (%rdi), %k0
97; AVX512-ALL-NEXT:    kshiftlb $7, %k0, %k0
98; AVX512-ALL-NEXT:    kshiftrb $7, %k0, %k0
99; AVX512-ALL-NEXT:    kmovb %k0, (%rsi)
100; AVX512-ALL-NEXT:    retq
101;
102; AVX512-ONLY-LABEL: load_v1i16_trunc_v1i1_store:
103; AVX512-ONLY:       # %bb.0:
104; AVX512-ONLY-NEXT:    movb (%rdi), %al
105; AVX512-ONLY-NEXT:    andl $1, %eax
106; AVX512-ONLY-NEXT:    kmovw %eax, %k0
107; AVX512-ONLY-NEXT:    kmovw %k0, %eax
108; AVX512-ONLY-NEXT:    movb %al, (%rsi)
109; AVX512-ONLY-NEXT:    retq
110    %d0 = load <1 x i16>, <1 x i16>* %a0
111    %d1 = trunc <1 x i16> %d0 to <1 x i1>
112    store <1 x i1> %d1, <1 x i1>* %a1
113    ret void
114}
115define void @load_v1i32_trunc_v1i1_store(<1 x i32>* %a0,<1 x i1>* %a1) {
116; AVX512-ALL-LABEL: load_v1i32_trunc_v1i1_store:
117; AVX512-ALL:       # %bb.0:
118; AVX512-ALL-NEXT:    kmovb (%rdi), %k0
119; AVX512-ALL-NEXT:    kshiftlb $7, %k0, %k0
120; AVX512-ALL-NEXT:    kshiftrb $7, %k0, %k0
121; AVX512-ALL-NEXT:    kmovb %k0, (%rsi)
122; AVX512-ALL-NEXT:    retq
123;
124; AVX512-ONLY-LABEL: load_v1i32_trunc_v1i1_store:
125; AVX512-ONLY:       # %bb.0:
126; AVX512-ONLY-NEXT:    movb (%rdi), %al
127; AVX512-ONLY-NEXT:    andl $1, %eax
128; AVX512-ONLY-NEXT:    kmovw %eax, %k0
129; AVX512-ONLY-NEXT:    kmovw %k0, %eax
130; AVX512-ONLY-NEXT:    movb %al, (%rsi)
131; AVX512-ONLY-NEXT:    retq
132    %d0 = load <1 x i32>, <1 x i32>* %a0
133    %d1 = trunc <1 x i32> %d0 to <1 x i1>
134    store <1 x i1> %d1, <1 x i1>* %a1
135    ret void
136}
137define void @load_v1i64_trunc_v1i1_store(<1 x i64>* %a0,<1 x i1>* %a1) {
138; AVX512-ALL-LABEL: load_v1i64_trunc_v1i1_store:
139; AVX512-ALL:       # %bb.0:
140; AVX512-ALL-NEXT:    kmovb (%rdi), %k0
141; AVX512-ALL-NEXT:    kshiftlb $7, %k0, %k0
142; AVX512-ALL-NEXT:    kshiftrb $7, %k0, %k0
143; AVX512-ALL-NEXT:    kmovb %k0, (%rsi)
144; AVX512-ALL-NEXT:    retq
145;
146; AVX512-ONLY-LABEL: load_v1i64_trunc_v1i1_store:
147; AVX512-ONLY:       # %bb.0:
148; AVX512-ONLY-NEXT:    movb (%rdi), %al
149; AVX512-ONLY-NEXT:    andl $1, %eax
150; AVX512-ONLY-NEXT:    kmovw %eax, %k0
151; AVX512-ONLY-NEXT:    kmovw %k0, %eax
152; AVX512-ONLY-NEXT:    movb %al, (%rsi)
153; AVX512-ONLY-NEXT:    retq
154    %d0 = load <1 x i64>, <1 x i64>* %a0
155    %d1 = trunc <1 x i64> %d0 to <1 x i1>
156    store <1 x i1> %d1, <1 x i1>* %a1
157    ret void
158}