; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+ssse3 | FileCheck %s

; Test that the pshufb mask comment is correct.

define <16 x i8> @test1(<16 x i8> %V) {
; CHECK-LABEL: test1:
; CHECK:       # BB#0:
; CHECK-NEXT:    pshufb {{.*#+}} xmm0 = xmm0[1,0,0,0,0,2,0,0,0,0,3,0,0,0,0,4]
; CHECK-NEXT:    retq
  %1 = tail call <16 x i8> @llvm.x86.ssse3.pshuf.b.128(<16 x i8> %V, <16 x i8> <i8 1, i8 0, i8 0, i8 0, i8 0, i8 2, i8 0, i8 0, i8 0, i8 0, i8 3, i8 0, i8 0, i8 0, i8 0, i8 4>)
  ret <16 x i8> %1
}

; Test that indexes larger than the size of the vector are shown masked (bottom 4 bits).

define <16 x i8> @test2(<16 x i8> %V) {
; CHECK-LABEL: test2:
; CHECK:       # BB#0:
; CHECK-NEXT:    pshufb {{.*#+}} xmm0 = xmm0[15,0,0,0,0,0,0,0,0,0,1,0,0,0,0,2]
; CHECK-NEXT:    retq
  %1 = tail call <16 x i8> @llvm.x86.ssse3.pshuf.b.128(<16 x i8> %V, <16 x i8> <i8 15, i8 0, i8 0, i8 0, i8 0, i8 16, i8 0, i8 0, i8 0, i8 0, i8 17, i8 0, i8 0, i8 0, i8 0, i8 50>)
  ret <16 x i8> %1
}

; Test that indexes with bit seven set are shown as zero.

define <16 x i8> @test3(<16 x i8> %V) {
; CHECK-LABEL: test3:
; CHECK:       # BB#0:
; CHECK-NEXT:    pshufb {{.*#+}} xmm0 = xmm0[1,0,0,15,0,2,0,0],zero,xmm0[0,3,0,0],zero,xmm0[0,4]
; CHECK-NEXT:    retq
  %1 = tail call <16 x i8> @llvm.x86.ssse3.pshuf.b.128(<16 x i8> %V, <16 x i8> <i8 1, i8 0, i8 0, i8 127, i8 0, i8 2, i8 0, i8 0, i8 128, i8 0, i8 3, i8 0, i8 0, i8 255, i8 0, i8 4>)
  ret <16 x i8> %1
}

; Test that we won't crash when the constant was reused for another instruction.

define <16 x i8> @test4(<16 x i8> %V, <2 x i64>* %P) {
; CHECK-LABEL: test4:
; CHECK:       # BB#0:
; CHECK-NEXT:    movaps {{.*#+}} xmm1 = [1084818905618843912,506097522914230528]
; CHECK-NEXT:    movaps %xmm1, (%rdi)
; CHECK-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
; CHECK-NEXT:    retq
  %1 = insertelement <2 x i64> undef, i64 1084818905618843912, i32 0
  %2 = insertelement <2 x i64>    %1, i64  506097522914230528, i32 1
  store <2 x i64> %2, <2 x i64>* %P, align 16
  %3 = bitcast <2 x i64> %2 to <16 x i8>
  %4 = tail call <16 x i8> @llvm.x86.ssse3.pshuf.b.128(<16 x i8> %V, <16 x i8> %3)
  ret <16 x i8> %4
}
define <16 x i8> @test5(<16 x i8> %V) {
; CHECK-LABEL: test5:
; CHECK:       # BB#0:
; CHECK-NEXT:    movl $1, %eax
; CHECK-NEXT:    movd %rax, %xmm1
; CHECK-NEXT:    movaps %xmm1, (%rax)
; CHECK-NEXT:    movdqa {{.*#+}} xmm1 = [1,1]
; CHECK-NEXT:    movdqa %xmm1, (%rax)
; CHECK-NEXT:    pshufb %xmm1, %xmm0
; CHECK-NEXT:    retq
  store <2 x i64> <i64 1, i64 0>, <2 x i64>* undef, align 16
  %l = load <2 x i64>, <2 x i64>* undef, align 16
  %shuffle = shufflevector <2 x i64> %l, <2 x i64> undef, <2 x i32> zeroinitializer
  store <2 x i64> %shuffle, <2 x i64>* undef, align 16
  %1 = load <16 x i8>, <16 x i8>* undef, align 16
  %2 = call <16 x i8> @llvm.x86.ssse3.pshuf.b.128(<16 x i8> %V, <16 x i8> %1)
  ret <16 x i8> %2
}

; Test for a reused constant that would allow the pshufb to combine to a simpler instruction.

define <16 x i8> @test6(<16 x i8> %V, <2 x i64>* %P) {
; CHECK-LABEL: test6:
; CHECK:       # BB#0:
; CHECK-NEXT:    movaps {{.*#+}} xmm1 = [217019414673948672,506380106026255364]
; CHECK-NEXT:    movaps %xmm1, (%rdi)
; CHECK-NEXT:    punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; CHECK-NEXT:    retq
  %1 = insertelement <2 x i64> undef, i64 217019414673948672, i32 0
  %2 = insertelement <2 x i64>    %1, i64 506380106026255364, i32 1
  store <2 x i64> %2, <2 x i64>* %P, align 16
  %3 = bitcast <2 x i64> %2 to <16 x i8>
  %4 = tail call <16 x i8> @llvm.x86.ssse3.pshuf.b.128(<16 x i8> %V, <16 x i8> %3)
  ret <16 x i8> %4
}
declare <16 x i8> @llvm.x86.ssse3.pshuf.b.128(<16 x i8>, <16 x i8>) nounwind readnone