; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown -mcpu=skylake | FileCheck %s --check-prefixes=X86,X86-SKYLAKE
; RUN: llc < %s -mtriple=i686-unknown-unknown -mcpu=skx | FileCheck %s --check-prefixes=X86,X86-SKX
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mcpu=skylake | FileCheck %s --check-prefixes=X64,X64-SKYLAKE
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mcpu=skx | FileCheck %s --check-prefixes=X64,X64-SKX

; Convert a packed R16G16_SNORM texel to UNORM8 RGBA: load two signed 16-bit
; components, clamp negative values to zero, shift right by 7 to rescale to
; 8 bits, pack R and G into the low bytes of a 32-bit value, and OR in
; 0xFF000000 for an opaque alpha channel.
define void @fetch_r16g16_snorm_unorm8(<4 x i8>*, i8*, i32, i32, { [2048 x i32], [128 x i64] }*) nounwind {
; X86-SKYLAKE-LABEL: fetch_r16g16_snorm_unorm8:
; X86-SKYLAKE:       # %bb.0: # %entry
; X86-SKYLAKE-NEXT:    subl $12, %esp
; X86-SKYLAKE-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-SKYLAKE-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-SKYLAKE-NEXT:    vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X86-SKYLAKE-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; X86-SKYLAKE-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
; X86-SKYLAKE-NEXT:    vpsrad $16, %xmm0, %xmm0
; X86-SKYLAKE-NEXT:    vpmaxsd %xmm1, %xmm0, %xmm0
; X86-SKYLAKE-NEXT:    vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2],xmm1[3],xmm0[4],xmm1[5],xmm0[6],xmm1[7]
; X86-SKYLAKE-NEXT:    vpsrld $7, %xmm0, %xmm0
; X86-SKYLAKE-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[0,4],zero,zero,xmm0[u,u,u,u,u,u,u,u,u,u,u,u]
; X86-SKYLAKE-NEXT:    vmovd %xmm0, %ecx
; X86-SKYLAKE-NEXT:    orl $-16777216, %ecx # imm = 0xFF000000
; X86-SKYLAKE-NEXT:    movl %ecx, (%eax)
; X86-SKYLAKE-NEXT:    addl $12, %esp
; X86-SKYLAKE-NEXT:    retl
;
; X86-SKX-LABEL: fetch_r16g16_snorm_unorm8:
; X86-SKX:       # %bb.0: # %entry
; X86-SKX-NEXT:    subl $12, %esp
; X86-SKX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-SKX-NEXT:    vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X86-SKX-NEXT:    vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[0,1],zero,zero,xmm0[2,3],zero,zero,xmm0[u,u],zero,zero,xmm0[u,u]
; X86-SKX-NEXT:    vpsrad $16, %xmm0, %xmm0
; X86-SKX-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; X86-SKX-NEXT:    vpmaxsd %xmm1, %xmm0, %xmm0
; X86-SKX-NEXT:    vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2],xmm1[3],xmm0[4],xmm1[5],xmm0[6],xmm1[7]
; X86-SKX-NEXT:    vpsrld $7, %xmm0, %xmm0
; X86-SKX-NEXT:    vpmovzxdq {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero
; X86-SKX-NEXT:    vpmovqw %xmm1, {{[0-9]+}}(%esp)
; X86-SKX-NEXT:    vpmovzxbd {{.*#+}} xmm1 = mem[0],zero,zero,zero,mem[1],zero,zero,zero,mem[2],zero,zero,zero,mem[3],zero,zero,zero
; X86-SKX-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
; X86-SKX-NEXT:    vpmovqw %xmm0, {{[0-9]+}}(%esp)
; X86-SKX-NEXT:    vpmovzxbd {{.*#+}} xmm0 = mem[0],zero,zero,zero,mem[1],zero,zero,zero,mem[2],zero,zero,zero,mem[3],zero,zero,zero
; X86-SKX-NEXT:    vshufps {{.*#+}} xmm0 = xmm1[0,2],xmm0[0,2]
; X86-SKX-NEXT:    vpmovdb %xmm0, (%esp)
; X86-SKX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-SKX-NEXT:    movzwl (%esp), %ecx
; X86-SKX-NEXT:    orl $-16777216, %ecx # imm = 0xFF000000
; X86-SKX-NEXT:    movl %ecx, (%eax)
; X86-SKX-NEXT:    addl $12, %esp
; X86-SKX-NEXT:    retl
;
; X64-SKYLAKE-LABEL: fetch_r16g16_snorm_unorm8:
; X64-SKYLAKE:       # %bb.0: # %entry
; X64-SKYLAKE-NEXT:    vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X64-SKYLAKE-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; X64-SKYLAKE-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
; X64-SKYLAKE-NEXT:    vpsrad $16, %xmm0, %xmm0
; X64-SKYLAKE-NEXT:    vpmaxsd %xmm1, %xmm0, %xmm0
; X64-SKYLAKE-NEXT:    vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2],xmm1[3],xmm0[4],xmm1[5],xmm0[6],xmm1[7]
; X64-SKYLAKE-NEXT:    vpsrld $7, %xmm0, %xmm0
; X64-SKYLAKE-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[0,4],zero,zero,xmm0[u,u,u,u,u,u,u,u,u,u,u,u]
; X64-SKYLAKE-NEXT:    vmovd %xmm0, %eax
; X64-SKYLAKE-NEXT:    orl $-16777216, %eax # imm = 0xFF000000
; X64-SKYLAKE-NEXT:    movl %eax, (%rdi)
; X64-SKYLAKE-NEXT:    retq
;
; X64-SKX-LABEL: fetch_r16g16_snorm_unorm8:
; X64-SKX:       # %bb.0: # %entry
; X64-SKX-NEXT:    vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X64-SKX-NEXT:    vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[0,1],zero,zero,xmm0[2,3],zero,zero,xmm0[u,u],zero,zero,xmm0[u,u]
; X64-SKX-NEXT:    vpsrad $16, %xmm0, %xmm0
; X64-SKX-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; X64-SKX-NEXT:    vpmaxsd %xmm1, %xmm0, %xmm0
; X64-SKX-NEXT:    vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2],xmm1[3],xmm0[4],xmm1[5],xmm0[6],xmm1[7]
; X64-SKX-NEXT:    vpsrld $7, %xmm0, %xmm0
; X64-SKX-NEXT:    vpmovzxwd {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero
; X64-SKX-NEXT:    vpmovqw %xmm1, -{{[0-9]+}}(%rsp)
; X64-SKX-NEXT:    vpmovzxbd {{.*#+}} xmm1 = mem[0],zero,zero,zero,mem[1],zero,zero,zero,mem[2],zero,zero,zero,mem[3],zero,zero,zero
; X64-SKX-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
; X64-SKX-NEXT:    vpmovqw %xmm0, -{{[0-9]+}}(%rsp)
; X64-SKX-NEXT:    vpmovzxbd {{.*#+}} xmm0 = mem[0],zero,zero,zero,mem[1],zero,zero,zero,mem[2],zero,zero,zero,mem[3],zero,zero,zero
; X64-SKX-NEXT:    vshufps {{.*#+}} xmm0 = xmm1[0,2],xmm0[0,2]
; X64-SKX-NEXT:    vpmovdb %xmm0, -{{[0-9]+}}(%rsp)
; X64-SKX-NEXT:    movzwl -{{[0-9]+}}(%rsp), %eax
; X64-SKX-NEXT:    orl $-16777216, %eax # imm = 0xFF000000
; X64-SKX-NEXT:    movl %eax, (%rdi)
; X64-SKX-NEXT:    retq
entry:
  %5 = bitcast i8* %1 to <2 x i16>*
  %6 = load <2 x i16>, <2 x i16>* %5, align 2
  %7 = shufflevector <2 x i16> %6, <2 x i16> undef, <4 x i32> <i32 0, i32 1, i32 undef, i32 undef>
  %8 = icmp sgt <4 x i16> %7, zeroinitializer
  %9 = select <4 x i1> %8, <4 x i16> %7, <4 x i16> zeroinitializer
  %10 = lshr <4 x i16> %9, <i16 7, i16 7, i16 7, i16 7>
  %11 = shufflevector <4 x i16> %10, <4 x i16> undef, <2 x i32> <i32 0, i32 1>
  %12 = shufflevector <4 x i16> %10, <4 x i16> undef, <2 x i32> <i32 2, i32 3>
  %13 = bitcast <2 x i16> %11 to <4 x i8>
  %14 = bitcast <2 x i16> %12 to <4 x i8>
  %15 = shufflevector <4 x i8> %13, <4 x i8> %14, <4 x i32> <i32 0, i32 2, i32 4, i32 6>
  %16 = bitcast <4 x i8> %15 to i32
  %17 = and i32 %16, 65535
  %18 = or i32 %17, -16777216
  %19 = bitcast <4 x i8>* %0 to i32*
  store i32 %18, i32* %19, align 4
  ret void
}