; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+sse3 | FileCheck %s --check-prefix=SSE3
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+sse4.1 | FileCheck %s --check-prefix=SSE41
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx | FileCheck %s --check-prefix=AVX-32
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefix=AVX-32
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefix=AVX-64

define <3 x i16> @zext_i8(<3 x i8>) {
; SSE3-LABEL: zext_i8:
; SSE3:       # %bb.0:
; SSE3-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; SSE3-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; SSE3-NEXT:    movzbl {{[0-9]+}}(%esp), %edx
; SSE3-NEXT:    # kill: def $ax killed $ax killed $eax
; SSE3-NEXT:    # kill: def $dx killed $dx killed $edx
; SSE3-NEXT:    # kill: def $cx killed $cx killed $ecx
; SSE3-NEXT:    retl
;
; SSE41-LABEL: zext_i8:
; SSE41:       # %bb.0:
; SSE41-NEXT:    movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SSE41-NEXT:    pinsrb $1, {{[0-9]+}}(%esp), %xmm0
; SSE41-NEXT:    pinsrb $2, {{[0-9]+}}(%esp), %xmm0
; SSE41-NEXT:    pmovzxbw {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; SSE41-NEXT:    movd %xmm0, %eax
; SSE41-NEXT:    pextrw $1, %xmm0, %edx
; SSE41-NEXT:    pextrw $2, %xmm0, %ecx
; SSE41-NEXT:    # kill: def $ax killed $ax killed $eax
; SSE41-NEXT:    # kill: def $dx killed $dx killed $edx
; SSE41-NEXT:    # kill: def $cx killed $cx killed $ecx
; SSE41-NEXT:    retl
;
; AVX-32-LABEL: zext_i8:
; AVX-32:       # %bb.0:
; AVX-32-NEXT:    vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; AVX-32-NEXT:    vpinsrb $1, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX-32-NEXT:    vpinsrb $2, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX-32-NEXT:    vpmovzxbw {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; AVX-32-NEXT:    vmovd %xmm0, %eax
; AVX-32-NEXT:    vpextrw $1, %xmm0, %edx
; AVX-32-NEXT:    vpextrw $2, %xmm0, %ecx
; AVX-32-NEXT:    # kill: def $ax killed $ax killed $eax
; AVX-32-NEXT:    # kill: def $dx killed $dx killed $edx
; AVX-32-NEXT:    # kill: def $cx killed $cx killed $ecx
; AVX-32-NEXT:    retl
;
; AVX-64-LABEL: zext_i8:
; AVX-64:       # %bb.0:
; AVX-64-NEXT:    vmovd %edi, %xmm0
; AVX-64-NEXT:    vpinsrb $1, %esi, %xmm0, %xmm0
; AVX-64-NEXT:    vpinsrb $2, %edx, %xmm0, %xmm0
; AVX-64-NEXT:    vpmovzxbw {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; AVX-64-NEXT:    vmovd %xmm0, %eax
; AVX-64-NEXT:    vpextrw $1, %xmm0, %edx
; AVX-64-NEXT:    vpextrw $2, %xmm0, %ecx
; AVX-64-NEXT:    # kill: def $ax killed $ax killed $eax
; AVX-64-NEXT:    # kill: def $dx killed $dx killed $edx
; AVX-64-NEXT:    # kill: def $cx killed $cx killed $ecx
; AVX-64-NEXT:    retq
  %2 = zext <3 x i8> %0 to <3 x i16>
  ret <3 x i16> %2
}

define <3 x i16> @sext_i8(<3 x i8>) {
; SSE3-LABEL: sext_i8:
; SSE3:       # %bb.0:
; SSE3-NEXT:    movl {{[0-9]+}}(%esp), %eax
; SSE3-NEXT:    shll $8, %eax
; SSE3-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; SSE3-NEXT:    shll $8, %ecx
; SSE3-NEXT:    movd %ecx, %xmm0
; SSE3-NEXT:    pinsrw $1, %eax, %xmm0
; SSE3-NEXT:    movl {{[0-9]+}}(%esp), %eax
; SSE3-NEXT:    shll $8, %eax
; SSE3-NEXT:    pinsrw $2, %eax, %xmm0
; SSE3-NEXT:    psraw $8, %xmm0
; SSE3-NEXT:    movd %xmm0, %eax
; SSE3-NEXT:    pextrw $1, %xmm0, %edx
; SSE3-NEXT:    pextrw $2, %xmm0, %ecx
; SSE3-NEXT:    # kill: def $ax killed $ax killed $eax
; SSE3-NEXT:    # kill: def $dx killed $dx killed $edx
; SSE3-NEXT:    # kill: def $cx killed $cx killed $ecx
; SSE3-NEXT:    retl
;
; SSE41-LABEL: sext_i8:
; SSE41:       # %bb.0:
; SSE41-NEXT:    movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SSE41-NEXT:    pinsrb $1, {{[0-9]+}}(%esp), %xmm0
; SSE41-NEXT:    pinsrb $2, {{[0-9]+}}(%esp), %xmm0
; SSE41-NEXT:    pmovsxbw %xmm0, %xmm0
; SSE41-NEXT:    movd %xmm0, %eax
; SSE41-NEXT:    pextrw $1, %xmm0, %edx
; SSE41-NEXT:    pextrw $2, %xmm0, %ecx
; SSE41-NEXT:    # kill: def $ax killed $ax killed $eax
; SSE41-NEXT:    # kill: def $dx killed $dx killed $edx
; SSE41-NEXT:    # kill: def $cx killed $cx killed $ecx
; SSE41-NEXT:    retl
;
; AVX-32-LABEL: sext_i8:
; AVX-32:       # %bb.0:
; AVX-32-NEXT:    vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; AVX-32-NEXT:    vpinsrb $1, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX-32-NEXT:    vpinsrb $2, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX-32-NEXT:    vpmovsxbw %xmm0, %xmm0
; AVX-32-NEXT:    vmovd %xmm0, %eax
; AVX-32-NEXT:    vpextrw $1, %xmm0, %edx
; AVX-32-NEXT:    vpextrw $2, %xmm0, %ecx
; AVX-32-NEXT:    # kill: def $ax killed $ax killed $eax
; AVX-32-NEXT:    # kill: def $dx killed $dx killed $edx
; AVX-32-NEXT:    # kill: def $cx killed $cx killed $ecx
; AVX-32-NEXT:    retl
;
; AVX-64-LABEL: sext_i8:
; AVX-64:       # %bb.0:
; AVX-64-NEXT:    vmovd %edi, %xmm0
; AVX-64-NEXT:    vpinsrb $1, %esi, %xmm0, %xmm0
; AVX-64-NEXT:    vpinsrb $2, %edx, %xmm0, %xmm0
; AVX-64-NEXT:    vpmovsxbw %xmm0, %xmm0
; AVX-64-NEXT:    vmovd %xmm0, %eax
; AVX-64-NEXT:    vpextrw $1, %xmm0, %edx
; AVX-64-NEXT:    vpextrw $2, %xmm0, %ecx
; AVX-64-NEXT:    # kill: def $ax killed $ax killed $eax
; AVX-64-NEXT:    # kill: def $dx killed $dx killed $edx
; AVX-64-NEXT:    # kill: def $cx killed $cx killed $ecx
; AVX-64-NEXT:    retq
  %2 = sext <3 x i8> %0 to <3 x i16>
  ret <3 x i16> %2
}