Lines matching refs: xmm2
16 ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,2,2,3]
26 ; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,2,2,3]
36 ; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,2,2,3]
42 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm2
43 ; AVX1-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,0,2]
45 ; AVX1-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3],xmm2[4,5,6,7]
46 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm2
47 ; AVX1-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,0,2]
49 ; AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm2[4,5,6,7]
77 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
83 ; SSE2-NEXT: pextrw $4, %xmm2, %eax
85 ; SSE2-NEXT: movd %ecx, %xmm2
86 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
87 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
88 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
96 ; SSSE3-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
102 ; SSSE3-NEXT: pextrw $4, %xmm2, %eax
104 ; SSSE3-NEXT: movd %ecx, %xmm2
105 ; SSSE3-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
106 ; SSSE3-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
107 ; SSSE3-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
114 ; SSE41-NEXT: pblendw {{.*#+}} xmm2 = xmm2[0],xmm4[1,2,3],xmm2[4],xmm4[5,6,7]
115 ; SSE41-NEXT: packusdw %xmm3, %xmm2
119 ; SSE41-NEXT: packusdw %xmm2, %xmm0
124 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm2
126 ; AVX1-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm3[1,2,3],xmm2[4],xmm3[5,6,7]
128 ; AVX1-NEXT: vpackusdw %xmm2, %xmm1, %xmm1
129 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm2
130 ; AVX1-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm3[1,2,3],xmm2[4],xmm3[5,6,7]
132 ; AVX1-NEXT: vpackusdw %xmm2, %xmm0, %xmm0
164 ; SSE-NEXT: pand %xmm4, %xmm2
165 ; SSE-NEXT: packuswb %xmm3, %xmm2
169 ; SSE-NEXT: packuswb %xmm2, %xmm0
176 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm2
178 ; AVX1-NEXT: vandps %xmm3, %xmm2, %xmm2
180 ; AVX1-NEXT: vpackuswb %xmm2, %xmm1, %xmm1
181 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm2
182 ; AVX1-NEXT: vandps %xmm3, %xmm2, %xmm2
184 ; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
197 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm2 = <0,4,8,12,u,u,u,u,u,u,u,u,u,u,u,u>
198 ; AVX2-NEXT: vpshufb %xmm2, %xmm1, %xmm1
199 ; AVX2-NEXT: vpshufb %xmm2, %xmm0, %xmm0
227 ; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15]
228 ; SSSE3-NEXT: pshufb %xmm2, %xmm1
229 ; SSSE3-NEXT: pshufb %xmm2, %xmm0
235 ; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15]
236 ; SSE41-NEXT: pshufb %xmm2, %xmm1
237 ; SSE41-NEXT: pshufb %xmm2, %xmm0
244 ; AVX1-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15]
245 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm1
246 ; AVX1-NEXT: vpshufb %xmm2, %xmm0, %xmm0
273 ; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [255,0,0,0,255,0,0,0,255,0,0,0,255,0,0,0]
274 ; SSE2-NEXT: pand %xmm2, %xmm1
275 ; SSE2-NEXT: pand %xmm2, %xmm0
283 ; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = <0,4,8,12,u,u,u,u,u,u,u,u,u,u,u,u>
284 ; SSSE3-NEXT: pshufb %xmm2, %xmm1
285 ; SSSE3-NEXT: pshufb %xmm2, %xmm0
292 ; SSE41-NEXT: movdqa {{.*#+}} xmm2 = <0,4,8,12,u,u,u,u,u,u,u,u,u,u,u,u>
293 ; SSE41-NEXT: pshufb %xmm2, %xmm1
294 ; SSE41-NEXT: pshufb %xmm2, %xmm0
302 ; AVX1-NEXT: vmovdqa {{.*#+}} xmm2 = <0,4,8,12,u,u,u,u,u,u,u,u,u,u,u,u>
303 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm1
304 ; AVX1-NEXT: vpshufb %xmm2, %xmm0, %xmm0
337 ; SSE-NEXT: pand %xmm4, %xmm2
338 ; SSE-NEXT: packuswb %xmm3, %xmm2
342 ; SSE-NEXT: packuswb %xmm2, %xmm0
348 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm2
350 ; AVX1-NEXT: vandps %xmm3, %xmm2, %xmm2
352 ; AVX1-NEXT: vpackuswb %xmm2, %xmm1, %xmm1
353 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm2
354 ; AVX1-NEXT: vandps %xmm3, %xmm2, %xmm2
356 ; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
394 ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,2,2,3]
404 ; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,2,2,3]
414 ; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,2,2,3]
420 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm2
421 ; AVX1-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,0,2]
423 ; AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm2[4,5,6,7]
424 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm2
425 ; AVX1-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,0,2]
427 ; AVX1-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3],xmm2[4,5,6,7]
461 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
467 ; SSE2-NEXT: pextrw $4, %xmm2, %eax
469 ; SSE2-NEXT: movd %ecx, %xmm2
470 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
471 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
472 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
480 ; SSSE3-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
486 ; SSSE3-NEXT: pextrw $4, %xmm2, %eax
488 ; SSSE3-NEXT: movd %ecx, %xmm2
489 ; SSSE3-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
490 ; SSSE3-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
491 ; SSSE3-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
502 ; SSE41-NEXT: movd %xmm2, %eax
504 ; SSE41-NEXT: pextrw $4, %xmm2, %eax
514 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm2
515 ; AVX1-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,0,2]
517 ; AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm2[4,5,6,7]
518 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm2
519 ; AVX1-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,0,2]
521 ; AVX1-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3],xmm2[4,5,6,7]
522 ; AVX1-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15]
523 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm1
524 ; AVX1-NEXT: vpshufb %xmm2, %xmm0, %xmm0
535 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15]
536 ; AVX2-NEXT: vpshufb %xmm2, %xmm1, %xmm1
537 ; AVX2-NEXT: vpshufb %xmm2, %xmm0, %xmm0
548 ; AVX512BW-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15]
549 ; AVX512BW-NEXT: vpshufb %xmm2, %xmm1, %xmm1
550 ; AVX512BW-NEXT: vpshufb %xmm2, %xmm0, %xmm0
647 ; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15]
648 ; SSSE3-NEXT: pshufb %xmm2, %xmm1
649 ; SSSE3-NEXT: pshufb %xmm2, %xmm0
655 ; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15]
656 ; SSE41-NEXT: pshufb %xmm2, %xmm1
657 ; SSE41-NEXT: pshufb %xmm2, %xmm0
663 ; AVX-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15]
664 ; AVX-NEXT: vpshufb %xmm2, %xmm1, %xmm1
665 ; AVX-NEXT: vpshufb %xmm2, %xmm0, %xmm0
671 ; AVX512BW-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15]
672 ; AVX512BW-NEXT: vpshufb %xmm2, %xmm1, %xmm1
673 ; AVX512BW-NEXT: vpshufb %xmm2, %xmm0, %xmm0
725 ; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,255,255,255,255,255]
726 ; SSE2-NEXT: pand %xmm2, %xmm1
727 ; SSE2-NEXT: pand %xmm2, %xmm0
733 ; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = <0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u>
734 ; SSSE3-NEXT: pshufb %xmm2, %xmm1
735 ; SSSE3-NEXT: pshufb %xmm2, %xmm0
741 ; SSE41-NEXT: movdqa {{.*#+}} xmm2 = <0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u>
742 ; SSE41-NEXT: pshufb %xmm2, %xmm1
743 ; SSE41-NEXT: pshufb %xmm2, %xmm0
749 ; AVX-NEXT: vmovdqa {{.*#+}} xmm2 = <0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u>
750 ; AVX-NEXT: vpshufb %xmm2, %xmm1, %xmm1
751 ; AVX-NEXT: vpshufb %xmm2, %xmm0, %xmm0
757 ; AVX512BW-NEXT: vmovdqa {{.*#+}} xmm2 = <0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u>
758 ; AVX512BW-NEXT: vpshufb %xmm2, %xmm1, %xmm1
759 ; AVX512BW-NEXT: vpshufb %xmm2, %xmm0, %xmm0