; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=ANY,SSE,SSE2
; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse4.1 | FileCheck %s --check-prefixes=ANY,SSE,SSE4,SSE41
; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse4.2 | FileCheck %s --check-prefixes=ANY,SSE,SSE4,SSE42
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=ANY,AVX,AVX2
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=ANY,AVX,AVX512

; There are at least 3 potential patterns corresponding to an unsigned saturated add: min, cmp with sum, cmp with not.
; Test each of those patterns with i8/i16/i32/i64.
; Test each of those with a constant operand and a variable operand.
; Test each of those with a 128-bit vector type.

define i8 @unsigned_sat_constant_i8_using_min(i8 %x) {
; ANY-LABEL: unsigned_sat_constant_i8_using_min:
; ANY:       # %bb.0:
; ANY-NEXT:    cmpb $-43, %dil
; ANY-NEXT:    movl $213, %eax
; ANY-NEXT:    cmovbl %edi, %eax
; ANY-NEXT:    addb $42, %al
; ANY-NEXT:    # kill: def $al killed $al killed $eax
; ANY-NEXT:    retq
  %c = icmp ult i8 %x, -43
  %s = select i1 %c, i8 %x, i8 -43
  %r = add i8 %s, 42
  ret i8 %r
}

define i8 @unsigned_sat_constant_i8_using_cmp_sum(i8 %x) {
; ANY-LABEL: unsigned_sat_constant_i8_using_cmp_sum:
; ANY:       # %bb.0:
; ANY-NEXT:    addb $42, %dil
; ANY-NEXT:    movzbl %dil, %ecx
; ANY-NEXT:    movl $255, %eax
; ANY-NEXT:    cmovael %ecx, %eax
; ANY-NEXT:    # kill: def $al killed $al killed $eax
; ANY-NEXT:    retq
  %a = add i8 %x, 42
  %c = icmp ugt i8 %x, %a
  %r = select i1 %c, i8 -1, i8 %a
  ret i8 %r
}

define i8 @unsigned_sat_constant_i8_using_cmp_notval(i8 %x) {
; ANY-LABEL: unsigned_sat_constant_i8_using_cmp_notval:
; ANY:       # %bb.0:
; ANY-NEXT:    addb $42, %dil
; ANY-NEXT:    movzbl %dil, %ecx
; ANY-NEXT:    movl $255, %eax
; ANY-NEXT:    cmovael %ecx, %eax
; ANY-NEXT:    # kill: def $al killed $al killed $eax
; ANY-NEXT:    retq
  %a = add i8 %x, 42
  %c = icmp ugt i8 %x, -43
  %r = select i1 %c, i8 -1, i8 %a
  ret i8 %r
}

define i16 @unsigned_sat_constant_i16_using_min(i16 %x) {
; ANY-LABEL: unsigned_sat_constant_i16_using_min:
; ANY:       # %bb.0:
; ANY-NEXT:    cmpw $-43, %di
; ANY-NEXT:    movl $65493, %eax # imm = 0xFFD5
; ANY-NEXT:    cmovbl %edi, %eax
; ANY-NEXT:    addl $42, %eax
; ANY-NEXT:    # kill: def $ax killed $ax killed $eax
; ANY-NEXT:    retq
  %c = icmp ult i16 %x, -43
  %s = select i1 %c, i16 %x, i16 -43
  %r = add i16 %s, 42
  ret i16 %r
}

define i16 @unsigned_sat_constant_i16_using_cmp_sum(i16 %x) {
; ANY-LABEL: unsigned_sat_constant_i16_using_cmp_sum:
; ANY:       # %bb.0:
; ANY-NEXT:    addw $42, %di
; ANY-NEXT:    movl $65535, %eax # imm = 0xFFFF
; ANY-NEXT:    cmovael %edi, %eax
; ANY-NEXT:    # kill: def $ax killed $ax killed $eax
; ANY-NEXT:    retq
  %a = add i16 %x, 42
  %c = icmp ugt i16 %x, %a
  %r = select i1 %c, i16 -1, i16 %a
  ret i16 %r
}

define i16 @unsigned_sat_constant_i16_using_cmp_notval(i16 %x) {
; ANY-LABEL: unsigned_sat_constant_i16_using_cmp_notval:
; ANY:       # %bb.0:
; ANY-NEXT:    addw $42, %di
; ANY-NEXT:    movl $65535, %eax # imm = 0xFFFF
; ANY-NEXT:    cmovael %edi, %eax
; ANY-NEXT:    # kill: def $ax killed $ax killed $eax
; ANY-NEXT:    retq
  %a = add i16 %x, 42
  %c = icmp ugt i16 %x, -43
  %r = select i1 %c, i16 -1, i16 %a
  ret i16 %r
}

define i32 @unsigned_sat_constant_i32_using_min(i32 %x) {
; ANY-LABEL: unsigned_sat_constant_i32_using_min:
; ANY:       # %bb.0:
; ANY-NEXT:    cmpl $-43, %edi
; ANY-NEXT:    movl $-43, %eax
; ANY-NEXT:    cmovbl %edi, %eax
; ANY-NEXT:    addl $42, %eax
; ANY-NEXT:    retq
  %c = icmp ult i32 %x, -43
  %s = select i1 %c, i32 %x, i32 -43
  %r = add i32 %s, 42
  ret i32 %r
}
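
; Illustrative reference (not part of the original test): all three patterns
; above compute umin(x, -43) + 42, i.e. an add whose result is clamped at
; UINT_MAX, which is what the generic @llvm.uadd.sat intrinsic expresses
; directly. A minimal sketch; the function name @uadd_sat_i32_ref is
; hypothetical.

declare i32 @llvm.uadd.sat.i32(i32, i32)

define i32 @uadd_sat_i32_ref(i32 %x) {
  ; saturating x + 42: returns -1 (UINT_MAX) instead of wrapping
  %r = call i32 @llvm.uadd.sat.i32(i32 %x, i32 42)
  ret i32 %r
}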

define i32 @unsigned_sat_constant_i32_using_cmp_sum(i32 %x) {
; ANY-LABEL: unsigned_sat_constant_i32_using_cmp_sum:
; ANY:       # %bb.0:
; ANY-NEXT:    addl $42, %edi
; ANY-NEXT:    movl $-1, %eax
; ANY-NEXT:    cmovael %edi, %eax
; ANY-NEXT:    retq
  %a = add i32 %x, 42
  %c = icmp ugt i32 %x, %a
  %r = select i1 %c, i32 -1, i32 %a
  ret i32 %r
}

define i32 @unsigned_sat_constant_i32_using_cmp_notval(i32 %x) {
; ANY-LABEL: unsigned_sat_constant_i32_using_cmp_notval:
; ANY:       # %bb.0:
; ANY-NEXT:    addl $42, %edi
; ANY-NEXT:    movl $-1, %eax
; ANY-NEXT:    cmovael %edi, %eax
; ANY-NEXT:    retq
  %a = add i32 %x, 42
  %c = icmp ugt i32 %x, -43
  %r = select i1 %c, i32 -1, i32 %a
  ret i32 %r
}

define i64 @unsigned_sat_constant_i64_using_min(i64 %x) {
; ANY-LABEL: unsigned_sat_constant_i64_using_min:
; ANY:       # %bb.0:
; ANY-NEXT:    cmpq $-43, %rdi
; ANY-NEXT:    movq $-43, %rax
; ANY-NEXT:    cmovbq %rdi, %rax
; ANY-NEXT:    addq $42, %rax
; ANY-NEXT:    retq
  %c = icmp ult i64 %x, -43
  %s = select i1 %c, i64 %x, i64 -43
  %r = add i64 %s, 42
  ret i64 %r
}

define i64 @unsigned_sat_constant_i64_using_cmp_sum(i64 %x) {
; ANY-LABEL: unsigned_sat_constant_i64_using_cmp_sum:
; ANY:       # %bb.0:
; ANY-NEXT:    addq $42, %rdi
; ANY-NEXT:    movq $-1, %rax
; ANY-NEXT:    cmovaeq %rdi, %rax
; ANY-NEXT:    retq
  %a = add i64 %x, 42
  %c = icmp ugt i64 %x, %a
  %r = select i1 %c, i64 -1, i64 %a
  ret i64 %r
}

define i64 @unsigned_sat_constant_i64_using_cmp_notval(i64 %x) {
; ANY-LABEL: unsigned_sat_constant_i64_using_cmp_notval:
; ANY:       # %bb.0:
; ANY-NEXT:    addq $42, %rdi
; ANY-NEXT:    movq $-1, %rax
; ANY-NEXT:    cmovaeq %rdi, %rax
; ANY-NEXT:    retq
  %a = add i64 %x, 42
  %c = icmp ugt i64 %x, -43
  %r = select i1 %c, i64 -1, i64 %a
  ret i64 %r
}

define i8 @unsigned_sat_variable_i8_using_min(i8 %x, i8 %y) {
; ANY-LABEL: unsigned_sat_variable_i8_using_min:
; ANY:       # %bb.0:
; ANY-NEXT:    movl %esi, %eax
; ANY-NEXT:    notb %al
; ANY-NEXT:    cmpb %al, %dil
; ANY-NEXT:    movzbl %al, %eax
; ANY-NEXT:    cmovbl %edi, %eax
; ANY-NEXT:    addb %sil, %al
; ANY-NEXT:    # kill: def $al killed $al killed $eax
; ANY-NEXT:    retq
  %noty = xor i8 %y, -1
  %c = icmp ult i8 %x, %noty
  %s = select i1 %c, i8 %x, i8 %noty
  %r = add i8 %s, %y
  ret i8 %r
}

define i8 @unsigned_sat_variable_i8_using_cmp_sum(i8 %x, i8 %y) {
; ANY-LABEL: unsigned_sat_variable_i8_using_cmp_sum:
; ANY:       # %bb.0:
; ANY-NEXT:    addb %sil, %dil
; ANY-NEXT:    movzbl %dil, %ecx
; ANY-NEXT:    movl $255, %eax
; ANY-NEXT:    cmovael %ecx, %eax
; ANY-NEXT:    # kill: def $al killed $al killed $eax
; ANY-NEXT:    retq
  %a = add i8 %x, %y
  %c = icmp ugt i8 %x, %a
  %r = select i1 %c, i8 -1, i8 %a
  ret i8 %r
}

define i8 @unsigned_sat_variable_i8_using_cmp_notval(i8 %x, i8 %y) {
; ANY-LABEL: unsigned_sat_variable_i8_using_cmp_notval:
; ANY:       # %bb.0:
; ANY-NEXT:    addb %dil, %sil
; ANY-NEXT:    movzbl %sil, %ecx
; ANY-NEXT:    movl $255, %eax
; ANY-NEXT:    cmovael %ecx, %eax
; ANY-NEXT:    # kill: def $al killed $al killed $eax
; ANY-NEXT:    retq
  %noty = xor i8 %y, -1
  %a = add i8 %x, %y
  %c = icmp ugt i8 %x, %noty
  %r = select i1 %c, i8 -1, i8 %a
  ret i8 %r
}

define i16 @unsigned_sat_variable_i16_using_min(i16 %x, i16 %y) {
; ANY-LABEL: unsigned_sat_variable_i16_using_min:
; ANY:       # %bb.0:
; ANY-NEXT:    # kill: def $esi killed $esi def $rsi
; ANY-NEXT:    movl %esi, %eax
; ANY-NEXT:    notl %eax
; ANY-NEXT:    cmpw %ax, %di
; ANY-NEXT:    cmovbl %edi, %eax
; ANY-NEXT:    addl %esi, %eax
; ANY-NEXT:    # kill: def $ax killed $ax killed $eax
; ANY-NEXT:    retq
  %noty = xor i16 %y, -1
  %c = icmp ult i16 %x, %noty
  %s = select i1 %c, i16 %x, i16 %noty
  %r = add i16 %s, %y
  ret i16 %r
}

define i16 @unsigned_sat_variable_i16_using_cmp_sum(i16 %x, i16 %y) {
; ANY-LABEL: unsigned_sat_variable_i16_using_cmp_sum:
; ANY:       # %bb.0:
; ANY-NEXT:    addw %si, %di
; ANY-NEXT:    movl $65535, %eax # imm = 0xFFFF
; ANY-NEXT:    cmovael %edi, %eax
; ANY-NEXT:    # kill: def $ax killed $ax killed $eax
; ANY-NEXT:    retq
  %a = add i16 %x, %y
  %c = icmp ugt i16 %x, %a
  %r = select i1 %c, i16 -1, i16 %a
  ret i16 %r
}

define i16 @unsigned_sat_variable_i16_using_cmp_notval(i16 %x, i16 %y) {
; ANY-LABEL: unsigned_sat_variable_i16_using_cmp_notval:
; ANY:       # %bb.0:
; ANY-NEXT:    addw %di, %si
; ANY-NEXT:    movl $65535, %eax # imm = 0xFFFF
; ANY-NEXT:    cmovael %esi, %eax
; ANY-NEXT:    # kill: def $ax killed $ax killed $eax
; ANY-NEXT:    retq
  %noty = xor i16 %y, -1
  %a = add i16 %x, %y
  %c = icmp ugt i16 %x, %noty
  %r = select i1 %c, i16 -1, i16 %a
  ret i16 %r
}

define i32 @unsigned_sat_variable_i32_using_min(i32 %x, i32 %y) {
; ANY-LABEL: unsigned_sat_variable_i32_using_min:
; ANY:       # %bb.0:
; ANY-NEXT:    # kill: def $esi killed $esi def $rsi
; ANY-NEXT:    movl %esi, %eax
; ANY-NEXT:    notl %eax
; ANY-NEXT:    cmpl %eax, %edi
; ANY-NEXT:    cmovbl %edi, %eax
; ANY-NEXT:    addl %esi, %eax
; ANY-NEXT:    retq
  %noty = xor i32 %y, -1
  %c = icmp ult i32 %x, %noty
  %s = select i1 %c, i32 %x, i32 %noty
  %r = add i32 %s, %y
  ret i32 %r
}

define i32 @unsigned_sat_variable_i32_using_cmp_sum(i32 %x, i32 %y) {
; ANY-LABEL: unsigned_sat_variable_i32_using_cmp_sum:
; ANY:       # %bb.0:
; ANY-NEXT:    addl %esi, %edi
; ANY-NEXT:    movl $-1, %eax
; ANY-NEXT:    cmovael %edi, %eax
; ANY-NEXT:    retq
  %a = add i32 %x, %y
  %c = icmp ugt i32 %x, %a
  %r = select i1 %c, i32 -1, i32 %a
  ret i32 %r
}

define i32 @unsigned_sat_variable_i32_using_cmp_notval(i32 %x, i32 %y) {
; ANY-LABEL: unsigned_sat_variable_i32_using_cmp_notval:
; ANY:       # %bb.0:
; ANY-NEXT:    addl %esi, %edi
; ANY-NEXT:    movl $-1, %eax
; ANY-NEXT:    cmovael %edi, %eax
; ANY-NEXT:    retq
  %noty = xor i32 %y, -1
  %a = add i32 %x, %y
  %c = icmp ugt i32 %x, %noty
  %r = select i1 %c, i32 -1, i32 %a
  ret i32 %r
}

define i64 @unsigned_sat_variable_i64_using_min(i64 %x, i64 %y) {
; ANY-LABEL: unsigned_sat_variable_i64_using_min:
; ANY:       # %bb.0:
; ANY-NEXT:    movq %rsi, %rax
; ANY-NEXT:    notq %rax
; ANY-NEXT:    cmpq %rax, %rdi
; ANY-NEXT:    cmovbq %rdi, %rax
; ANY-NEXT:    addq %rsi, %rax
; ANY-NEXT:    retq
  %noty = xor i64 %y, -1
  %c = icmp ult i64 %x, %noty
  %s = select i1 %c, i64 %x, i64 %noty
  %r = add i64 %s, %y
  ret i64 %r
}

define i64 @unsigned_sat_variable_i64_using_cmp_sum(i64 %x, i64 %y) {
; ANY-LABEL: unsigned_sat_variable_i64_using_cmp_sum:
; ANY:       # %bb.0:
; ANY-NEXT:    addq %rsi, %rdi
; ANY-NEXT:    movq $-1, %rax
; ANY-NEXT:    cmovaeq %rdi, %rax
; ANY-NEXT:    retq
  %a = add i64 %x, %y
  %c = icmp ugt i64 %x, %a
  %r = select i1 %c, i64 -1, i64 %a
  ret i64 %r
}

define i64 @unsigned_sat_variable_i64_using_cmp_notval(i64 %x, i64 %y) {
; ANY-LABEL: unsigned_sat_variable_i64_using_cmp_notval:
; ANY:       # %bb.0:
; ANY-NEXT:    addq %rsi, %rdi
; ANY-NEXT:    movq $-1, %rax
; ANY-NEXT:    cmovaeq %rdi, %rax
; ANY-NEXT:    retq
  %noty = xor i64 %y, -1
  %a = add i64 %x, %y
  %c = icmp ugt i64 %x, %noty
  %r = select i1 %c, i64 -1, i64 %a
  ret i64 %r
}

define <16 x i8> @unsigned_sat_constant_v16i8_using_min(<16 x i8> %x) {
; SSE-LABEL: unsigned_sat_constant_v16i8_using_min:
; SSE:       # %bb.0:
; SSE-NEXT:    pminub {{.*}}(%rip), %xmm0
; SSE-NEXT:    paddb {{.*}}(%rip), %xmm0
; SSE-NEXT:    retq
;
; AVX-LABEL: unsigned_sat_constant_v16i8_using_min:
; AVX:       # %bb.0:
; AVX-NEXT:    vpminub {{.*}}(%rip), %xmm0, %xmm0
; AVX-NEXT:    vpaddb {{.*}}(%rip), %xmm0, %xmm0
; AVX-NEXT:    retq
  %c = icmp ult <16 x i8> %x, <i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43>
  %s = select <16 x i1> %c, <16 x i8> %x, <16 x i8> <i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43>
  %r = add <16 x i8> %s, <i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42>
  ret <16 x i8> %r
}
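
; Illustrative reference (not part of the original test): the 128-bit vector
; tests around this point are the same three patterns applied element-wise,
; and the saturating-add intrinsic has matching vector forms; for <16 x i8>
; it can lower to a single paddusb. The function name @uadd_sat_v16i8_ref is
; hypothetical.

declare <16 x i8> @llvm.uadd.sat.v16i8(<16 x i8>, <16 x i8>)

define <16 x i8> @uadd_sat_v16i8_ref(<16 x i8> %x, <16 x i8> %y) {
  ; lanewise unsigned saturating add, each lane clamped at 255
  %r = call <16 x i8> @llvm.uadd.sat.v16i8(<16 x i8> %x, <16 x i8> %y)
  ret <16 x i8> %r
}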

define <16 x i8> @unsigned_sat_constant_v16i8_using_cmp_sum(<16 x i8> %x) {
; SSE-LABEL: unsigned_sat_constant_v16i8_using_cmp_sum:
; SSE:       # %bb.0:
; SSE-NEXT:    paddusb {{.*}}(%rip), %xmm0
; SSE-NEXT:    retq
;
; AVX-LABEL: unsigned_sat_constant_v16i8_using_cmp_sum:
; AVX:       # %bb.0:
; AVX-NEXT:    vpaddusb {{.*}}(%rip), %xmm0, %xmm0
; AVX-NEXT:    retq
  %a = add <16 x i8> %x, <i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42>
  %c = icmp ugt <16 x i8> %x, %a
  %r = select <16 x i1> %c, <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>, <16 x i8> %a
  ret <16 x i8> %r
}

define <16 x i8> @unsigned_sat_constant_v16i8_using_cmp_notval(<16 x i8> %x) {
; SSE-LABEL: unsigned_sat_constant_v16i8_using_cmp_notval:
; SSE:       # %bb.0:
; SSE-NEXT:    paddusb {{.*}}(%rip), %xmm0
; SSE-NEXT:    retq
;
; AVX-LABEL: unsigned_sat_constant_v16i8_using_cmp_notval:
; AVX:       # %bb.0:
; AVX-NEXT:    vpaddusb {{.*}}(%rip), %xmm0, %xmm0
; AVX-NEXT:    retq
  %a = add <16 x i8> %x, <i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42>
  %c = icmp ugt <16 x i8> %x, <i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43>
  %r = select <16 x i1> %c, <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>, <16 x i8> %a
  ret <16 x i8> %r
}

define <8 x i16> @unsigned_sat_constant_v8i16_using_min(<8 x i16> %x) {
; SSE2-LABEL: unsigned_sat_constant_v8i16_using_min:
; SSE2:       # %bb.0:
; SSE2-NEXT:    movdqa %xmm0, %xmm1
; SSE2-NEXT:    psubusw {{.*}}(%rip), %xmm1
; SSE2-NEXT:    psubw %xmm1, %xmm0
; SSE2-NEXT:    paddw {{.*}}(%rip), %xmm0
; SSE2-NEXT:    retq
;
; SSE4-LABEL: unsigned_sat_constant_v8i16_using_min:
; SSE4:       # %bb.0:
; SSE4-NEXT:    pminuw {{.*}}(%rip), %xmm0
; SSE4-NEXT:    paddw {{.*}}(%rip), %xmm0
; SSE4-NEXT:    retq
;
; AVX-LABEL: unsigned_sat_constant_v8i16_using_min:
; AVX:       # %bb.0:
; AVX-NEXT:    vpminuw {{.*}}(%rip), %xmm0, %xmm0
; AVX-NEXT:    vpaddw {{.*}}(%rip), %xmm0, %xmm0
; AVX-NEXT:    retq
  %c = icmp ult <8 x i16> %x, <i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43>
  %s = select <8 x i1> %c, <8 x i16> %x, <8 x i16> <i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43>
  %r = add <8 x i16> %s, <i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42>
  ret <8 x i16> %r
}

define <8 x i16> @unsigned_sat_constant_v8i16_using_cmp_sum(<8 x i16> %x) {
; SSE-LABEL: unsigned_sat_constant_v8i16_using_cmp_sum:
; SSE:       # %bb.0:
; SSE-NEXT:    paddusw {{.*}}(%rip), %xmm0
; SSE-NEXT:    retq
;
; AVX-LABEL: unsigned_sat_constant_v8i16_using_cmp_sum:
; AVX:       # %bb.0:
; AVX-NEXT:    vpaddusw {{.*}}(%rip), %xmm0, %xmm0
; AVX-NEXT:    retq
  %a = add <8 x i16> %x, <i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42>
  %c = icmp ugt <8 x i16> %x, %a
  %r = select <8 x i1> %c, <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>, <8 x i16> %a
  ret <8 x i16> %r
}

define <8 x i16> @unsigned_sat_constant_v8i16_using_cmp_notval(<8 x i16> %x) {
; SSE-LABEL: unsigned_sat_constant_v8i16_using_cmp_notval:
; SSE:       # %bb.0:
; SSE-NEXT:    paddusw {{.*}}(%rip), %xmm0
; SSE-NEXT:    retq
;
; AVX-LABEL: unsigned_sat_constant_v8i16_using_cmp_notval:
; AVX:       # %bb.0:
; AVX-NEXT:    vpaddusw {{.*}}(%rip), %xmm0, %xmm0
; AVX-NEXT:    retq
  %a = add <8 x i16> %x, <i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42>
  %c = icmp ugt <8 x i16> %x, <i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43>
  %r = select <8 x i1> %c, <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>, <8 x i16> %a
  ret <8 x i16> %r
}

define <4 x i32> @unsigned_sat_constant_v4i32_using_min(<4 x i32> %x) {
; SSE2-LABEL: unsigned_sat_constant_v4i32_using_min:
; SSE2:       # %bb.0:
; SSE2-NEXT:    movdqa {{.*#+}} xmm1 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT:    pxor %xmm0, %xmm1
; SSE2-NEXT:    movdqa {{.*#+}} xmm2 = [2147483605,2147483605,2147483605,2147483605]
; SSE2-NEXT:    pcmpgtd %xmm1, %xmm2
; SSE2-NEXT:    pand %xmm2, %xmm0
; SSE2-NEXT:    pandn {{.*}}(%rip), %xmm2
; SSE2-NEXT:    por %xmm2, %xmm0
; SSE2-NEXT:    paddd {{.*}}(%rip), %xmm0
; SSE2-NEXT:    retq
;
; SSE4-LABEL: unsigned_sat_constant_v4i32_using_min:
; SSE4:       # %bb.0:
; SSE4-NEXT:    pminud {{.*}}(%rip), %xmm0
; SSE4-NEXT:    paddd {{.*}}(%rip), %xmm0
; SSE4-NEXT:    retq
;
; AVX2-LABEL: unsigned_sat_constant_v4i32_using_min:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vpbroadcastd {{.*#+}} xmm1 = [4294967253,4294967253,4294967253,4294967253]
; AVX2-NEXT:    vpminud %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    vpbroadcastd {{.*#+}} xmm1 = [42,42,42,42]
; AVX2-NEXT:    vpaddd %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    retq
;
; AVX512-LABEL: unsigned_sat_constant_v4i32_using_min:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vpminud {{.*}}(%rip){1to4}, %xmm0, %xmm0
; AVX512-NEXT:    vpaddd {{.*}}(%rip){1to4}, %xmm0, %xmm0
; AVX512-NEXT:    retq
  %c = icmp ult <4 x i32> %x, <i32 -43, i32 -43, i32 -43, i32 -43>
  %s = select <4 x i1> %c, <4 x i32> %x, <4 x i32> <i32 -43, i32 -43, i32 -43, i32 -43>
  %r = add <4 x i32> %s, <i32 42, i32 42, i32 42, i32 42>
  ret <4 x i32> %r
}

define <4 x i32> @unsigned_sat_constant_v4i32_using_cmp_sum(<4 x i32> %x) {
; SSE2-LABEL: unsigned_sat_constant_v4i32_using_cmp_sum:
; SSE2:       # %bb.0:
; SSE2-NEXT:    movdqa {{.*#+}} xmm1 = [42,42,42,42]
; SSE2-NEXT:    paddd %xmm0, %xmm1
; SSE2-NEXT:    movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT:    pxor %xmm2, %xmm0
; SSE2-NEXT:    pxor %xmm1, %xmm2
; SSE2-NEXT:    pcmpgtd %xmm2, %xmm0
; SSE2-NEXT:    por %xmm1, %xmm0
; SSE2-NEXT:    retq
;
; SSE4-LABEL: unsigned_sat_constant_v4i32_using_cmp_sum:
; SSE4:       # %bb.0:
; SSE4-NEXT:    pminud {{.*}}(%rip), %xmm0
; SSE4-NEXT:    paddd {{.*}}(%rip), %xmm0
; SSE4-NEXT:    retq
;
; AVX2-LABEL: unsigned_sat_constant_v4i32_using_cmp_sum:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vpbroadcastd {{.*#+}} xmm1 = [42,42,42,42]
; AVX2-NEXT:    vpbroadcastd {{.*#+}} xmm2 = [4294967253,4294967253,4294967253,4294967253]
; AVX2-NEXT:    vpminud %xmm2, %xmm0, %xmm0
; AVX2-NEXT:    vpaddd %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    retq
;
; AVX512-LABEL: unsigned_sat_constant_v4i32_using_cmp_sum:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vpminud {{.*}}(%rip){1to4}, %xmm0, %xmm0
; AVX512-NEXT:    vpaddd {{.*}}(%rip){1to4}, %xmm0, %xmm0
; AVX512-NEXT:    retq
  %a = add <4 x i32> %x, <i32 42, i32 42, i32 42, i32 42>
  %c = icmp ugt <4 x i32> %x, %a
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}

define <4 x i32> @unsigned_sat_constant_v4i32_using_cmp_notval(<4 x i32> %x) {
; SSE2-LABEL: unsigned_sat_constant_v4i32_using_cmp_notval:
; SSE2:       # %bb.0:
; SSE2-NEXT:    movdqa {{.*#+}} xmm1 = [42,42,42,42]
; SSE2-NEXT:    paddd %xmm0, %xmm1
; SSE2-NEXT:    pxor {{.*}}(%rip), %xmm0
; SSE2-NEXT:    pcmpgtd {{.*}}(%rip), %xmm0
; SSE2-NEXT:    por %xmm1, %xmm0
; SSE2-NEXT:    retq
;
; SSE4-LABEL: unsigned_sat_constant_v4i32_using_cmp_notval:
; SSE4:       # %bb.0:
; SSE4-NEXT:    pminud {{.*}}(%rip), %xmm0
; SSE4-NEXT:    paddd {{.*}}(%rip), %xmm0
; SSE4-NEXT:    retq
;
; AVX2-LABEL: unsigned_sat_constant_v4i32_using_cmp_notval:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vpbroadcastd {{.*#+}} xmm1 = [42,42,42,42]
; AVX2-NEXT:    vpbroadcastd {{.*#+}} xmm2 = [4294967253,4294967253,4294967253,4294967253]
; AVX2-NEXT:    vpminud %xmm2, %xmm0, %xmm0
; AVX2-NEXT:    vpaddd %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    retq
;
; AVX512-LABEL: unsigned_sat_constant_v4i32_using_cmp_notval:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vpminud {{.*}}(%rip){1to4}, %xmm0, %xmm0
; AVX512-NEXT:    vpaddd {{.*}}(%rip){1to4}, %xmm0, %xmm0
; AVX512-NEXT:    retq
  %a = add <4 x i32> %x, <i32 42, i32 42, i32 42, i32 42>
  %c = icmp ugt <4 x i32> %x, <i32 -43, i32 -43, i32 -43, i32 -43>
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}
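
; Illustrative reference (not part of the original test): for a constant C,
; "x ugt ~C" is exactly the carry-out of x + C, which is why the cmp_notval
; pattern compiles to the same code as cmp_sum. A minimal scalar sketch of
; that identity; the function name @uadd_overflow_ref is hypothetical.

declare { i32, i1 } @llvm.uadd.with.overflow.i32(i32, i32)

define i1 @uadd_overflow_ref(i32 %x) {
  ; carry-out of x + 42 ...
  %ao = call { i32, i1 } @llvm.uadd.with.overflow.i32(i32 %x, i32 42)
  %ov = extractvalue { i32, i1 } %ao, 1
  ; ... equals the predicate "icmp ugt i32 %x, -43" used above (~42 == -43)
  ret i1 %ov
}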

define <4 x i32> @unsigned_sat_constant_v4i32_using_cmp_notval_nonsplat(<4 x i32> %x) {
; SSE2-LABEL: unsigned_sat_constant_v4i32_using_cmp_notval_nonsplat:
; SSE2:       # %bb.0:
; SSE2-NEXT:    movdqa {{.*#+}} xmm1 = [43,44,45,46]
; SSE2-NEXT:    paddd %xmm0, %xmm1
; SSE2-NEXT:    pxor {{.*}}(%rip), %xmm0
; SSE2-NEXT:    pcmpgtd {{.*}}(%rip), %xmm0
; SSE2-NEXT:    por %xmm1, %xmm0
; SSE2-NEXT:    retq
;
; SSE4-LABEL: unsigned_sat_constant_v4i32_using_cmp_notval_nonsplat:
; SSE4:       # %bb.0:
; SSE4-NEXT:    pminud {{.*}}(%rip), %xmm0
; SSE4-NEXT:    paddd {{.*}}(%rip), %xmm0
; SSE4-NEXT:    retq
;
; AVX-LABEL: unsigned_sat_constant_v4i32_using_cmp_notval_nonsplat:
; AVX:       # %bb.0:
; AVX-NEXT:    vpminud {{.*}}(%rip), %xmm0, %xmm0
; AVX-NEXT:    vpaddd {{.*}}(%rip), %xmm0, %xmm0
; AVX-NEXT:    retq
  %a = add <4 x i32> %x, <i32 43, i32 44, i32 45, i32 46>
  %c = icmp ugt <4 x i32> %x, <i32 -44, i32 -45, i32 -46, i32 -47>
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}

define <2 x i64> @unsigned_sat_constant_v2i64_using_min(<2 x i64> %x) {
; SSE2-LABEL: unsigned_sat_constant_v2i64_using_min:
; SSE2:       # %bb.0:
; SSE2-NEXT:    movdqa {{.*#+}} xmm1 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT:    pxor %xmm0, %xmm1
; SSE2-NEXT:    movdqa {{.*#+}} xmm2 = [9223372034707292117,9223372034707292117]
; SSE2-NEXT:    movdqa %xmm2, %xmm3
; SSE2-NEXT:    pcmpgtd %xmm1, %xmm3
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; SSE2-NEXT:    pcmpeqd %xmm2, %xmm1
; SSE2-NEXT:    pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
; SSE2-NEXT:    pand %xmm4, %xmm1
; SSE2-NEXT:    pshufd {{.*#+}} xmm2 = xmm3[1,1,3,3]
; SSE2-NEXT:    por %xmm1, %xmm2
; SSE2-NEXT:    pand %xmm2, %xmm0
; SSE2-NEXT:    pandn {{.*}}(%rip), %xmm2
; SSE2-NEXT:    por %xmm2, %xmm0
; SSE2-NEXT:    paddq {{.*}}(%rip), %xmm0
; SSE2-NEXT:    retq
;
; SSE41-LABEL: unsigned_sat_constant_v2i64_using_min:
; SSE41:       # %bb.0:
; SSE41-NEXT:    movdqa %xmm0, %xmm1
; SSE41-NEXT:    movapd {{.*#+}} xmm2 = [18446744073709551573,18446744073709551573]
; SSE41-NEXT:    movdqa {{.*#+}} xmm0 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT:    pxor %xmm1, %xmm0
; SSE41-NEXT:    movdqa {{.*#+}} xmm3 = [9223372034707292117,9223372034707292117]
; SSE41-NEXT:    movdqa %xmm3, %xmm4
; SSE41-NEXT:    pcmpeqd %xmm0, %xmm4
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm3
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm3[0,0,2,2]
; SSE41-NEXT:    pand %xmm4, %xmm0
; SSE41-NEXT:    por %xmm3, %xmm0
; SSE41-NEXT:    blendvpd %xmm0, %xmm1, %xmm2
; SSE41-NEXT:    paddq {{.*}}(%rip), %xmm2
; SSE41-NEXT:    movdqa %xmm2, %xmm0
; SSE41-NEXT:    retq
;
; SSE42-LABEL: unsigned_sat_constant_v2i64_using_min:
; SSE42:       # %bb.0:
; SSE42-NEXT:    movdqa %xmm0, %xmm1
; SSE42-NEXT:    movapd {{.*#+}} xmm2 = [18446744073709551573,18446744073709551573]
; SSE42-NEXT:    movdqa {{.*#+}} xmm3 = [9223372036854775808,9223372036854775808]
; SSE42-NEXT:    pxor %xmm0, %xmm3
; SSE42-NEXT:    movdqa {{.*#+}} xmm0 = [9223372036854775765,9223372036854775765]
; SSE42-NEXT:    pcmpgtq %xmm3, %xmm0
; SSE42-NEXT:    blendvpd %xmm0, %xmm1, %xmm2
; SSE42-NEXT:    paddq {{.*}}(%rip), %xmm2
; SSE42-NEXT:    movdqa %xmm2, %xmm0
; SSE42-NEXT:    retq
;
; AVX2-LABEL: unsigned_sat_constant_v2i64_using_min:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vmovapd {{.*#+}} xmm1 = [18446744073709551573,18446744073709551573]
; AVX2-NEXT:    vpxor {{.*}}(%rip), %xmm0, %xmm2
; AVX2-NEXT:    vmovdqa {{.*#+}} xmm3 = [9223372036854775765,9223372036854775765]
; AVX2-NEXT:    vpcmpgtq %xmm2, %xmm3, %xmm2
; AVX2-NEXT:    vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
; AVX2-NEXT:    vpaddq {{.*}}(%rip), %xmm0, %xmm0
; AVX2-NEXT:    retq
;
; AVX512-LABEL: unsigned_sat_constant_v2i64_using_min:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vpminuq {{.*}}(%rip), %xmm0, %xmm0
; AVX512-NEXT:    vpaddq {{.*}}(%rip), %xmm0, %xmm0
; AVX512-NEXT:    retq
  %c = icmp ult <2 x i64> %x, <i64 -43, i64 -43>
  %s = select <2 x i1> %c, <2 x i64> %x, <2 x i64> <i64 -43, i64 -43>
  %r = add <2 x i64> %s, <i64 42, i64 42>
  ret <2 x i64> %r
}

define <2 x i64> @unsigned_sat_constant_v2i64_using_cmp_sum(<2 x i64> %x) {
; SSE2-LABEL: unsigned_sat_constant_v2i64_using_cmp_sum:
; SSE2:       # %bb.0:
; SSE2-NEXT:    movdqa {{.*#+}} xmm1 = [42,42]
; SSE2-NEXT:    paddq %xmm0, %xmm1
; SSE2-NEXT:    movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT:    pxor %xmm2, %xmm0
; SSE2-NEXT:    pxor %xmm1, %xmm2
; SSE2-NEXT:    movdqa %xmm0, %xmm3
; SSE2-NEXT:    pcmpgtd %xmm2, %xmm3
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; SSE2-NEXT:    pcmpeqd %xmm0, %xmm2
; SSE2-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
; SSE2-NEXT:    pand %xmm4, %xmm2
; SSE2-NEXT:    pshufd {{.*#+}} xmm0 = xmm3[1,1,3,3]
; SSE2-NEXT:    por %xmm1, %xmm0
; SSE2-NEXT:    por %xmm2, %xmm0
; SSE2-NEXT:    retq
;
; SSE41-LABEL: unsigned_sat_constant_v2i64_using_cmp_sum:
; SSE41:       # %bb.0:
; SSE41-NEXT:    movdqa {{.*#+}} xmm1 = [42,42]
; SSE41-NEXT:    paddq %xmm0, %xmm1
; SSE41-NEXT:    movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT:    pxor %xmm2, %xmm0
; SSE41-NEXT:    pxor %xmm1, %xmm2
; SSE41-NEXT:    movdqa %xmm0, %xmm3
; SSE41-NEXT:    pcmpgtd %xmm2, %xmm3
; SSE41-NEXT:    pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; SSE41-NEXT:    pcmpeqd %xmm0, %xmm2
; SSE41-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
; SSE41-NEXT:    pand %xmm4, %xmm2
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm3[1,1,3,3]
; SSE41-NEXT:    por %xmm1, %xmm0
; SSE41-NEXT:    por %xmm2, %xmm0
; SSE41-NEXT:    retq
;
; SSE42-LABEL: unsigned_sat_constant_v2i64_using_cmp_sum:
; SSE42:       # %bb.0:
; SSE42-NEXT:    movdqa {{.*#+}} xmm2 = [9223372036854775808,9223372036854775808]
; SSE42-NEXT:    movdqa %xmm0, %xmm1
; SSE42-NEXT:    pxor %xmm2, %xmm1
; SSE42-NEXT:    paddq {{.*}}(%rip), %xmm0
; SSE42-NEXT:    pxor %xmm0, %xmm2
; SSE42-NEXT:    pcmpgtq %xmm2, %xmm1
; SSE42-NEXT:    por %xmm0, %xmm1
; SSE42-NEXT:    movdqa %xmm1, %xmm0
; SSE42-NEXT:    retq
;
; AVX2-LABEL: unsigned_sat_constant_v2i64_using_cmp_sum:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vmovdqa {{.*#+}} xmm1 = [9223372036854775808,9223372036854775808]
; AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm2
; AVX2-NEXT:    vpaddq {{.*}}(%rip), %xmm0, %xmm0
; AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpcmpgtq %xmm1, %xmm2, %xmm1
; AVX2-NEXT:    vpor %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    retq
;
; AVX512-LABEL: unsigned_sat_constant_v2i64_using_cmp_sum:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vpminuq {{.*}}(%rip), %xmm0, %xmm0
; AVX512-NEXT:    vpaddq {{.*}}(%rip), %xmm0, %xmm0
; AVX512-NEXT:    retq
  %a = add <2 x i64> %x, <i64 42, i64 42>
  %c = icmp ugt <2 x i64> %x, %a
  %r = select <2 x i1> %c, <2 x i64> <i64 -1, i64 -1>, <2 x i64> %a
  ret <2 x i64> %r
}

define <2 x i64> @unsigned_sat_constant_v2i64_using_cmp_notval(<2 x i64> %x) {
; SSE2-LABEL: unsigned_sat_constant_v2i64_using_cmp_notval:
; SSE2:       # %bb.0:
; SSE2-NEXT:    movdqa {{.*#+}} xmm1 = [42,42]
; SSE2-NEXT:    paddq %xmm0, %xmm1
; SSE2-NEXT:    pxor {{.*}}(%rip), %xmm0
; SSE2-NEXT:    movdqa {{.*#+}} xmm2 = [9223372034707292117,9223372034707292117]
; SSE2-NEXT:    movdqa %xmm0, %xmm3
; SSE2-NEXT:    pcmpgtd %xmm2, %xmm3
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; SSE2-NEXT:    pcmpeqd %xmm2, %xmm0
; SSE2-NEXT:    pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
; SSE2-NEXT:    pand %xmm4, %xmm2
; SSE2-NEXT:    pshufd {{.*#+}} xmm0 = xmm3[1,1,3,3]
; SSE2-NEXT:    por %xmm1, %xmm0
; SSE2-NEXT:    por %xmm2, %xmm0
; SSE2-NEXT:    retq
;
; SSE41-LABEL: unsigned_sat_constant_v2i64_using_cmp_notval:
; SSE41:       # %bb.0:
; SSE41-NEXT:    movdqa {{.*#+}} xmm1 = [42,42]
; SSE41-NEXT:    paddq %xmm0, %xmm1
; SSE41-NEXT:    pxor {{.*}}(%rip), %xmm0
; SSE41-NEXT:    movdqa {{.*#+}} xmm2 = [9223372034707292117,9223372034707292117]
; SSE41-NEXT:    movdqa %xmm0, %xmm3
; SSE41-NEXT:    pcmpgtd %xmm2, %xmm3
; SSE41-NEXT:    pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; SSE41-NEXT:    pcmpeqd %xmm2, %xmm0
; SSE41-NEXT:    pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
; SSE41-NEXT:    pand %xmm4, %xmm2
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm3[1,1,3,3]
; SSE41-NEXT:    por %xmm1, %xmm0
; SSE41-NEXT:    por %xmm2, %xmm0
; SSE41-NEXT:    retq
;
; SSE42-LABEL: unsigned_sat_constant_v2i64_using_cmp_notval:
; SSE42:       # %bb.0:
; SSE42-NEXT:    movdqa {{.*#+}} xmm2 = [9223372036854775808,9223372036854775808]
; SSE42-NEXT:    movdqa %xmm0, %xmm1
; SSE42-NEXT:    pxor %xmm2, %xmm1
; SSE42-NEXT:    paddq {{.*}}(%rip), %xmm0
; SSE42-NEXT:    pxor %xmm0, %xmm2
; SSE42-NEXT:    pcmpgtq %xmm2, %xmm1
; SSE42-NEXT:    por %xmm0, %xmm1
; SSE42-NEXT:    movdqa %xmm1, %xmm0
; SSE42-NEXT:    retq
;
; AVX2-LABEL: unsigned_sat_constant_v2i64_using_cmp_notval:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vmovdqa {{.*#+}} xmm1 = [9223372036854775808,9223372036854775808]
; AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm2
; AVX2-NEXT:    vpaddq {{.*}}(%rip), %xmm0, %xmm0
; AVX2-NEXT:    vpxor %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpcmpgtq %xmm1, %xmm2, %xmm1
; AVX2-NEXT:    vpor %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    retq
;
; AVX512-LABEL: unsigned_sat_constant_v2i64_using_cmp_notval:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vpminuq {{.*}}(%rip), %xmm0, %xmm0
; AVX512-NEXT:    vpaddq {{.*}}(%rip), %xmm0, %xmm0
; AVX512-NEXT:    retq
  %a = add <2 x i64> %x, <i64 42, i64 42>
  %c = icmp ugt <2 x i64> %x, <i64 -43, i64 -43>
  %r = select <2 x i1> %c, <2 x i64> <i64 -1, i64 -1>, <2 x i64> %a
  ret <2 x i64> %r
}

define <16 x i8> @unsigned_sat_variable_v16i8_using_min(<16 x i8> %x, <16 x i8> %y) {
; SSE-LABEL: unsigned_sat_variable_v16i8_using_min:
; SSE:       # %bb.0:
; SSE-NEXT:    pcmpeqd %xmm2, %xmm2
; SSE-NEXT:    pxor %xmm1, %xmm2
; SSE-NEXT:    pminub %xmm2, %xmm0
; SSE-NEXT:    paddb %xmm1, %xmm0
; SSE-NEXT:    retq
;
; AVX2-LABEL: unsigned_sat_variable_v16i8_using_min:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vpcmpeqd %xmm2, %xmm2, %xmm2
; AVX2-NEXT:    vpxor %xmm2, %xmm1, %xmm2
; AVX2-NEXT:    vpminub %xmm2, %xmm0, %xmm0
; AVX2-NEXT:    vpaddb %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    retq
;
; AVX512-LABEL: unsigned_sat_variable_v16i8_using_min:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vmovdqa %xmm1, %xmm2
; AVX512-NEXT:    vpternlogq $15, %xmm1, %xmm1, %xmm2
; AVX512-NEXT:    vpminub %xmm2, %xmm0, %xmm0
; AVX512-NEXT:    vpaddb %xmm1, %xmm0, %xmm0
; AVX512-NEXT:    retq
  %noty = xor <16 x i8> %y, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %c = icmp ult <16 x i8> %x, %noty
  %s = select <16 x i1> %c, <16 x i8> %x, <16 x i8> %noty
  %r = add <16 x i8> %s, %y
  ret <16 x i8> %r
}

define <16 x i8> @unsigned_sat_variable_v16i8_using_cmp_sum(<16 x i8> %x, <16 x i8> %y) {
; SSE-LABEL: unsigned_sat_variable_v16i8_using_cmp_sum:
; SSE:       # %bb.0:
; SSE-NEXT:    paddusb %xmm1, %xmm0
; SSE-NEXT:    retq
;
; AVX-LABEL: unsigned_sat_variable_v16i8_using_cmp_sum:
; AVX:       # %bb.0:
; AVX-NEXT:    vpaddusb %xmm1, %xmm0, %xmm0
; AVX-NEXT:    retq
  %a = add <16 x i8> %x, %y
  %c = icmp ugt <16 x i8> %x, %a
  %r = select <16 x i1> %c, <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>, <16 x i8> %a
  ret <16 x i8> %r
}

define <16 x i8> @unsigned_sat_variable_v16i8_using_cmp_notval(<16 x i8> %x, <16 x i8> %y) {
; SSE-LABEL: unsigned_sat_variable_v16i8_using_cmp_notval:
; SSE:       # %bb.0:
; SSE-NEXT:    pcmpeqd %xmm2, %xmm2
; SSE-NEXT:    movdqa %xmm0, %xmm3
; SSE-NEXT:    paddb %xmm1, %xmm3
; SSE-NEXT:    pxor %xmm2, %xmm1
; SSE-NEXT:    pminub %xmm0, %xmm1
; SSE-NEXT:    pcmpeqb %xmm1, %xmm0
; SSE-NEXT:    pxor %xmm2, %xmm0
; SSE-NEXT:    por %xmm3, %xmm0
; SSE-NEXT:    retq
;
; AVX2-LABEL: unsigned_sat_variable_v16i8_using_cmp_notval:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vpcmpeqd %xmm2, %xmm2, %xmm2
; AVX2-NEXT:    vpxor %xmm2, %xmm1, %xmm3
; AVX2-NEXT:    vpaddb %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpminub %xmm3, %xmm0, %xmm3
; AVX2-NEXT:    vpcmpeqb %xmm3, %xmm0, %xmm0
; AVX2-NEXT:    vpxor %xmm2, %xmm0, %xmm0
; AVX2-NEXT:    vpor %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    retq
;
; AVX512-LABEL: unsigned_sat_variable_v16i8_using_cmp_notval:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vpcmpeqd %xmm2, %xmm2, %xmm2
; AVX512-NEXT:    vpaddb %xmm1, %xmm0, %xmm3
; AVX512-NEXT:    vpternlogq $15, %xmm1, %xmm1, %xmm1
; AVX512-NEXT:    vpminub %xmm1, %xmm0, %xmm1
; AVX512-NEXT:    vpcmpeqb %xmm1, %xmm0, %xmm0
; AVX512-NEXT:    vpternlogq $222, %xmm2, %xmm3, %xmm0
; AVX512-NEXT:    retq
  %noty = xor <16 x i8> %y, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %a = add <16 x i8> %x, %y
  %c = icmp ugt <16 x i8> %x, %noty
  %r = select <16 x i1> %c, <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>, <16 x i8> %a
  ret <16 x i8> %r
}

define <8 x i16> @unsigned_sat_variable_v8i16_using_min(<8 x i16> %x, <8 x i16> %y) {
; SSE2-LABEL: unsigned_sat_variable_v8i16_using_min:
; SSE2:       # %bb.0:
; SSE2-NEXT:    pcmpeqd %xmm2, %xmm2
; SSE2-NEXT:    pxor %xmm1, %xmm2
; SSE2-NEXT:    movdqa %xmm0, %xmm3
; SSE2-NEXT:    psubusw %xmm2, %xmm3
; SSE2-NEXT:    psubw %xmm3, %xmm0
; SSE2-NEXT:    paddw %xmm1, %xmm0
; SSE2-NEXT:    retq
;
; SSE4-LABEL: unsigned_sat_variable_v8i16_using_min:
; SSE4:       # %bb.0:
; SSE4-NEXT:    pcmpeqd %xmm2, %xmm2
; SSE4-NEXT:    pxor %xmm1, %xmm2
; SSE4-NEXT:    pminuw %xmm2, %xmm0
; SSE4-NEXT:    paddw %xmm1, %xmm0
; SSE4-NEXT:    retq
;
; AVX2-LABEL: unsigned_sat_variable_v8i16_using_min:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vpcmpeqd %xmm2, %xmm2, %xmm2
; AVX2-NEXT:    vpxor %xmm2, %xmm1, %xmm2
; AVX2-NEXT:    vpminuw %xmm2, %xmm0, %xmm0
; AVX2-NEXT:    vpaddw %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    retq
;
; AVX512-LABEL: unsigned_sat_variable_v8i16_using_min:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vmovdqa %xmm1, %xmm2
; AVX512-NEXT:    vpternlogq $15, %xmm1, %xmm1, %xmm2
; AVX512-NEXT:    vpminuw %xmm2, %xmm0, %xmm0
; AVX512-NEXT:    vpaddw %xmm1, %xmm0, %xmm0
; AVX512-NEXT:    retq
  %noty = xor <8 x i16> %y, <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
  %c = icmp ult <8 x i16> %x, %noty
  %s = select <8 x i1> %c, <8 x i16> %x, <8 x i16> %noty
  %r = add <8 x i16> %s, %y
  ret <8 x i16> %r
}

define <8 x i16> @unsigned_sat_variable_v8i16_using_cmp_sum(<8 x i16> %x, <8 x i16> %y) {
; SSE-LABEL: unsigned_sat_variable_v8i16_using_cmp_sum:
; SSE:       # %bb.0:
; SSE-NEXT:    paddusw %xmm1, %xmm0
; SSE-NEXT:    retq
;
; AVX-LABEL: unsigned_sat_variable_v8i16_using_cmp_sum:
; AVX:       # %bb.0:
; AVX-NEXT:    vpaddusw %xmm1, %xmm0, %xmm0
; AVX-NEXT:    retq
  %a = add <8 x i16> %x, %y
  %c = icmp ugt <8 x i16> %x, %a
  %r = select <8 x i1> %c, <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>, <8 x i16> %a
  ret <8 x i16> %r
}

define <8 x i16> @unsigned_sat_variable_v8i16_using_cmp_notval(<8 x i16> %x, <8 x i16> %y) {
; SSE2-LABEL: unsigned_sat_variable_v8i16_using_cmp_notval:
; SSE2:       # %bb.0:
; SSE2-NEXT:    movdqa %xmm0, %xmm2
; SSE2-NEXT:    paddw %xmm1, %xmm2
; SSE2-NEXT:    pxor {{.*}}(%rip), %xmm1
; SSE2-NEXT:    pxor {{.*}}(%rip), %xmm0
; SSE2-NEXT:    pcmpgtw %xmm1, %xmm0
; SSE2-NEXT:    por %xmm2, %xmm0
; SSE2-NEXT:    retq
;
; SSE4-LABEL: unsigned_sat_variable_v8i16_using_cmp_notval:
; SSE4:       # %bb.0:
; SSE4-NEXT:    pcmpeqd %xmm2, %xmm2
; SSE4-NEXT:    movdqa %xmm0, %xmm3
; SSE4-NEXT:    paddw %xmm1, %xmm3
; SSE4-NEXT:    pxor %xmm2, %xmm1
; SSE4-NEXT:    pminuw %xmm0, %xmm1
; SSE4-NEXT:    pcmpeqw %xmm1, %xmm0
; SSE4-NEXT:    pxor %xmm2, %xmm0
; SSE4-NEXT:    por %xmm3, %xmm0
; SSE4-NEXT:    retq
;
; AVX2-LABEL: unsigned_sat_variable_v8i16_using_cmp_notval:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vpcmpeqd %xmm2, %xmm2, %xmm2
; AVX2-NEXT:    vpxor %xmm2, %xmm1, %xmm3
; AVX2-NEXT:    vpaddw %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpminuw %xmm3, %xmm0, %xmm3
; AVX2-NEXT:    vpcmpeqw %xmm3, %xmm0, %xmm0
; AVX2-NEXT:    vpxor %xmm2, %xmm0, %xmm0
; AVX2-NEXT:    vpor %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    retq
;
; AVX512-LABEL: unsigned_sat_variable_v8i16_using_cmp_notval:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vpcmpeqd %xmm2, %xmm2, %xmm2
; AVX512-NEXT:    vpaddw %xmm1, %xmm0, %xmm3
; AVX512-NEXT:    vpternlogq $15, %xmm1, %xmm1, %xmm1
; AVX512-NEXT:    vpminuw %xmm1, %xmm0, %xmm1
; AVX512-NEXT:    vpcmpeqw %xmm1, %xmm0, %xmm0
; AVX512-NEXT:    vpternlogq $222, %xmm2, %xmm3, %xmm0
; AVX512-NEXT:    retq
  %noty = xor <8 x i16> %y, <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
  %a = add <8 x i16> %x, %y
  %c = icmp ugt <8 x i16> %x, %noty
  %r = select <8 x i1> %c, <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>, <8 x i16> %a
  ret <8 x i16> %r
}

define <4 x i32> @unsigned_sat_variable_v4i32_using_min(<4 x i32> %x, <4 x i32> %y) {
; SSE2-LABEL: unsigned_sat_variable_v4i32_using_min:
; SSE2:       # %bb.0:
; SSE2-NEXT:    pcmpeqd %xmm2, %xmm2
; SSE2-NEXT:    movdqa {{.*#+}} xmm3 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT:    pxor %xmm0, %xmm3
; SSE2-NEXT:    movdqa {{.*#+}} xmm4 = [2147483647,2147483647,2147483647,2147483647]
; SSE2-NEXT:    pxor %xmm1, %xmm4
; SSE2-NEXT:    pcmpgtd %xmm3, %xmm4
; SSE2-NEXT:    pand %xmm4, %xmm0
; SSE2-NEXT:    pxor %xmm2, %xmm4
; SSE2-NEXT:    movdqa %xmm1, %xmm2
; SSE2-NEXT:    pandn %xmm4, %xmm2
; SSE2-NEXT:    por %xmm2, %xmm0
; SSE2-NEXT:    paddd %xmm1, %xmm0
; SSE2-NEXT:    retq
;
; SSE4-LABEL: unsigned_sat_variable_v4i32_using_min:
; SSE4:       # %bb.0:
; SSE4-NEXT:    pcmpeqd %xmm2, %xmm2
; SSE4-NEXT:    pxor %xmm1, %xmm2
; SSE4-NEXT:    pminud %xmm2, %xmm0
; SSE4-NEXT:    paddd %xmm1, %xmm0
; SSE4-NEXT:    retq
;
; AVX2-LABEL: unsigned_sat_variable_v4i32_using_min:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vpcmpeqd %xmm2, %xmm2, %xmm2
; AVX2-NEXT:    vpxor %xmm2, %xmm1, %xmm2
; AVX2-NEXT:    vpminud %xmm2, %xmm0, %xmm0
; AVX2-NEXT:    vpaddd %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    retq
;
; AVX512-LABEL: unsigned_sat_variable_v4i32_using_min:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vmovdqa %xmm1, %xmm2
; AVX512-NEXT:    vpternlogq $15, %xmm1, %xmm1, %xmm2
; AVX512-NEXT:    vpminud %xmm2, %xmm0, %xmm0
; AVX512-NEXT:    vpaddd %xmm1, %xmm0, %xmm0
; AVX512-NEXT:    retq
  %noty = xor <4 x i32> %y, <i32 -1, i32 -1, i32 -1, i32 -1>
  %c = icmp ult <4 x i32> %x, %noty
  %s = select <4 x i1> %c, <4 x i32> %x, <4 x i32> %noty
  %r = add <4 x i32> %s, %y
  ret <4 x i32> %r
}

define <4 x i32> @unsigned_sat_variable_v4i32_using_cmp_sum(<4 x i32> %x, <4 x i32> %y) {
; SSE2-LABEL: unsigned_sat_variable_v4i32_using_cmp_sum:
; SSE2:       # %bb.0:
; SSE2-NEXT:    paddd %xmm0, %xmm1
; SSE2-NEXT:    movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT:    pxor %xmm2, %xmm0
; SSE2-NEXT:    pxor %xmm1, %xmm2
; SSE2-NEXT:    pcmpgtd %xmm2, %xmm0
; SSE2-NEXT:    por %xmm1, %xmm0
; SSE2-NEXT:    retq
;
; SSE4-LABEL: unsigned_sat_variable_v4i32_using_cmp_sum:
; SSE4:       # %bb.0:
; SSE4-NEXT:    pcmpeqd %xmm2, %xmm2
; SSE4-NEXT:    pxor %xmm1, %xmm2
; SSE4-NEXT:    pminud %xmm2, %xmm0
; SSE4-NEXT:    paddd %xmm1, %xmm0
; SSE4-NEXT:    retq
;
; AVX2-LABEL: unsigned_sat_variable_v4i32_using_cmp_sum:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vpcmpeqd %xmm2, %xmm2, %xmm2
; AVX2-NEXT:    vpxor %xmm2, %xmm1, %xmm2
; AVX2-NEXT:    vpminud %xmm2, %xmm0, %xmm0
; AVX2-NEXT:    vpaddd %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    retq
;
; AVX512-LABEL: unsigned_sat_variable_v4i32_using_cmp_sum:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vmovdqa %xmm1, %xmm2
; AVX512-NEXT:    vpternlogq $15, %xmm1, %xmm1, %xmm2
; AVX512-NEXT:    vpminud %xmm2, %xmm0, %xmm0
; AVX512-NEXT:    vpaddd %xmm1, %xmm0, %xmm0
; AVX512-NEXT:    retq
  %a = add <4 x i32> %x, %y
  %c = icmp ugt <4 x i32> %x, %a
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}

define <4 x i32> @unsigned_sat_variable_v4i32_using_cmp_notval(<4 x i32> %x, <4 x i32> %y) {
; SSE2-LABEL: unsigned_sat_variable_v4i32_using_cmp_notval:
; SSE2:       # %bb.0:
; SSE2-NEXT:    movdqa %xmm0, %xmm2
; SSE2-NEXT:    paddd %xmm1, %xmm2
; SSE2-NEXT:    pxor {{.*}}(%rip), %xmm1
; SSE2-NEXT:    pxor {{.*}}(%rip), %xmm0
; SSE2-NEXT:    pcmpgtd %xmm1, %xmm0
; SSE2-NEXT:    por %xmm2, %xmm0
; SSE2-NEXT:    retq
;
; SSE4-LABEL: unsigned_sat_variable_v4i32_using_cmp_notval:
; SSE4:       # %bb.0:
; SSE4-NEXT:    pcmpeqd %xmm2, %xmm2
; SSE4-NEXT:    movdqa %xmm0, %xmm3
; SSE4-NEXT:    paddd %xmm1, %xmm3
; SSE4-NEXT:    pxor %xmm2, %xmm1
; SSE4-NEXT:    pminud %xmm0, %xmm1
; SSE4-NEXT:    pcmpeqd %xmm1, %xmm0
; SSE4-NEXT:    pxor %xmm2, %xmm0
; SSE4-NEXT:    por %xmm3, %xmm0
; SSE4-NEXT:    retq
;
; AVX2-LABEL: unsigned_sat_variable_v4i32_using_cmp_notval:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vpcmpeqd %xmm2, %xmm2, %xmm2
; AVX2-NEXT:    vpxor %xmm2, %xmm1, %xmm3
; AVX2-NEXT:    vpaddd %xmm1, %xmm0, %xmm1
; AVX2-NEXT:    vpminud %xmm3, %xmm0, %xmm3
; AVX2-NEXT:    vpcmpeqd %xmm3, %xmm0, %xmm0
; AVX2-NEXT:    vpxor %xmm2, %xmm0, %xmm0
; AVX2-NEXT:    vpor %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    retq
;
; AVX512-LABEL: unsigned_sat_variable_v4i32_using_cmp_notval:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vpcmpeqd %xmm3, %xmm3, %xmm3
; AVX512-NEXT:    vpaddd %xmm1, %xmm0, %xmm2
; AVX512-NEXT:    vpternlogq $15, %xmm1, %xmm1, %xmm1
; AVX512-NEXT:    vpcmpnleud %xmm1, %xmm0, %k1
; AVX512-NEXT:    vmovdqa32 %xmm3, %xmm2 {%k1}
; AVX512-NEXT:    vmovdqa %xmm2, %xmm0
; AVX512-NEXT:    retq
  %noty = xor <4 x i32> %y, <i32 -1, i32 -1, i32 -1, i32 -1>
  %a = add <4 x i32> %x, %y
  %c = icmp ugt <4 x i32> %x, %noty
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}

define <2 x i64> @unsigned_sat_variable_v2i64_using_min(<2 x i64> %x, <2 x i64> %y) {
; SSE2-LABEL: unsigned_sat_variable_v2i64_using_min:
; SSE2:       # %bb.0:
; SSE2-NEXT:    pcmpeqd %xmm2, %xmm2
; SSE2-NEXT:    movdqa {{.*#+}} xmm3 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT:    pxor %xmm0, %xmm3
; SSE2-NEXT:    movdqa {{.*#+}} xmm4 = [9223372034707292159,9223372034707292159]
; SSE2-NEXT:    pxor %xmm1, %xmm4
; SSE2-NEXT:    movdqa %xmm4, %xmm5
; SSE2-NEXT:    pcmpgtd %xmm3, %xmm5
; SSE2-NEXT:    pshufd {{.*#+}} xmm6 = xmm5[0,0,2,2]
; SSE2-NEXT:    pcmpeqd %xmm3, %xmm4
; SSE2-NEXT:    pshufd {{.*#+}} xmm3 = xmm4[1,1,3,3]
; SSE2-NEXT:    pand %xmm6, %xmm3
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm5[1,1,3,3]
; SSE2-NEXT:    por %xmm3, %xmm4
; SSE2-NEXT:    pand %xmm4, %xmm0
; SSE2-NEXT:    pxor %xmm2, %xmm4
; SSE2-NEXT:    movdqa %xmm1, %xmm2
; SSE2-NEXT:    pandn %xmm4, %xmm2
; SSE2-NEXT:    por %xmm2, %xmm0
; SSE2-NEXT:    paddq %xmm1, %xmm0
; SSE2-NEXT:    retq
;
; SSE41-LABEL: unsigned_sat_variable_v2i64_using_min:
; SSE41:       # %bb.0:
; SSE41-NEXT:    movdqa %xmm0, %xmm2
; SSE41-NEXT:    pcmpeqd %xmm3, %xmm3
; SSE41-NEXT:    pxor %xmm1, %xmm3
; SSE41-NEXT:    movdqa {{.*#+}} xmm0 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT:    pxor %xmm2, %xmm0
; SSE41-NEXT:    movdqa {{.*#+}} xmm4 = [9223372034707292159,9223372034707292159]
; SSE41-NEXT:    pxor %xmm1, %xmm4
; SSE41-NEXT:    movdqa %xmm4, %xmm5
; SSE41-NEXT:    pcmpeqd %xmm0, %xmm5
; SSE41-NEXT:    pcmpgtd %xmm0, %xmm4
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm4[0,0,2,2]
; SSE41-NEXT:    pand %xmm5, %xmm0
; SSE41-NEXT:    por %xmm4, %xmm0
; SSE41-NEXT:    blendvpd %xmm0, %xmm2, %xmm3
; SSE41-NEXT:    paddq %xmm1, %xmm3
; SSE41-NEXT:    movdqa %xmm3, %xmm0
; SSE41-NEXT:    retq
;
; SSE42-LABEL: unsigned_sat_variable_v2i64_using_min:
; SSE42:       # %bb.0:
; SSE42-NEXT:    movdqa %xmm0, %xmm2
; SSE42-NEXT:    pcmpeqd %xmm3, %xmm3
; SSE42-NEXT:    pxor %xmm1, %xmm3
; SSE42-NEXT:    movdqa {{.*#+}} xmm4 = [9223372036854775808,9223372036854775808]
; SSE42-NEXT:    pxor %xmm0, %xmm4
; SSE42-NEXT:    movdqa {{.*#+}} xmm0 = [9223372036854775807,9223372036854775807]
; SSE42-NEXT:    pxor %xmm1, %xmm0
; SSE42-NEXT:    pcmpgtq %xmm4, %xmm0
; SSE42-NEXT:    blendvpd %xmm0, %xmm2, %xmm3
; SSE42-NEXT:    paddq %xmm1, %xmm3
; SSE42-NEXT:    movdqa %xmm3, %xmm0
; SSE42-NEXT:    retq
;
; AVX2-LABEL: unsigned_sat_variable_v2i64_using_min:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vpcmpeqd %xmm2, %xmm2, %xmm2
; AVX2-NEXT:    vpxor %xmm2, %xmm1, %xmm2
; AVX2-NEXT:    vpxor {{.*}}(%rip), %xmm0, %xmm3
; AVX2-NEXT:    vpxor {{.*}}(%rip), %xmm1, %xmm4
; AVX2-NEXT:    vpcmpgtq %xmm3, %xmm4, %xmm3
; AVX2-NEXT:    vblendvpd %xmm3, %xmm0, %xmm2, %xmm0
; AVX2-NEXT:    vpaddq %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    retq
;
; AVX512-LABEL: unsigned_sat_variable_v2i64_using_min:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vmovdqa %xmm1, %xmm2
; AVX512-NEXT:    vpternlogq $15, %xmm1, %xmm1, %xmm2
; AVX512-NEXT:    vpminuq %xmm2, %xmm0, %xmm0
; AVX512-NEXT:    vpaddq %xmm1, %xmm0, %xmm0
; AVX512-NEXT:    retq
  %noty = xor <2 x i64> %y, <i64 -1, i64 -1>
  %c = icmp ult <2 x i64> %x, %noty
  %s = select <2 x i1> %c, <2 x i64> %x, <2 x i64> %noty
  %r = add <2 x i64> %s, %y
  ret <2 x i64> %r
}

define <2 x i64> @unsigned_sat_variable_v2i64_using_cmp_sum(<2 x i64> %x, <2 x i64> %y) {
; SSE2-LABEL: unsigned_sat_variable_v2i64_using_cmp_sum:
; SSE2:       # %bb.0:
; SSE2-NEXT:    paddq %xmm0, %xmm1
; SSE2-NEXT:    movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT:    pxor %xmm2, %xmm0
; SSE2-NEXT:    pxor %xmm1, %xmm2
; SSE2-NEXT:    movdqa %xmm0, %xmm3
; SSE2-NEXT:    pcmpgtd %xmm2, %xmm3
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; SSE2-NEXT:    pcmpeqd %xmm0, %xmm2
; SSE2-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
; SSE2-NEXT:    pand %xmm4, %xmm2
; SSE2-NEXT:    pshufd {{.*#+}} xmm0 = xmm3[1,1,3,3]
; SSE2-NEXT:    por %xmm1, %xmm0
; SSE2-NEXT:    por %xmm2, %xmm0
; SSE2-NEXT:    retq
;
; SSE41-LABEL: unsigned_sat_variable_v2i64_using_cmp_sum:
; SSE41:       # %bb.0:
; SSE41-NEXT:    paddq %xmm0, %xmm1
; SSE41-NEXT:    movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT:    pxor %xmm2, %xmm0
; SSE41-NEXT:    pxor %xmm1, %xmm2
; SSE41-NEXT:    movdqa %xmm0, %xmm3
; SSE41-NEXT:    pcmpgtd %xmm2, %xmm3
; SSE41-NEXT:    pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; SSE41-NEXT:    pcmpeqd %xmm0, %xmm2
; SSE41-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
; SSE41-NEXT:    pand %xmm4, %xmm2
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm3[1,1,3,3]
; SSE41-NEXT:    por %xmm1, %xmm0
; SSE41-NEXT:    por %xmm2, %xmm0
; SSE41-NEXT:    retq
;
; SSE42-LABEL: unsigned_sat_variable_v2i64_using_cmp_sum:
; SSE42:       # %bb.0:
; SSE42-NEXT:    movdqa {{.*#+}} xmm2 = [9223372036854775808,9223372036854775808]
; SSE42-NEXT:    paddq %xmm0, %xmm1
; SSE42-NEXT:    pxor %xmm2, %xmm0
; SSE42-NEXT:    pxor %xmm1, %xmm2
; SSE42-NEXT:    pcmpgtq %xmm2, %xmm0
; SSE42-NEXT:    por %xmm1, %xmm0
; SSE42-NEXT:    retq
;
; AVX2-LABEL: unsigned_sat_variable_v2i64_using_cmp_sum:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vmovdqa {{.*#+}} xmm2 = [9223372036854775808,9223372036854775808]
; AVX2-NEXT:    vpxor %xmm2, %xmm0, %xmm3
; AVX2-NEXT:    vpaddq %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    vpxor %xmm2, %xmm0, %xmm1
; AVX2-NEXT:    vpcmpgtq %xmm1, %xmm3, %xmm1
; AVX2-NEXT:    vpor %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    retq
;
; AVX512-LABEL: unsigned_sat_variable_v2i64_using_cmp_sum:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vmovdqa %xmm1, %xmm2
; AVX512-NEXT:    vpternlogq $15, %xmm1, %xmm1, %xmm2
; AVX512-NEXT:    vpminuq %xmm2, %xmm0, %xmm0
; AVX512-NEXT:    vpaddq %xmm1, %xmm0, %xmm0
; AVX512-NEXT:    retq
  %a = add <2 x i64> %x, %y
  %c = icmp ugt <2 x i64> %x, %a
  %r = select <2 x i1> %c, <2 x i64> <i64 -1, i64 -1>, <2 x i64> %a
  ret <2 x i64> %r
}

define <2 x i64> @unsigned_sat_variable_v2i64_using_cmp_notval(<2 x i64> %x, <2 x i64> %y) {
; SSE2-LABEL: unsigned_sat_variable_v2i64_using_cmp_notval:
; SSE2:       # %bb.0:
; SSE2-NEXT:    movdqa %xmm0, %xmm2
; SSE2-NEXT:    paddq %xmm1, %xmm2
; SSE2-NEXT:    pxor {{.*}}(%rip), %xmm1
; SSE2-NEXT:    pxor {{.*}}(%rip), %xmm0
; SSE2-NEXT:    movdqa %xmm0, %xmm3
; SSE2-NEXT:    pcmpgtd %xmm1, %xmm3
; SSE2-NEXT:    pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; SSE2-NEXT:    pcmpeqd %xmm1, %xmm0
; SSE2-NEXT:    pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE2-NEXT:    pand %xmm4, %xmm1
; SSE2-NEXT:    pshufd {{.*#+}} xmm0 = xmm3[1,1,3,3]
; SSE2-NEXT:    por %xmm2, %xmm0
; SSE2-NEXT:    por %xmm1, %xmm0
; SSE2-NEXT:    retq
;
; SSE41-LABEL: unsigned_sat_variable_v2i64_using_cmp_notval:
; SSE41:       # %bb.0:
; SSE41-NEXT:    movdqa %xmm0, %xmm2
; SSE41-NEXT:    paddq %xmm1, %xmm2
; SSE41-NEXT:    pxor {{.*}}(%rip), %xmm1
; SSE41-NEXT:    pxor {{.*}}(%rip), %xmm0
; SSE41-NEXT:    movdqa %xmm0, %xmm3
; SSE41-NEXT:    pcmpgtd %xmm1, %xmm3
; SSE41-NEXT:    pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; SSE41-NEXT:    pcmpeqd %xmm1, %xmm0
; SSE41-NEXT:    pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
; SSE41-NEXT:    pand %xmm4, %xmm1
; SSE41-NEXT:    pshufd {{.*#+}} xmm0 = xmm3[1,1,3,3]
; SSE41-NEXT:    por %xmm2, %xmm0
; SSE41-NEXT:    por %xmm1, %xmm0
; SSE41-NEXT:    retq
;
; SSE42-LABEL: unsigned_sat_variable_v2i64_using_cmp_notval:
; SSE42:       # %bb.0:
; SSE42-NEXT:    movdqa %xmm0, %xmm2
; SSE42-NEXT:    paddq %xmm1, %xmm2
; SSE42-NEXT:    pxor {{.*}}(%rip), %xmm1
; SSE42-NEXT:    pxor {{.*}}(%rip), %xmm0
; SSE42-NEXT:    pcmpgtq %xmm1, %xmm0
; SSE42-NEXT:    por %xmm2, %xmm0
; SSE42-NEXT:    retq
;
; AVX2-LABEL: unsigned_sat_variable_v2i64_using_cmp_notval:
; AVX2:       # %bb.0:
; AVX2-NEXT:    vpaddq %xmm1, %xmm0, %xmm2
; AVX2-NEXT:    vpxor {{.*}}(%rip), %xmm1, %xmm1
; AVX2-NEXT:    vpxor {{.*}}(%rip), %xmm0, %xmm0
; AVX2-NEXT:    vpcmpgtq %xmm1, %xmm0, %xmm0
; AVX2-NEXT:    vpor %xmm2, %xmm0, %xmm0
; AVX2-NEXT:    retq
;
; AVX512-LABEL: unsigned_sat_variable_v2i64_using_cmp_notval:
; AVX512:       # %bb.0:
; AVX512-NEXT:    vpcmpeqd %xmm3, %xmm3, %xmm3
; AVX512-NEXT:    vpaddq %xmm1, %xmm0, %xmm2
; AVX512-NEXT:    vpternlogq $15, %xmm1, %xmm1, %xmm1
; AVX512-NEXT:    vpcmpnleuq %xmm1, %xmm0, %k1
; AVX512-NEXT:    vmovdqa64 %xmm3, %xmm2 {%k1}
; AVX512-NEXT:    vmovdqa %xmm2, %xmm0
; AVX512-NEXT:    retq
  %noty = xor <2 x i64> %y, <i64 -1, i64 -1>
  %a = add <2 x i64> %x, %y
  %c = icmp ugt <2 x i64> %x, %noty
  %r = select <2 x i1> %c, <2 x i64> <i64 -1, i64 -1>, <2 x i64> %a
  ret <2 x i64> %r
}
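
; Illustrative reference (not part of the original test): the v2i64 cases are
; the hardest here because SSE/AVX2 have no unsigned 64-bit min or compare,
; forcing the sign-bit-flip (pxor of 0x8000... plus pcmpgtq/pcmpgtd) sequences
; seen above, while AVX512VL's vpminuq handles them directly. The function
; name @uadd_sat_v2i64_ref is hypothetical.

declare <2 x i64> @llvm.uadd.sat.v2i64(<2 x i64>, <2 x i64>)

define <2 x i64> @uadd_sat_v2i64_ref(<2 x i64> %x, <2 x i64> %y) {
  ; lanewise unsigned saturating add, each lane clamped at 2^64-1
  %r = call <2 x i64> @llvm.uadd.sat.v2i64(<2 x i64> %x, <2 x i64> %y)
  ret <2 x i64> %r
}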