; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=i686-unknown-linux-gnu -mattr=-bmi,-tbm,-bmi2 < %s | FileCheck %s --check-prefixes=CHECK,X86,NOBMI,X86-NOBMI
; RUN: llc -mtriple=i686-unknown-linux-gnu -mattr=+bmi,-tbm,-bmi2 < %s | FileCheck %s --check-prefixes=CHECK,X86,BMI1,X86-BMI1,BMI1NOTBM,X86-BMI1NOTBM
; RUN: llc -mtriple=i686-unknown-linux-gnu -mattr=+bmi,+tbm,-bmi2 < %s | FileCheck %s --check-prefixes=CHECK,X86,BMI1,X86-BMI1,BMI1TBM,X86-BMI1TBM
; RUN: llc -mtriple=i686-unknown-linux-gnu -mattr=+bmi,+tbm,+bmi2 < %s | FileCheck %s --check-prefixes=CHECK,X86,BMI1,X86-BMI1,BMI1BMI2,X86-BMI1BMI2,BMI1TBM,X86-BMI1TBM,BMI1TBMBMI2,X86-BMI1TBMBMI2
; RUN: llc -mtriple=i686-unknown-linux-gnu -mattr=+bmi,-tbm,+bmi2 < %s | FileCheck %s --check-prefixes=CHECK,X86,BMI1,X86-BMI1,BMI1BMI2,X86-BMI1BMI2,BMI1NOTBMBMI2,X86-BMI1NOTBMBMI2
; RUN: llc -mtriple=x86_64-unknown-linux-gnu -mattr=-bmi,-tbm,-bmi2 < %s | FileCheck %s --check-prefixes=CHECK,X64,NOBMI,X64-NOBMI
; RUN: llc -mtriple=x86_64-unknown-linux-gnu -mattr=+bmi,-tbm,-bmi2 < %s | FileCheck %s --check-prefixes=CHECK,X64,BMI1,X64-BMI1,BMI1NOTBM,X64-BMI1NOTBM
; RUN: llc -mtriple=x86_64-unknown-linux-gnu -mattr=+bmi,+tbm,-bmi2 < %s | FileCheck %s --check-prefixes=CHECK,X64,BMI1,X64-BMI1,BMI1TBM,X64-BMI1TBM
; RUN: llc -mtriple=x86_64-unknown-linux-gnu -mattr=+bmi,+tbm,+bmi2 < %s | FileCheck %s --check-prefixes=CHECK,X64,BMI1,X64-BMI1,BMI1BMI2,X64-BMI1BMI2,BMI1TBM,X64-BMI1TBM,BMI1TBMBMI2,X64-BMI1TBMBMI2
; RUN: llc -mtriple=x86_64-unknown-linux-gnu -mattr=+bmi,-tbm,+bmi2 < %s | FileCheck %s --check-prefixes=CHECK,X64,BMI1,X64-BMI1,BMI1BMI2,X64-BMI1BMI2,BMI1NOTBMBMI2,X64-BMI1NOTBMBMI2

; *Please* keep in sync with test/CodeGen/AArch64/extract-lowbits.ll

; https://bugs.llvm.org/show_bug.cgi?id=36419
; https://bugs.llvm.org/show_bug.cgi?id=37603
; https://bugs.llvm.org/show_bug.cgi?id=37610

; Patterns:
; a) x & (1 << nbits) - 1
; b) x & ~(-1 << nbits)
; c) x & (-1 >> (32 - y))
; d) x << (32 - y) >> (32 - y)
; are equivalent.

; ---------------------------------------------------------------------------- ;
; Pattern a.
32-bit ; ---------------------------------------------------------------------------- ; define i32 @bzhi32_a0(i32 %val, i32 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_a0: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $1, %eax ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: decl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_a0: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1NOTBM-NEXT: shll $8, %eax ; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_a0: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_a0: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: decl %eax ; X64-NOBMI-NEXT: andl %edi, %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_a0: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_a0: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %onebit = shl i32 1, %numlowbits %mask = add nsw i32 %onebit, -1 %masked = and i32 %mask, %val ret i32 %masked } define i32 @bzhi32_a1_indexzext(i32 %val, i8 zeroext %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_a1_indexzext: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $1, %eax ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: decl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_a1_indexzext: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1NOTBM-NEXT: shll $8, %eax ; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_a1_indexzext: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_a1_indexzext: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: decl %eax ; X64-NOBMI-NEXT: andl %edi, %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_a1_indexzext: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_a1_indexzext: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %conv = zext i8 %numlowbits to i32 %onebit = shl i32 1, %conv %mask = add nsw i32 %onebit, -1 %masked = and i32 %mask, %val ret i32 %masked } define i32 @bzhi32_a2_load(i32* %w, i32 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_a2_load: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $1, %eax ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: decl %eax ; X86-NOBMI-NEXT: andl (%edx), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_a2_load: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movl 
{{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: shll $8, %ecx ; X86-BMI1NOTBM-NEXT: bextrl %ecx, (%eax), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_a2_load: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: bzhil %ecx, (%eax), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_a2_load: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: decl %eax ; X64-NOBMI-NEXT: andl (%rdi), %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_a2_load: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, (%rdi), %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_a2_load: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, (%rdi), %eax ; X64-BMI1BMI2-NEXT: retq %val = load i32, i32* %w %onebit = shl i32 1, %numlowbits %mask = add nsw i32 %onebit, -1 %masked = and i32 %mask, %val ret i32 %masked } define i32 @bzhi32_a3_load_indexzext(i32* %w, i8 zeroext %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_a3_load_indexzext: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $1, %eax ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: decl %eax ; X86-NOBMI-NEXT: andl (%edx), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_a3_load_indexzext: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: shll $8, %ecx ; X86-BMI1NOTBM-NEXT: bextrl %ecx, (%eax), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_a3_load_indexzext: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: bzhil %ecx, (%eax), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_a3_load_indexzext: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: decl %eax ; X64-NOBMI-NEXT: andl (%rdi), %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_a3_load_indexzext: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, (%rdi), %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_a3_load_indexzext: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, (%rdi), %eax ; X64-BMI1BMI2-NEXT: retq %val = load i32, i32* %w %conv = zext i8 %numlowbits to i32 %onebit = shl i32 1, %conv %mask = add nsw i32 %onebit, -1 %masked = and i32 %mask, %val ret i32 %masked } define i32 @bzhi32_a4_commutative(i32 %val, i32 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_a4_commutative: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $1, %eax ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: decl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_a4_commutative: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1NOTBM-NEXT: shll $8, %eax ; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_a4_commutative: ; 
X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_a4_commutative: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: decl %eax ; X64-NOBMI-NEXT: andl %edi, %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_a4_commutative: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_a4_commutative: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %onebit = shl i32 1, %numlowbits %mask = add nsw i32 %onebit, -1 %masked = and i32 %val, %mask ; swapped order ret i32 %masked } ; 64-bit define i64 @bzhi64_a0(i64 %val, i64 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_a0: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $1, %eax ; X86-NOBMI-NEXT: xorl %edx, %edx ; X86-NOBMI-NEXT: shldl %cl, %eax, %edx ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: je .LBB5_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %eax, %edx ; X86-NOBMI-NEXT: xorl %eax, %eax ; X86-NOBMI-NEXT: .LBB5_2: ; X86-NOBMI-NEXT: addl $-1, %eax ; X86-NOBMI-NEXT: adcl $-1, %edx ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %edx ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_a0: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $1, %eax ; X86-BMI1NOTBM-NEXT: xorl %edx, %edx ; X86-BMI1NOTBM-NEXT: shldl %cl, %eax, %edx ; X86-BMI1NOTBM-NEXT: shll %cl, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: je .LBB5_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %eax, %edx ; X86-BMI1NOTBM-NEXT: xorl %eax, %eax ; X86-BMI1NOTBM-NEXT: .LBB5_2: ; X86-BMI1NOTBM-NEXT: addl $-1, %eax ; X86-BMI1NOTBM-NEXT: adcl $-1, %edx ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %edx ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_a0: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: movl $1, %eax ; X86-BMI1BMI2-NEXT: xorl %edx, %edx ; X86-BMI1BMI2-NEXT: shldl %cl, %eax, %edx ; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %eax ; X86-BMI1BMI2-NEXT: testb $32, %cl ; X86-BMI1BMI2-NEXT: je .LBB5_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %eax, %edx ; X86-BMI1BMI2-NEXT: xorl %eax, %eax ; X86-BMI1BMI2-NEXT: .LBB5_2: ; X86-BMI1BMI2-NEXT: addl $-1, %eax ; X86-BMI1BMI2-NEXT: adcl $-1, %edx ; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %edx ; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_a0: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movq %rsi, %rcx ; X64-NOBMI-NEXT: movl $1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: decq %rax ; X64-NOBMI-NEXT: andq %rdi, %rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_a0: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrq %rsi, %rdi, %rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_a0: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhiq %rsi, %rdi, %rax ; X64-BMI1BMI2-NEXT: retq %onebit = shl i64 1, %numlowbits 
%mask = add nsw i64 %onebit, -1 %masked = and i64 %mask, %val ret i64 %masked } define i64 @bzhi64_a1_indexzext(i64 %val, i8 zeroext %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_a1_indexzext: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $1, %eax ; X86-NOBMI-NEXT: xorl %edx, %edx ; X86-NOBMI-NEXT: shldl %cl, %eax, %edx ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: je .LBB6_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %eax, %edx ; X86-NOBMI-NEXT: xorl %eax, %eax ; X86-NOBMI-NEXT: .LBB6_2: ; X86-NOBMI-NEXT: addl $-1, %eax ; X86-NOBMI-NEXT: adcl $-1, %edx ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %edx ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_a1_indexzext: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $1, %eax ; X86-BMI1NOTBM-NEXT: xorl %edx, %edx ; X86-BMI1NOTBM-NEXT: shldl %cl, %eax, %edx ; X86-BMI1NOTBM-NEXT: shll %cl, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: je .LBB6_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %eax, %edx ; X86-BMI1NOTBM-NEXT: xorl %eax, %eax ; X86-BMI1NOTBM-NEXT: .LBB6_2: ; X86-BMI1NOTBM-NEXT: addl $-1, %eax ; X86-BMI1NOTBM-NEXT: adcl $-1, %edx ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %edx ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_a1_indexzext: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: movl $1, %eax ; X86-BMI1BMI2-NEXT: xorl %edx, %edx ; X86-BMI1BMI2-NEXT: shldl %cl, %eax, %edx ; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %eax ; X86-BMI1BMI2-NEXT: testb $32, %cl ; X86-BMI1BMI2-NEXT: je .LBB6_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %eax, %edx ; X86-BMI1BMI2-NEXT: xorl %eax, %eax ; X86-BMI1BMI2-NEXT: .LBB6_2: ; X86-BMI1BMI2-NEXT: addl $-1, %eax ; X86-BMI1BMI2-NEXT: adcl $-1, %edx ; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %edx ; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_a1_indexzext: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: decq %rax ; X64-NOBMI-NEXT: andq %rdi, %rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_a1_indexzext: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: # kill: def $esi killed $esi def $rsi ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrq %rsi, %rdi, %rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_a1_indexzext: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: # kill: def $esi killed $esi def $rsi ; X64-BMI1BMI2-NEXT: bzhiq %rsi, %rdi, %rax ; X64-BMI1BMI2-NEXT: retq %conv = zext i8 %numlowbits to i64 %onebit = shl i64 1, %conv %mask = add nsw i64 %onebit, -1 %masked = and i64 %mask, %val ret i64 %masked } define i64 @bzhi64_a2_load(i64* %w, i64 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_a2_load: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $1, %eax ; X86-NOBMI-NEXT: xorl %edx, %edx ; X86-NOBMI-NEXT: shldl %cl, %eax, %edx ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: je .LBB7_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %eax, %edx ; X86-NOBMI-NEXT: xorl %eax, %eax ; 
X86-NOBMI-NEXT: .LBB7_2: ; X86-NOBMI-NEXT: addl $-1, %eax ; X86-NOBMI-NEXT: adcl $-1, %edx ; X86-NOBMI-NEXT: andl 4(%esi), %edx ; X86-NOBMI-NEXT: andl (%esi), %eax ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_a2_load: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %esi ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $1, %eax ; X86-BMI1NOTBM-NEXT: xorl %edx, %edx ; X86-BMI1NOTBM-NEXT: shldl %cl, %eax, %edx ; X86-BMI1NOTBM-NEXT: shll %cl, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: je .LBB7_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %eax, %edx ; X86-BMI1NOTBM-NEXT: xorl %eax, %eax ; X86-BMI1NOTBM-NEXT: .LBB7_2: ; X86-BMI1NOTBM-NEXT: addl $-1, %eax ; X86-BMI1NOTBM-NEXT: adcl $-1, %edx ; X86-BMI1NOTBM-NEXT: andl 4(%esi), %edx ; X86-BMI1NOTBM-NEXT: andl (%esi), %eax ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_a2_load: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %esi ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: movl $1, %eax ; X86-BMI1BMI2-NEXT: xorl %edx, %edx ; X86-BMI1BMI2-NEXT: shldl %cl, %eax, %edx ; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %eax ; X86-BMI1BMI2-NEXT: testb $32, %cl ; X86-BMI1BMI2-NEXT: je .LBB7_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %eax, %edx ; X86-BMI1BMI2-NEXT: xorl %eax, %eax ; X86-BMI1BMI2-NEXT: .LBB7_2: ; X86-BMI1BMI2-NEXT: addl $-1, %eax ; X86-BMI1BMI2-NEXT: adcl $-1, %edx ; X86-BMI1BMI2-NEXT: andl 4(%esi), %edx ; X86-BMI1BMI2-NEXT: andl (%esi), %eax ; X86-BMI1BMI2-NEXT: popl %esi ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_a2_load: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movq %rsi, %rcx ; X64-NOBMI-NEXT: movl $1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: decq %rax ; X64-NOBMI-NEXT: andq (%rdi), %rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_a2_load: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrq %rsi, (%rdi), %rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_a2_load: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhiq %rsi, (%rdi), %rax ; X64-BMI1BMI2-NEXT: retq %val = load i64, i64* %w %onebit = shl i64 1, %numlowbits %mask = add nsw i64 %onebit, -1 %masked = and i64 %mask, %val ret i64 %masked } define i64 @bzhi64_a3_load_indexzext(i64* %w, i8 zeroext %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_a3_load_indexzext: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $1, %eax ; X86-NOBMI-NEXT: xorl %edx, %edx ; X86-NOBMI-NEXT: shldl %cl, %eax, %edx ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: je .LBB8_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %eax, %edx ; X86-NOBMI-NEXT: xorl %eax, %eax ; X86-NOBMI-NEXT: .LBB8_2: ; X86-NOBMI-NEXT: addl $-1, %eax ; X86-NOBMI-NEXT: adcl $-1, %edx ; X86-NOBMI-NEXT: andl 4(%esi), %edx ; X86-NOBMI-NEXT: andl (%esi), %eax ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_a3_load_indexzext: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %esi ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $1, %eax ; 
X86-BMI1NOTBM-NEXT: xorl %edx, %edx ; X86-BMI1NOTBM-NEXT: shldl %cl, %eax, %edx ; X86-BMI1NOTBM-NEXT: shll %cl, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: je .LBB8_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %eax, %edx ; X86-BMI1NOTBM-NEXT: xorl %eax, %eax ; X86-BMI1NOTBM-NEXT: .LBB8_2: ; X86-BMI1NOTBM-NEXT: addl $-1, %eax ; X86-BMI1NOTBM-NEXT: adcl $-1, %edx ; X86-BMI1NOTBM-NEXT: andl 4(%esi), %edx ; X86-BMI1NOTBM-NEXT: andl (%esi), %eax ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_a3_load_indexzext: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %esi ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: movl $1, %eax ; X86-BMI1BMI2-NEXT: xorl %edx, %edx ; X86-BMI1BMI2-NEXT: shldl %cl, %eax, %edx ; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %eax ; X86-BMI1BMI2-NEXT: testb $32, %cl ; X86-BMI1BMI2-NEXT: je .LBB8_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %eax, %edx ; X86-BMI1BMI2-NEXT: xorl %eax, %eax ; X86-BMI1BMI2-NEXT: .LBB8_2: ; X86-BMI1BMI2-NEXT: addl $-1, %eax ; X86-BMI1BMI2-NEXT: adcl $-1, %edx ; X86-BMI1BMI2-NEXT: andl 4(%esi), %edx ; X86-BMI1BMI2-NEXT: andl (%esi), %eax ; X86-BMI1BMI2-NEXT: popl %esi ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_a3_load_indexzext: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: decq %rax ; X64-NOBMI-NEXT: andq (%rdi), %rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_a3_load_indexzext: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: # kill: def $esi killed $esi def $rsi ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrq %rsi, (%rdi), %rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_a3_load_indexzext: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: # kill: def $esi killed $esi def $rsi ; X64-BMI1BMI2-NEXT: bzhiq %rsi, (%rdi), %rax ; X64-BMI1BMI2-NEXT: retq %val = load i64, i64* %w %conv = zext i8 %numlowbits to i64 %onebit = shl i64 1, %conv %mask = add nsw i64 %onebit, -1 %masked = and i64 %mask, %val ret i64 %masked } define i64 @bzhi64_a4_commutative(i64 %val, i64 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_a4_commutative: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $1, %eax ; X86-NOBMI-NEXT: xorl %edx, %edx ; X86-NOBMI-NEXT: shldl %cl, %eax, %edx ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: je .LBB9_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %eax, %edx ; X86-NOBMI-NEXT: xorl %eax, %eax ; X86-NOBMI-NEXT: .LBB9_2: ; X86-NOBMI-NEXT: addl $-1, %eax ; X86-NOBMI-NEXT: adcl $-1, %edx ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %edx ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_a4_commutative: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $1, %eax ; X86-BMI1NOTBM-NEXT: xorl %edx, %edx ; X86-BMI1NOTBM-NEXT: shldl %cl, %eax, %edx ; X86-BMI1NOTBM-NEXT: shll %cl, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: je .LBB9_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %eax, %edx ; X86-BMI1NOTBM-NEXT: xorl %eax, %eax ; X86-BMI1NOTBM-NEXT: .LBB9_2: ; X86-BMI1NOTBM-NEXT: addl $-1, %eax ; X86-BMI1NOTBM-NEXT: adcl $-1, %edx ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %edx ; 
X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_a4_commutative: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: movl $1, %eax ; X86-BMI1BMI2-NEXT: xorl %edx, %edx ; X86-BMI1BMI2-NEXT: shldl %cl, %eax, %edx ; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %eax ; X86-BMI1BMI2-NEXT: testb $32, %cl ; X86-BMI1BMI2-NEXT: je .LBB9_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %eax, %edx ; X86-BMI1BMI2-NEXT: xorl %eax, %eax ; X86-BMI1BMI2-NEXT: .LBB9_2: ; X86-BMI1BMI2-NEXT: addl $-1, %eax ; X86-BMI1BMI2-NEXT: adcl $-1, %edx ; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %edx ; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_a4_commutative: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movq %rsi, %rcx ; X64-NOBMI-NEXT: movl $1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: decq %rax ; X64-NOBMI-NEXT: andq %rdi, %rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_a4_commutative: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrq %rsi, %rdi, %rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_a4_commutative: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhiq %rsi, %rdi, %rax ; X64-BMI1BMI2-NEXT: retq %onebit = shl i64 1, %numlowbits %mask = add nsw i64 %onebit, -1 %masked = and i64 %val, %mask ; swapped order ret i64 %masked } ; 64-bit, but with 32-bit output ; Everything done in 64-bit, truncation happens last. define i32 @bzhi64_32_a0(i64 %val, i64 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_32_a0: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $1, %edx ; X86-NOBMI-NEXT: shll %cl, %edx ; X86-NOBMI-NEXT: xorl %eax, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: jne .LBB10_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %edx, %eax ; X86-NOBMI-NEXT: .LBB10_2: ; X86-NOBMI-NEXT: decl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_32_a0: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $1, %edx ; X86-BMI1NOTBM-NEXT: shll %cl, %edx ; X86-BMI1NOTBM-NEXT: xorl %eax, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: jne .LBB10_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %edx, %eax ; X86-BMI1NOTBM-NEXT: .LBB10_2: ; X86-BMI1NOTBM-NEXT: decl %eax ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_32_a0: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: xorl %eax, %eax ; X86-BMI1BMI2-NEXT: testb $32, %cl ; X86-BMI1BMI2-NEXT: jne .LBB10_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl $1, %eax ; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %eax ; X86-BMI1BMI2-NEXT: .LBB10_2: ; X86-BMI1BMI2-NEXT: decl %eax ; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_32_a0: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movq %rsi, %rcx ; X64-NOBMI-NEXT: movl $1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: decl %eax ; X64-NOBMI-NEXT: andl %edi, %eax ; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_32_a0: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; 
X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_32_a0: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %onebit = shl i64 1, %numlowbits %mask = add nsw i64 %onebit, -1 %masked = and i64 %mask, %val %res = trunc i64 %masked to i32 ret i32 %res } ; Shifting happens in 64-bit, then truncation. Masking is 32-bit. define i32 @bzhi64_32_a1(i64 %val, i32 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_32_a1: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $1, %eax ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: decl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_32_a1: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1NOTBM-NEXT: shll $8, %eax ; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_32_a1: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_32_a1: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: decl %eax ; X64-NOBMI-NEXT: andl %edi, %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_32_a1: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_32_a1: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %truncval = trunc i64 %val to i32 %onebit = shl i32 1, %numlowbits %mask = add nsw i32 %onebit, -1 %masked = and i32 %mask, %truncval ret i32 %masked } ; Shifting happens in 64-bit, then truncation (with extra use). ; Masking is 32-bit. 
define i32 @bzhi64_32_a1_trunc_extrause(i64 %val, i32 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bzhi64_32_a1_trunc_extrause:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %ebx
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: pushl %eax
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %bl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT: movl %esi, (%esp)
; X86-NOBMI-NEXT: calll use32
; X86-NOBMI-NEXT: movl $1, %eax
; X86-NOBMI-NEXT: movl %ebx, %ecx
; X86-NOBMI-NEXT: shll %cl, %eax
; X86-NOBMI-NEXT: decl %eax
; X86-NOBMI-NEXT: andl %esi, %eax
; X86-NOBMI-NEXT: addl $4, %esp
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %ebx
; X86-NOBMI-NEXT: retl
;
; X86-BMI1NOTBM-LABEL: bzhi64_32_a1_trunc_extrause:
; X86-BMI1NOTBM: # %bb.0:
; X86-BMI1NOTBM-NEXT: pushl %ebx
; X86-BMI1NOTBM-NEXT: pushl %esi
; X86-BMI1NOTBM-NEXT: pushl %eax
; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %bl
; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-BMI1NOTBM-NEXT: movl %esi, (%esp)
; X86-BMI1NOTBM-NEXT: calll use32
; X86-BMI1NOTBM-NEXT: shll $8, %ebx
; X86-BMI1NOTBM-NEXT: bextrl %ebx, %esi, %eax
; X86-BMI1NOTBM-NEXT: addl $4, %esp
; X86-BMI1NOTBM-NEXT: popl %esi
; X86-BMI1NOTBM-NEXT: popl %ebx
; X86-BMI1NOTBM-NEXT: retl
;
; X86-BMI1BMI2-LABEL: bzhi64_32_a1_trunc_extrause:
; X86-BMI1BMI2: # %bb.0:
; X86-BMI1BMI2-NEXT: pushl %ebx
; X86-BMI1BMI2-NEXT: pushl %esi
; X86-BMI1BMI2-NEXT: pushl %eax
; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl
; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-BMI1BMI2-NEXT: movl %esi, (%esp)
; X86-BMI1BMI2-NEXT: calll use32
; X86-BMI1BMI2-NEXT: bzhil %ebx, %esi, %eax
; X86-BMI1BMI2-NEXT: addl $4, %esp
; X86-BMI1BMI2-NEXT: popl %esi
; X86-BMI1BMI2-NEXT: popl %ebx
; X86-BMI1BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bzhi64_32_a1_trunc_extrause:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: pushq %rbp
; X64-NOBMI-NEXT: pushq %rbx
; X64-NOBMI-NEXT: pushq %rax
; X64-NOBMI-NEXT: movl %esi, %ebp
; X64-NOBMI-NEXT: movq %rdi, %rbx
; X64-NOBMI-NEXT: callq use32
; X64-NOBMI-NEXT: movl $1, %eax
; X64-NOBMI-NEXT: movl %ebp, %ecx
; X64-NOBMI-NEXT: shll %cl, %eax
; X64-NOBMI-NEXT: decl %eax
; X64-NOBMI-NEXT: andl %ebx, %eax
; X64-NOBMI-NEXT: addq $8, %rsp
; X64-NOBMI-NEXT: popq %rbx
; X64-NOBMI-NEXT: popq %rbp
; X64-NOBMI-NEXT: retq
;
; X64-BMI1NOTBM-LABEL: bzhi64_32_a1_trunc_extrause:
; X64-BMI1NOTBM: # %bb.0:
; X64-BMI1NOTBM-NEXT: pushq %r14
; X64-BMI1NOTBM-NEXT: pushq %rbx
; X64-BMI1NOTBM-NEXT: pushq %rax
; X64-BMI1NOTBM-NEXT: movl %esi, %ebx
; X64-BMI1NOTBM-NEXT: movq %rdi, %r14
; X64-BMI1NOTBM-NEXT: callq use32
; X64-BMI1NOTBM-NEXT: shll $8, %ebx
; X64-BMI1NOTBM-NEXT: bextrl %ebx, %r14d, %eax
; X64-BMI1NOTBM-NEXT: addq $8, %rsp
; X64-BMI1NOTBM-NEXT: popq %rbx
; X64-BMI1NOTBM-NEXT: popq %r14
; X64-BMI1NOTBM-NEXT: retq
;
; X64-BMI1BMI2-LABEL: bzhi64_32_a1_trunc_extrause:
; X64-BMI1BMI2: # %bb.0:
; X64-BMI1BMI2-NEXT: pushq %rbp
; X64-BMI1BMI2-NEXT: pushq %rbx
; X64-BMI1BMI2-NEXT: pushq %rax
; X64-BMI1BMI2-NEXT: movl %esi, %ebp
; X64-BMI1BMI2-NEXT: movq %rdi, %rbx
; X64-BMI1BMI2-NEXT: callq use32
; X64-BMI1BMI2-NEXT: bzhil %ebp, %ebx, %eax
; X64-BMI1BMI2-NEXT: addq $8, %rsp
; X64-BMI1BMI2-NEXT: popq %rbx
; X64-BMI1BMI2-NEXT: popq %rbp
; X64-BMI1BMI2-NEXT: retq
  %truncval = trunc i64 %val to i32
  call void @use32(i32 %truncval)
  %onebit = shl i32 1, %numlowbits
  %mask = add nsw i32 %onebit, -1
  %masked = and i32 %mask, %truncval
  ret i32 %masked
}

; Shifting happens in 64-bit. Mask is 32-bit, but extended to 64-bit.
; Masking is 64-bit. Then truncation.
define i32 @bzhi64_32_a2(i64 %val, i32 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_32_a2: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $1, %eax ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: decl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_32_a2: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1NOTBM-NEXT: shll $8, %eax ; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_32_a2: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_32_a2: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: decl %eax ; X64-NOBMI-NEXT: andl %edi, %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_32_a2: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_32_a2: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %onebit = shl i32 1, %numlowbits %mask = add nsw i32 %onebit, -1 %zextmask = zext i32 %mask to i64 %masked = and i64 %zextmask, %val %truncmasked = trunc i64 %masked to i32 ret i32 %truncmasked } ; Shifting happens in 64-bit. Mask is 32-bit, but calculated in 64-bit. ; Masking is 64-bit. Then truncation. define i32 @bzhi64_32_a3(i64 %val, i64 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_32_a3: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $1, %edx ; X86-NOBMI-NEXT: shll %cl, %edx ; X86-NOBMI-NEXT: xorl %eax, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: jne .LBB14_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %edx, %eax ; X86-NOBMI-NEXT: .LBB14_2: ; X86-NOBMI-NEXT: decl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_32_a3: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $1, %edx ; X86-BMI1NOTBM-NEXT: shll %cl, %edx ; X86-BMI1NOTBM-NEXT: xorl %eax, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: jne .LBB14_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %edx, %eax ; X86-BMI1NOTBM-NEXT: .LBB14_2: ; X86-BMI1NOTBM-NEXT: decl %eax ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_32_a3: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: xorl %eax, %eax ; X86-BMI1BMI2-NEXT: testb $32, %cl ; X86-BMI1BMI2-NEXT: jne .LBB14_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl $1, %eax ; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %eax ; X86-BMI1BMI2-NEXT: .LBB14_2: ; X86-BMI1BMI2-NEXT: decl %eax ; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_32_a3: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movq %rsi, %rcx ; X64-NOBMI-NEXT: movl $1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: decl %eax ; X64-NOBMI-NEXT: andl %edi, %eax ; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_32_a3: ; 
X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_32_a3: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %onebit = shl i64 1, %numlowbits %mask = add nsw i64 %onebit, 4294967295 %masked = and i64 %mask, %val %truncmasked = trunc i64 %masked to i32 ret i32 %truncmasked } ; ---------------------------------------------------------------------------- ; ; Pattern b. 32-bit ; ---------------------------------------------------------------------------- ; define i32 @bzhi32_b0(i32 %val, i32 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_b0: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %eax ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: notl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_b0: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1NOTBM-NEXT: shll $8, %eax ; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_b0: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_b0: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $-1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: notl %eax ; X64-NOBMI-NEXT: andl %edi, %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_b0: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_b0: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %notmask = shl i32 -1, %numlowbits %mask = xor i32 %notmask, -1 %masked = and i32 %mask, %val ret i32 %masked } define i32 @bzhi32_b1_indexzext(i32 %val, i8 zeroext %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_b1_indexzext: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %eax ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: notl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_b1_indexzext: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1NOTBM-NEXT: shll $8, %eax ; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_b1_indexzext: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_b1_indexzext: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $-1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: notl %eax ; X64-NOBMI-NEXT: andl %edi, %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_b1_indexzext: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_b1_indexzext: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %conv = zext i8 %numlowbits to i32 %notmask = shl i32 -1, %conv %mask = 
xor i32 %notmask, -1 %masked = and i32 %mask, %val ret i32 %masked } define i32 @bzhi32_b2_load(i32* %w, i32 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_b2_load: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %eax ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: notl %eax ; X86-NOBMI-NEXT: andl (%edx), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_b2_load: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: shll $8, %ecx ; X86-BMI1NOTBM-NEXT: bextrl %ecx, (%eax), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_b2_load: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: bzhil %ecx, (%eax), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_b2_load: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $-1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: notl %eax ; X64-NOBMI-NEXT: andl (%rdi), %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_b2_load: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, (%rdi), %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_b2_load: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, (%rdi), %eax ; X64-BMI1BMI2-NEXT: retq %val = load i32, i32* %w %notmask = shl i32 -1, %numlowbits %mask = xor i32 %notmask, -1 %masked = and i32 %mask, %val ret i32 %masked } define i32 @bzhi32_b3_load_indexzext(i32* %w, i8 zeroext %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_b3_load_indexzext: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %eax ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: notl %eax ; X86-NOBMI-NEXT: andl (%edx), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_b3_load_indexzext: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: shll $8, %ecx ; X86-BMI1NOTBM-NEXT: bextrl %ecx, (%eax), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_b3_load_indexzext: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: bzhil %ecx, (%eax), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_b3_load_indexzext: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $-1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: notl %eax ; X64-NOBMI-NEXT: andl (%rdi), %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_b3_load_indexzext: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, (%rdi), %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_b3_load_indexzext: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, (%rdi), %eax ; X64-BMI1BMI2-NEXT: retq %val = load i32, i32* %w %conv = zext i8 %numlowbits to i32 %notmask = shl i32 -1, %conv %mask = xor i32 %notmask, -1 %masked = and i32 %mask, %val ret i32 %masked } define i32 @bzhi32_b4_commutative(i32 %val, i32 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_b4_commutative: ; 
X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %eax ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: notl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_b4_commutative: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1NOTBM-NEXT: shll $8, %eax ; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_b4_commutative: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_b4_commutative: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $-1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: notl %eax ; X64-NOBMI-NEXT: andl %edi, %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_b4_commutative: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_b4_commutative: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %notmask = shl i32 -1, %numlowbits %mask = xor i32 %notmask, -1 %masked = and i32 %val, %mask ; swapped order ret i32 %masked } ; 64-bit define i64 @bzhi64_b0(i64 %val, i64 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_b0: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %edx ; X86-NOBMI-NEXT: movl $-1, %esi ; X86-NOBMI-NEXT: shll %cl, %esi ; X86-NOBMI-NEXT: xorl %eax, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: jne .LBB20_1 ; X86-NOBMI-NEXT: # %bb.2: ; X86-NOBMI-NEXT: movl %esi, %eax ; X86-NOBMI-NEXT: jmp .LBB20_3 ; X86-NOBMI-NEXT: .LBB20_1: ; X86-NOBMI-NEXT: movl %esi, %edx ; X86-NOBMI-NEXT: .LBB20_3: ; X86-NOBMI-NEXT: notl %edx ; X86-NOBMI-NEXT: notl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %edx ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_b0: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %edx ; X86-BMI1NOTBM-NEXT: movl $-1, %eax ; X86-BMI1NOTBM-NEXT: shll %cl, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: je .LBB20_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %eax, %edx ; X86-BMI1NOTBM-NEXT: xorl %eax, %eax ; X86-BMI1NOTBM-NEXT: .LBB20_2: ; X86-BMI1NOTBM-NEXT: andnl {{[0-9]+}}(%esp), %eax, %eax ; X86-BMI1NOTBM-NEXT: andnl {{[0-9]+}}(%esp), %edx, %edx ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_b0: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %dl ; X86-BMI1BMI2-NEXT: movl $-1, %ecx ; X86-BMI1BMI2-NEXT: shlxl %edx, %ecx, %eax ; X86-BMI1BMI2-NEXT: testb $32, %dl ; X86-BMI1BMI2-NEXT: je .LBB20_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %eax, %ecx ; X86-BMI1BMI2-NEXT: xorl %eax, %eax ; X86-BMI1BMI2-NEXT: .LBB20_2: ; X86-BMI1BMI2-NEXT: andnl {{[0-9]+}}(%esp), %eax, %eax ; X86-BMI1BMI2-NEXT: andnl {{[0-9]+}}(%esp), %ecx, %edx ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_b0: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movq %rsi, %rcx ; X64-NOBMI-NEXT: movq $-1, %rax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: notq %rax ; 
X64-NOBMI-NEXT: andq %rdi, %rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_b0: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrq %rsi, %rdi, %rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_b0: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhiq %rsi, %rdi, %rax ; X64-BMI1BMI2-NEXT: retq %notmask = shl i64 -1, %numlowbits %mask = xor i64 %notmask, -1 %masked = and i64 %mask, %val ret i64 %masked } define i64 @bzhi64_b1_indexzext(i64 %val, i8 zeroext %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_b1_indexzext: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %edx ; X86-NOBMI-NEXT: movl $-1, %esi ; X86-NOBMI-NEXT: shll %cl, %esi ; X86-NOBMI-NEXT: xorl %eax, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: jne .LBB21_1 ; X86-NOBMI-NEXT: # %bb.2: ; X86-NOBMI-NEXT: movl %esi, %eax ; X86-NOBMI-NEXT: jmp .LBB21_3 ; X86-NOBMI-NEXT: .LBB21_1: ; X86-NOBMI-NEXT: movl %esi, %edx ; X86-NOBMI-NEXT: .LBB21_3: ; X86-NOBMI-NEXT: notl %edx ; X86-NOBMI-NEXT: notl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %edx ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_b1_indexzext: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %edx ; X86-BMI1NOTBM-NEXT: movl $-1, %eax ; X86-BMI1NOTBM-NEXT: shll %cl, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: je .LBB21_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %eax, %edx ; X86-BMI1NOTBM-NEXT: xorl %eax, %eax ; X86-BMI1NOTBM-NEXT: .LBB21_2: ; X86-BMI1NOTBM-NEXT: andnl {{[0-9]+}}(%esp), %eax, %eax ; X86-BMI1NOTBM-NEXT: andnl {{[0-9]+}}(%esp), %edx, %edx ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_b1_indexzext: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %dl ; X86-BMI1BMI2-NEXT: movl $-1, %ecx ; X86-BMI1BMI2-NEXT: shlxl %edx, %ecx, %eax ; X86-BMI1BMI2-NEXT: testb $32, %dl ; X86-BMI1BMI2-NEXT: je .LBB21_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %eax, %ecx ; X86-BMI1BMI2-NEXT: xorl %eax, %eax ; X86-BMI1BMI2-NEXT: .LBB21_2: ; X86-BMI1BMI2-NEXT: andnl {{[0-9]+}}(%esp), %eax, %eax ; X86-BMI1BMI2-NEXT: andnl {{[0-9]+}}(%esp), %ecx, %edx ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_b1_indexzext: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movq $-1, %rax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: notq %rax ; X64-NOBMI-NEXT: andq %rdi, %rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_b1_indexzext: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: # kill: def $esi killed $esi def $rsi ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrq %rsi, %rdi, %rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_b1_indexzext: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: # kill: def $esi killed $esi def $rsi ; X64-BMI1BMI2-NEXT: bzhiq %rsi, %rdi, %rax ; X64-BMI1BMI2-NEXT: retq %conv = zext i8 %numlowbits to i64 %notmask = shl i64 -1, %conv %mask = xor i64 %notmask, -1 %masked = and i64 %mask, %val ret i64 %masked } define i64 @bzhi64_b2_load(i64* %w, i64 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_b2_load: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %edi ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; 
X86-NOBMI-NEXT: movl $-1, %edx ; X86-NOBMI-NEXT: movl $-1, %edi ; X86-NOBMI-NEXT: shll %cl, %edi ; X86-NOBMI-NEXT: xorl %eax, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: jne .LBB22_1 ; X86-NOBMI-NEXT: # %bb.2: ; X86-NOBMI-NEXT: movl %edi, %eax ; X86-NOBMI-NEXT: jmp .LBB22_3 ; X86-NOBMI-NEXT: .LBB22_1: ; X86-NOBMI-NEXT: movl %edi, %edx ; X86-NOBMI-NEXT: .LBB22_3: ; X86-NOBMI-NEXT: notl %edx ; X86-NOBMI-NEXT: notl %eax ; X86-NOBMI-NEXT: andl (%esi), %eax ; X86-NOBMI-NEXT: andl 4(%esi), %edx ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: popl %edi ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_b2_load: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %esi ; X86-BMI1NOTBM-NEXT: movl $-1, %eax ; X86-BMI1NOTBM-NEXT: shll %cl, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: je .LBB22_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %eax, %esi ; X86-BMI1NOTBM-NEXT: xorl %eax, %eax ; X86-BMI1NOTBM-NEXT: .LBB22_2: ; X86-BMI1NOTBM-NEXT: andnl (%edx), %eax, %eax ; X86-BMI1NOTBM-NEXT: andnl 4(%edx), %esi, %edx ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_b2_load: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %ebx ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl ; X86-BMI1BMI2-NEXT: movl $-1, %edx ; X86-BMI1BMI2-NEXT: shlxl %ebx, %edx, %eax ; X86-BMI1BMI2-NEXT: testb $32, %bl ; X86-BMI1BMI2-NEXT: je .LBB22_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %eax, %edx ; X86-BMI1BMI2-NEXT: xorl %eax, %eax ; X86-BMI1BMI2-NEXT: .LBB22_2: ; X86-BMI1BMI2-NEXT: andnl (%ecx), %eax, %eax ; X86-BMI1BMI2-NEXT: andnl 4(%ecx), %edx, %edx ; X86-BMI1BMI2-NEXT: popl %ebx ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_b2_load: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movq %rsi, %rcx ; X64-NOBMI-NEXT: movq $-1, %rax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: notq %rax ; X64-NOBMI-NEXT: andq (%rdi), %rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_b2_load: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrq %rsi, (%rdi), %rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_b2_load: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhiq %rsi, (%rdi), %rax ; X64-BMI1BMI2-NEXT: retq %val = load i64, i64* %w %notmask = shl i64 -1, %numlowbits %mask = xor i64 %notmask, -1 %masked = and i64 %mask, %val ret i64 %masked } define i64 @bzhi64_b3_load_indexzext(i64* %w, i8 zeroext %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_b3_load_indexzext: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %edi ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %edx ; X86-NOBMI-NEXT: movl $-1, %edi ; X86-NOBMI-NEXT: shll %cl, %edi ; X86-NOBMI-NEXT: xorl %eax, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: jne .LBB23_1 ; X86-NOBMI-NEXT: # %bb.2: ; X86-NOBMI-NEXT: movl %edi, %eax ; X86-NOBMI-NEXT: jmp .LBB23_3 ; X86-NOBMI-NEXT: .LBB23_1: ; X86-NOBMI-NEXT: movl %edi, %edx ; X86-NOBMI-NEXT: .LBB23_3: ; X86-NOBMI-NEXT: notl %edx ; X86-NOBMI-NEXT: notl %eax ; X86-NOBMI-NEXT: andl (%esi), %eax ; X86-NOBMI-NEXT: andl 4(%esi), %edx ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: popl %edi ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: 
bzhi64_b3_load_indexzext: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %esi ; X86-BMI1NOTBM-NEXT: movl $-1, %eax ; X86-BMI1NOTBM-NEXT: shll %cl, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: je .LBB23_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %eax, %esi ; X86-BMI1NOTBM-NEXT: xorl %eax, %eax ; X86-BMI1NOTBM-NEXT: .LBB23_2: ; X86-BMI1NOTBM-NEXT: andnl (%edx), %eax, %eax ; X86-BMI1NOTBM-NEXT: andnl 4(%edx), %esi, %edx ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_b3_load_indexzext: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %ebx ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl ; X86-BMI1BMI2-NEXT: movl $-1, %edx ; X86-BMI1BMI2-NEXT: shlxl %ebx, %edx, %eax ; X86-BMI1BMI2-NEXT: testb $32, %bl ; X86-BMI1BMI2-NEXT: je .LBB23_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %eax, %edx ; X86-BMI1BMI2-NEXT: xorl %eax, %eax ; X86-BMI1BMI2-NEXT: .LBB23_2: ; X86-BMI1BMI2-NEXT: andnl (%ecx), %eax, %eax ; X86-BMI1BMI2-NEXT: andnl 4(%ecx), %edx, %edx ; X86-BMI1BMI2-NEXT: popl %ebx ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_b3_load_indexzext: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movq $-1, %rax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: notq %rax ; X64-NOBMI-NEXT: andq (%rdi), %rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_b3_load_indexzext: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: # kill: def $esi killed $esi def $rsi ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrq %rsi, (%rdi), %rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_b3_load_indexzext: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: # kill: def $esi killed $esi def $rsi ; X64-BMI1BMI2-NEXT: bzhiq %rsi, (%rdi), %rax ; X64-BMI1BMI2-NEXT: retq %val = load i64, i64* %w %conv = zext i8 %numlowbits to i64 %notmask = shl i64 -1, %conv %mask = xor i64 %notmask, -1 %masked = and i64 %mask, %val ret i64 %masked } define i64 @bzhi64_b4_commutative(i64 %val, i64 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_b4_commutative: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %edx ; X86-NOBMI-NEXT: movl $-1, %esi ; X86-NOBMI-NEXT: shll %cl, %esi ; X86-NOBMI-NEXT: xorl %eax, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: jne .LBB24_1 ; X86-NOBMI-NEXT: # %bb.2: ; X86-NOBMI-NEXT: movl %esi, %eax ; X86-NOBMI-NEXT: jmp .LBB24_3 ; X86-NOBMI-NEXT: .LBB24_1: ; X86-NOBMI-NEXT: movl %esi, %edx ; X86-NOBMI-NEXT: .LBB24_3: ; X86-NOBMI-NEXT: notl %edx ; X86-NOBMI-NEXT: notl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %edx ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_b4_commutative: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %edx ; X86-BMI1NOTBM-NEXT: movl $-1, %eax ; X86-BMI1NOTBM-NEXT: shll %cl, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: je .LBB24_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %eax, %edx ; X86-BMI1NOTBM-NEXT: xorl %eax, %eax ; X86-BMI1NOTBM-NEXT: .LBB24_2: ; X86-BMI1NOTBM-NEXT: andnl {{[0-9]+}}(%esp), %eax, %eax ; X86-BMI1NOTBM-NEXT: andnl 
{{[0-9]+}}(%esp), %edx, %edx ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_b4_commutative: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %dl ; X86-BMI1BMI2-NEXT: movl $-1, %ecx ; X86-BMI1BMI2-NEXT: shlxl %edx, %ecx, %eax ; X86-BMI1BMI2-NEXT: testb $32, %dl ; X86-BMI1BMI2-NEXT: je .LBB24_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %eax, %ecx ; X86-BMI1BMI2-NEXT: xorl %eax, %eax ; X86-BMI1BMI2-NEXT: .LBB24_2: ; X86-BMI1BMI2-NEXT: andnl {{[0-9]+}}(%esp), %eax, %eax ; X86-BMI1BMI2-NEXT: andnl {{[0-9]+}}(%esp), %ecx, %edx ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_b4_commutative: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movq %rsi, %rcx ; X64-NOBMI-NEXT: movq $-1, %rax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: notq %rax ; X64-NOBMI-NEXT: andq %rdi, %rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_b4_commutative: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrq %rsi, %rdi, %rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_b4_commutative: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhiq %rsi, %rdi, %rax ; X64-BMI1BMI2-NEXT: retq %notmask = shl i64 -1, %numlowbits %mask = xor i64 %notmask, -1 %masked = and i64 %val, %mask ; swapped order ret i64 %masked } ; 64-bit, but with 32-bit output ; Everything done in 64-bit, truncation happens last. define i32 @bzhi64_32_b0(i64 %val, i8 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_32_b0: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %edx ; X86-NOBMI-NEXT: shll %cl, %edx ; X86-NOBMI-NEXT: xorl %eax, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: jne .LBB25_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %edx, %eax ; X86-NOBMI-NEXT: .LBB25_2: ; X86-NOBMI-NEXT: notl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_32_b0: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %eax ; X86-BMI1NOTBM-NEXT: shll %cl, %eax ; X86-BMI1NOTBM-NEXT: xorl %edx, %edx ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: jne .LBB25_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %eax, %edx ; X86-BMI1NOTBM-NEXT: .LBB25_2: ; X86-BMI1NOTBM-NEXT: andnl {{[0-9]+}}(%esp), %edx, %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_32_b0: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: xorl %ecx, %ecx ; X86-BMI1BMI2-NEXT: testb $32, %al ; X86-BMI1BMI2-NEXT: jne .LBB25_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl $-1, %ecx ; X86-BMI1BMI2-NEXT: shlxl %eax, %ecx, %ecx ; X86-BMI1BMI2-NEXT: .LBB25_2: ; X86-BMI1BMI2-NEXT: andnl {{[0-9]+}}(%esp), %ecx, %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_32_b0: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movq $-1, %rax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: notl %eax ; X64-NOBMI-NEXT: andl %edi, %eax ; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_32_b0: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_32_b0: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq 
%widenumlowbits = zext i8 %numlowbits to i64 %notmask = shl nsw i64 -1, %widenumlowbits %mask = xor i64 %notmask, -1 %wideres = and i64 %val, %mask %res = trunc i64 %wideres to i32 ret i32 %res } ; Shifting happens in 64-bit, then truncation. Masking is 32-bit. define i32 @bzhi64_32_b1(i64 %val, i8 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_32_b1: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %eax ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: notl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_32_b1: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1NOTBM-NEXT: shll $8, %eax ; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_32_b1: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_32_b1: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $-1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: notl %eax ; X64-NOBMI-NEXT: andl %edi, %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_32_b1: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_32_b1: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %truncval = trunc i64 %val to i32 %widenumlowbits = zext i8 %numlowbits to i32 %notmask = shl nsw i32 -1, %widenumlowbits %mask = xor i32 %notmask, -1 %res = and i32 %truncval, %mask ret i32 %res } ; Shifting happens in 64-bit. Mask is 32-bit, but extended to 64-bit. ; Masking is 64-bit. Then truncation. define i32 @bzhi64_32_b2(i64 %val, i8 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_32_b2: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %eax ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: notl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_32_b2: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1NOTBM-NEXT: shll $8, %eax ; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_32_b2: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_32_b2: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $-1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: notl %eax ; X64-NOBMI-NEXT: andl %edi, %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_32_b2: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_32_b2: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %widenumlowbits = zext i8 %numlowbits to i32 %notmask = shl nsw i32 -1, %widenumlowbits %mask = xor i32 %notmask, -1 %zextmask = zext i32 %mask to i64 %wideres = and i64 %val, %zextmask %res = trunc i64 %wideres to i32 ret i32 %res } ; Shifting happens in 64-bit. 
Mask is 32-bit, but calculated in 64-bit. ; Masking is 64-bit. Then truncation. define i32 @bzhi64_32_b3(i64 %val, i8 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_32_b3: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %edx ; X86-NOBMI-NEXT: shll %cl, %edx ; X86-NOBMI-NEXT: xorl %eax, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: jne .LBB28_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %edx, %eax ; X86-NOBMI-NEXT: .LBB28_2: ; X86-NOBMI-NEXT: notl %eax ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_32_b3: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %eax ; X86-BMI1NOTBM-NEXT: shll %cl, %eax ; X86-BMI1NOTBM-NEXT: xorl %edx, %edx ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: jne .LBB28_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %eax, %edx ; X86-BMI1NOTBM-NEXT: .LBB28_2: ; X86-BMI1NOTBM-NEXT: andnl {{[0-9]+}}(%esp), %edx, %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_32_b3: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: xorl %ecx, %ecx ; X86-BMI1BMI2-NEXT: testb $32, %al ; X86-BMI1BMI2-NEXT: jne .LBB28_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl $-1, %ecx ; X86-BMI1BMI2-NEXT: shlxl %eax, %ecx, %ecx ; X86-BMI1BMI2-NEXT: .LBB28_2: ; X86-BMI1BMI2-NEXT: andnl {{[0-9]+}}(%esp), %ecx, %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_32_b3: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl $4294967295, %eax # imm = 0xFFFFFFFF ; X64-NOBMI-NEXT: movl $4294967295, %edx # imm = 0xFFFFFFFF ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shlq %cl, %rdx ; X64-NOBMI-NEXT: xorl %edx, %eax ; X64-NOBMI-NEXT: andl %edi, %eax ; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_32_b3: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_32_b3: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %widenumlowbits = zext i8 %numlowbits to i64 %notmask = shl nsw i64 4294967295, %widenumlowbits %mask = xor i64 %notmask, 4294967295 %wideres = and i64 %val, %mask %res = trunc i64 %wideres to i32 ret i32 %res } ; ---------------------------------------------------------------------------- ; ; Pattern c. 
32-bit ; ---------------------------------------------------------------------------- ; declare void @use32(i32) define i32 @bzhi32_c0(i32 %val, i32 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_c0: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: subl $8, %esp ; X86-NOBMI-NEXT: xorl %ecx, %ecx ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %esi ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-NOBMI-NEXT: shrl %cl, %esi ; X86-NOBMI-NEXT: movl %esi, (%esp) ; X86-NOBMI-NEXT: calll use32 ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %esi ; X86-NOBMI-NEXT: movl %esi, %eax ; X86-NOBMI-NEXT: addl $8, %esp ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_c0: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: subl $8, %esp ; X86-BMI1NOTBM-NEXT: xorl %ecx, %ecx ; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %esi ; X86-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-BMI1NOTBM-NEXT: shrl %cl, %esi ; X86-BMI1NOTBM-NEXT: movl %esi, (%esp) ; X86-BMI1NOTBM-NEXT: calll use32 ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %esi ; X86-BMI1NOTBM-NEXT: movl %esi, %eax ; X86-BMI1NOTBM-NEXT: addl $8, %esp ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_c0: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %ebx ; X86-BMI1BMI2-NEXT: subl $8, %esp ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl ; X86-BMI1BMI2-NEXT: movl %ebx, %eax ; X86-BMI1BMI2-NEXT: negb %al ; X86-BMI1BMI2-NEXT: movl $-1, %ecx ; X86-BMI1BMI2-NEXT: shrxl %eax, %ecx, %eax ; X86-BMI1BMI2-NEXT: movl %eax, (%esp) ; X86-BMI1BMI2-NEXT: calll use32 ; X86-BMI1BMI2-NEXT: bzhil %ebx, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: addl $8, %esp ; X86-BMI1BMI2-NEXT: popl %ebx ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_c0: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: pushq %rbp ; X64-NOBMI-NEXT: pushq %rbx ; X64-NOBMI-NEXT: pushq %rax ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl %edi, %ebx ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: movl $-1, %ebp ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shrl %cl, %ebp ; X64-NOBMI-NEXT: movl %ebp, %edi ; X64-NOBMI-NEXT: callq use32 ; X64-NOBMI-NEXT: andl %ebx, %ebp ; X64-NOBMI-NEXT: movl %ebp, %eax ; X64-NOBMI-NEXT: addq $8, %rsp ; X64-NOBMI-NEXT: popq %rbx ; X64-NOBMI-NEXT: popq %rbp ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_c0: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: pushq %rbp ; X64-BMI1NOTBM-NEXT: pushq %rbx ; X64-BMI1NOTBM-NEXT: pushq %rax ; X64-BMI1NOTBM-NEXT: movl %esi, %ecx ; X64-BMI1NOTBM-NEXT: movl %edi, %ebx ; X64-BMI1NOTBM-NEXT: negb %cl ; X64-BMI1NOTBM-NEXT: movl $-1, %ebp ; X64-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-BMI1NOTBM-NEXT: shrl %cl, %ebp ; X64-BMI1NOTBM-NEXT: movl %ebp, %edi ; X64-BMI1NOTBM-NEXT: callq use32 ; X64-BMI1NOTBM-NEXT: andl %ebx, %ebp ; X64-BMI1NOTBM-NEXT: movl %ebp, %eax ; X64-BMI1NOTBM-NEXT: addq $8, %rsp ; X64-BMI1NOTBM-NEXT: popq %rbx ; X64-BMI1NOTBM-NEXT: popq %rbp ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_c0: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: pushq %rbp ; X64-BMI1BMI2-NEXT: pushq %rbx ; X64-BMI1BMI2-NEXT: pushq %rax ; X64-BMI1BMI2-NEXT: movl %esi, %ebx ; X64-BMI1BMI2-NEXT: movl %edi, %ebp ; X64-BMI1BMI2-NEXT: movl %ebx, %eax ; X64-BMI1BMI2-NEXT: negb %al ; X64-BMI1BMI2-NEXT: movl $-1, %ecx ; X64-BMI1BMI2-NEXT: shrxl %eax, %ecx, %edi ; X64-BMI1BMI2-NEXT: callq 
use32 ; X64-BMI1BMI2-NEXT: bzhil %ebx, %ebp, %eax ; X64-BMI1BMI2-NEXT: addq $8, %rsp ; X64-BMI1BMI2-NEXT: popq %rbx ; X64-BMI1BMI2-NEXT: popq %rbp ; X64-BMI1BMI2-NEXT: retq %numhighbits = sub i32 32, %numlowbits %mask = lshr i32 -1, %numhighbits call void @use32(i32 %mask) %masked = and i32 %mask, %val ret i32 %masked } define i32 @bzhi32_c1_indexzext(i32 %val, i8 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_c1_indexzext: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: subl $8, %esp ; X86-NOBMI-NEXT: xorl %ecx, %ecx ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %esi ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-NOBMI-NEXT: shrl %cl, %esi ; X86-NOBMI-NEXT: movl %esi, (%esp) ; X86-NOBMI-NEXT: calll use32 ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %esi ; X86-NOBMI-NEXT: movl %esi, %eax ; X86-NOBMI-NEXT: addl $8, %esp ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_c1_indexzext: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: subl $8, %esp ; X86-BMI1NOTBM-NEXT: xorl %ecx, %ecx ; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %esi ; X86-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-BMI1NOTBM-NEXT: shrl %cl, %esi ; X86-BMI1NOTBM-NEXT: movl %esi, (%esp) ; X86-BMI1NOTBM-NEXT: calll use32 ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %esi ; X86-BMI1NOTBM-NEXT: movl %esi, %eax ; X86-BMI1NOTBM-NEXT: addl $8, %esp ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_c1_indexzext: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %ebx ; X86-BMI1BMI2-NEXT: subl $8, %esp ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl ; X86-BMI1BMI2-NEXT: movl %ebx, %eax ; X86-BMI1BMI2-NEXT: negb %al ; X86-BMI1BMI2-NEXT: movl $-1, %ecx ; X86-BMI1BMI2-NEXT: shrxl %eax, %ecx, %eax ; X86-BMI1BMI2-NEXT: movl %eax, (%esp) ; X86-BMI1BMI2-NEXT: calll use32 ; X86-BMI1BMI2-NEXT: bzhil %ebx, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: addl $8, %esp ; X86-BMI1BMI2-NEXT: popl %ebx ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_c1_indexzext: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: pushq %rbp ; X64-NOBMI-NEXT: pushq %rbx ; X64-NOBMI-NEXT: pushq %rax ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl %edi, %ebx ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: movl $-1, %ebp ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shrl %cl, %ebp ; X64-NOBMI-NEXT: movl %ebp, %edi ; X64-NOBMI-NEXT: callq use32 ; X64-NOBMI-NEXT: andl %ebx, %ebp ; X64-NOBMI-NEXT: movl %ebp, %eax ; X64-NOBMI-NEXT: addq $8, %rsp ; X64-NOBMI-NEXT: popq %rbx ; X64-NOBMI-NEXT: popq %rbp ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_c1_indexzext: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: pushq %rbp ; X64-BMI1NOTBM-NEXT: pushq %rbx ; X64-BMI1NOTBM-NEXT: pushq %rax ; X64-BMI1NOTBM-NEXT: movl %esi, %ecx ; X64-BMI1NOTBM-NEXT: movl %edi, %ebx ; X64-BMI1NOTBM-NEXT: negb %cl ; X64-BMI1NOTBM-NEXT: movl $-1, %ebp ; X64-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-BMI1NOTBM-NEXT: shrl %cl, %ebp ; X64-BMI1NOTBM-NEXT: movl %ebp, %edi ; X64-BMI1NOTBM-NEXT: callq use32 ; X64-BMI1NOTBM-NEXT: andl %ebx, %ebp ; X64-BMI1NOTBM-NEXT: movl %ebp, %eax ; X64-BMI1NOTBM-NEXT: addq $8, %rsp ; X64-BMI1NOTBM-NEXT: popq %rbx ; X64-BMI1NOTBM-NEXT: popq %rbp ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_c1_indexzext: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: pushq %rbp ; X64-BMI1BMI2-NEXT: pushq %rbx ; 
X64-BMI1BMI2-NEXT: pushq %rax ; X64-BMI1BMI2-NEXT: movl %esi, %ebx ; X64-BMI1BMI2-NEXT: movl %edi, %ebp ; X64-BMI1BMI2-NEXT: movl %ebx, %eax ; X64-BMI1BMI2-NEXT: negb %al ; X64-BMI1BMI2-NEXT: movl $-1, %ecx ; X64-BMI1BMI2-NEXT: shrxl %eax, %ecx, %edi ; X64-BMI1BMI2-NEXT: callq use32 ; X64-BMI1BMI2-NEXT: bzhil %ebx, %ebp, %eax ; X64-BMI1BMI2-NEXT: addq $8, %rsp ; X64-BMI1BMI2-NEXT: popq %rbx ; X64-BMI1BMI2-NEXT: popq %rbp ; X64-BMI1BMI2-NEXT: retq %numhighbits = sub i8 32, %numlowbits %sh_prom = zext i8 %numhighbits to i32 %mask = lshr i32 -1, %sh_prom call void @use32(i32 %mask) %masked = and i32 %mask, %val ret i32 %masked } define i32 @bzhi32_c2_load(i32* %w, i32 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_c2_load: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: subl $8, %esp ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: xorl %ecx, %ecx ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %edx ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-NOBMI-NEXT: shrl %cl, %edx ; X86-NOBMI-NEXT: movl (%eax), %esi ; X86-NOBMI-NEXT: andl %edx, %esi ; X86-NOBMI-NEXT: movl %edx, (%esp) ; X86-NOBMI-NEXT: calll use32 ; X86-NOBMI-NEXT: movl %esi, %eax ; X86-NOBMI-NEXT: addl $8, %esp ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_c2_load: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: subl $8, %esp ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: xorl %ecx, %ecx ; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %edx ; X86-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-BMI1NOTBM-NEXT: shrl %cl, %edx ; X86-BMI1NOTBM-NEXT: movl (%eax), %esi ; X86-BMI1NOTBM-NEXT: andl %edx, %esi ; X86-BMI1NOTBM-NEXT: movl %edx, (%esp) ; X86-BMI1NOTBM-NEXT: calll use32 ; X86-BMI1NOTBM-NEXT: movl %esi, %eax ; X86-BMI1NOTBM-NEXT: addl $8, %esp ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_c2_load: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %esi ; X86-BMI1BMI2-NEXT: subl $8, %esp ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: bzhil %ecx, (%eax), %esi ; X86-BMI1BMI2-NEXT: # kill: def $cl killed $cl killed $ecx def $ecx ; X86-BMI1BMI2-NEXT: negb %cl ; X86-BMI1BMI2-NEXT: movl $-1, %eax ; X86-BMI1BMI2-NEXT: shrxl %ecx, %eax, %eax ; X86-BMI1BMI2-NEXT: movl %eax, (%esp) ; X86-BMI1BMI2-NEXT: calll use32 ; X86-BMI1BMI2-NEXT: movl %esi, %eax ; X86-BMI1BMI2-NEXT: addl $8, %esp ; X86-BMI1BMI2-NEXT: popl %esi ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_c2_load: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: pushq %rbx ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: movl $-1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shrl %cl, %eax ; X64-NOBMI-NEXT: movl (%rdi), %ebx ; X64-NOBMI-NEXT: andl %eax, %ebx ; X64-NOBMI-NEXT: movl %eax, %edi ; X64-NOBMI-NEXT: callq use32 ; X64-NOBMI-NEXT: movl %ebx, %eax ; X64-NOBMI-NEXT: popq %rbx ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_c2_load: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: pushq %rbx ; X64-BMI1NOTBM-NEXT: movl %esi, %ecx ; X64-BMI1NOTBM-NEXT: negb %cl ; X64-BMI1NOTBM-NEXT: movl $-1, %eax ; X64-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-BMI1NOTBM-NEXT: shrl %cl, %eax ; X64-BMI1NOTBM-NEXT: movl (%rdi), %ebx ; X64-BMI1NOTBM-NEXT: andl %eax, %ebx ; 
X64-BMI1NOTBM-NEXT: movl %eax, %edi ; X64-BMI1NOTBM-NEXT: callq use32 ; X64-BMI1NOTBM-NEXT: movl %ebx, %eax ; X64-BMI1NOTBM-NEXT: popq %rbx ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_c2_load: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: pushq %rbx ; X64-BMI1BMI2-NEXT: bzhil %esi, (%rdi), %ebx ; X64-BMI1BMI2-NEXT: # kill: def $sil killed $sil killed $esi def $esi ; X64-BMI1BMI2-NEXT: negb %sil ; X64-BMI1BMI2-NEXT: movl $-1, %eax ; X64-BMI1BMI2-NEXT: shrxl %esi, %eax, %edi ; X64-BMI1BMI2-NEXT: callq use32 ; X64-BMI1BMI2-NEXT: movl %ebx, %eax ; X64-BMI1BMI2-NEXT: popq %rbx ; X64-BMI1BMI2-NEXT: retq %val = load i32, i32* %w %numhighbits = sub i32 32, %numlowbits %mask = lshr i32 -1, %numhighbits call void @use32(i32 %mask) %masked = and i32 %mask, %val ret i32 %masked } define i32 @bzhi32_c3_load_indexzext(i32* %w, i8 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_c3_load_indexzext: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: subl $8, %esp ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: xorl %ecx, %ecx ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %edx ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-NOBMI-NEXT: shrl %cl, %edx ; X86-NOBMI-NEXT: movl (%eax), %esi ; X86-NOBMI-NEXT: andl %edx, %esi ; X86-NOBMI-NEXT: movl %edx, (%esp) ; X86-NOBMI-NEXT: calll use32 ; X86-NOBMI-NEXT: movl %esi, %eax ; X86-NOBMI-NEXT: addl $8, %esp ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_c3_load_indexzext: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: subl $8, %esp ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: xorl %ecx, %ecx ; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %edx ; X86-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-BMI1NOTBM-NEXT: shrl %cl, %edx ; X86-BMI1NOTBM-NEXT: movl (%eax), %esi ; X86-BMI1NOTBM-NEXT: andl %edx, %esi ; X86-BMI1NOTBM-NEXT: movl %edx, (%esp) ; X86-BMI1NOTBM-NEXT: calll use32 ; X86-BMI1NOTBM-NEXT: movl %esi, %eax ; X86-BMI1NOTBM-NEXT: addl $8, %esp ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_c3_load_indexzext: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %esi ; X86-BMI1BMI2-NEXT: subl $8, %esp ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: bzhil %ecx, (%eax), %esi ; X86-BMI1BMI2-NEXT: # kill: def $cl killed $cl killed $ecx def $ecx ; X86-BMI1BMI2-NEXT: negb %cl ; X86-BMI1BMI2-NEXT: movl $-1, %eax ; X86-BMI1BMI2-NEXT: shrxl %ecx, %eax, %eax ; X86-BMI1BMI2-NEXT: movl %eax, (%esp) ; X86-BMI1BMI2-NEXT: calll use32 ; X86-BMI1BMI2-NEXT: movl %esi, %eax ; X86-BMI1BMI2-NEXT: addl $8, %esp ; X86-BMI1BMI2-NEXT: popl %esi ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_c3_load_indexzext: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: pushq %rbx ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: movl $-1, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shrl %cl, %eax ; X64-NOBMI-NEXT: movl (%rdi), %ebx ; X64-NOBMI-NEXT: andl %eax, %ebx ; X64-NOBMI-NEXT: movl %eax, %edi ; X64-NOBMI-NEXT: callq use32 ; X64-NOBMI-NEXT: movl %ebx, %eax ; X64-NOBMI-NEXT: popq %rbx ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_c3_load_indexzext: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: pushq %rbx ; X64-BMI1NOTBM-NEXT: movl %esi, %ecx ; X64-BMI1NOTBM-NEXT: negb %cl ; 
X64-BMI1NOTBM-NEXT: movl $-1, %eax ; X64-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-BMI1NOTBM-NEXT: shrl %cl, %eax ; X64-BMI1NOTBM-NEXT: movl (%rdi), %ebx ; X64-BMI1NOTBM-NEXT: andl %eax, %ebx ; X64-BMI1NOTBM-NEXT: movl %eax, %edi ; X64-BMI1NOTBM-NEXT: callq use32 ; X64-BMI1NOTBM-NEXT: movl %ebx, %eax ; X64-BMI1NOTBM-NEXT: popq %rbx ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_c3_load_indexzext: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: pushq %rbx ; X64-BMI1BMI2-NEXT: bzhil %esi, (%rdi), %ebx ; X64-BMI1BMI2-NEXT: # kill: def $sil killed $sil killed $esi def $esi ; X64-BMI1BMI2-NEXT: negb %sil ; X64-BMI1BMI2-NEXT: movl $-1, %eax ; X64-BMI1BMI2-NEXT: shrxl %esi, %eax, %edi ; X64-BMI1BMI2-NEXT: callq use32 ; X64-BMI1BMI2-NEXT: movl %ebx, %eax ; X64-BMI1BMI2-NEXT: popq %rbx ; X64-BMI1BMI2-NEXT: retq %val = load i32, i32* %w %numhighbits = sub i8 32, %numlowbits %sh_prom = zext i8 %numhighbits to i32 %mask = lshr i32 -1, %sh_prom call void @use32(i32 %mask) %masked = and i32 %mask, %val ret i32 %masked } define i32 @bzhi32_c4_commutative(i32 %val, i32 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_c4_commutative: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: subl $8, %esp ; X86-NOBMI-NEXT: xorl %ecx, %ecx ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %esi ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-NOBMI-NEXT: shrl %cl, %esi ; X86-NOBMI-NEXT: movl %esi, (%esp) ; X86-NOBMI-NEXT: calll use32 ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %esi ; X86-NOBMI-NEXT: movl %esi, %eax ; X86-NOBMI-NEXT: addl $8, %esp ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_c4_commutative: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: subl $8, %esp ; X86-BMI1NOTBM-NEXT: xorl %ecx, %ecx ; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %esi ; X86-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-BMI1NOTBM-NEXT: shrl %cl, %esi ; X86-BMI1NOTBM-NEXT: movl %esi, (%esp) ; X86-BMI1NOTBM-NEXT: calll use32 ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %esi ; X86-BMI1NOTBM-NEXT: movl %esi, %eax ; X86-BMI1NOTBM-NEXT: addl $8, %esp ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_c4_commutative: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %ebx ; X86-BMI1BMI2-NEXT: subl $8, %esp ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl ; X86-BMI1BMI2-NEXT: movl %ebx, %eax ; X86-BMI1BMI2-NEXT: negb %al ; X86-BMI1BMI2-NEXT: movl $-1, %ecx ; X86-BMI1BMI2-NEXT: shrxl %eax, %ecx, %eax ; X86-BMI1BMI2-NEXT: movl %eax, (%esp) ; X86-BMI1BMI2-NEXT: calll use32 ; X86-BMI1BMI2-NEXT: bzhil %ebx, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: addl $8, %esp ; X86-BMI1BMI2-NEXT: popl %ebx ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_c4_commutative: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: pushq %rbp ; X64-NOBMI-NEXT: pushq %rbx ; X64-NOBMI-NEXT: pushq %rax ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl %edi, %ebx ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: movl $-1, %ebp ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shrl %cl, %ebp ; X64-NOBMI-NEXT: movl %ebp, %edi ; X64-NOBMI-NEXT: callq use32 ; X64-NOBMI-NEXT: andl %ebx, %ebp ; X64-NOBMI-NEXT: movl %ebp, %eax ; X64-NOBMI-NEXT: addq $8, %rsp ; X64-NOBMI-NEXT: popq %rbx ; X64-NOBMI-NEXT: popq %rbp ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_c4_commutative: ; X64-BMI1NOTBM: # %bb.0: ; 
X64-BMI1NOTBM-NEXT: pushq %rbp ; X64-BMI1NOTBM-NEXT: pushq %rbx ; X64-BMI1NOTBM-NEXT: pushq %rax ; X64-BMI1NOTBM-NEXT: movl %esi, %ecx ; X64-BMI1NOTBM-NEXT: movl %edi, %ebx ; X64-BMI1NOTBM-NEXT: negb %cl ; X64-BMI1NOTBM-NEXT: movl $-1, %ebp ; X64-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-BMI1NOTBM-NEXT: shrl %cl, %ebp ; X64-BMI1NOTBM-NEXT: movl %ebp, %edi ; X64-BMI1NOTBM-NEXT: callq use32 ; X64-BMI1NOTBM-NEXT: andl %ebx, %ebp ; X64-BMI1NOTBM-NEXT: movl %ebp, %eax ; X64-BMI1NOTBM-NEXT: addq $8, %rsp ; X64-BMI1NOTBM-NEXT: popq %rbx ; X64-BMI1NOTBM-NEXT: popq %rbp ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_c4_commutative: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: pushq %rbp ; X64-BMI1BMI2-NEXT: pushq %rbx ; X64-BMI1BMI2-NEXT: pushq %rax ; X64-BMI1BMI2-NEXT: movl %esi, %ebx ; X64-BMI1BMI2-NEXT: movl %edi, %ebp ; X64-BMI1BMI2-NEXT: movl %ebx, %eax ; X64-BMI1BMI2-NEXT: negb %al ; X64-BMI1BMI2-NEXT: movl $-1, %ecx ; X64-BMI1BMI2-NEXT: shrxl %eax, %ecx, %edi ; X64-BMI1BMI2-NEXT: callq use32 ; X64-BMI1BMI2-NEXT: bzhil %ebx, %ebp, %eax ; X64-BMI1BMI2-NEXT: addq $8, %rsp ; X64-BMI1BMI2-NEXT: popq %rbx ; X64-BMI1BMI2-NEXT: popq %rbp ; X64-BMI1BMI2-NEXT: retq %numhighbits = sub i32 32, %numlowbits %mask = lshr i32 -1, %numhighbits call void @use32(i32 %mask) %masked = and i32 %val, %mask ; swapped order ret i32 %masked } ; 64-bit declare void @use64(i64) define i64 @bzhi64_c0(i64 %val, i64 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_c0: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %edi ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: pushl %eax ; X86-NOBMI-NEXT: movb $64, %cl ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %esi ; X86-NOBMI-NEXT: movl $-1, %edi ; X86-NOBMI-NEXT: shrl %cl, %edi ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: je .LBB34_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %edi, %esi ; X86-NOBMI-NEXT: xorl %edi, %edi ; X86-NOBMI-NEXT: .LBB34_2: ; X86-NOBMI-NEXT: subl $8, %esp ; X86-NOBMI-NEXT: pushl %edi ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: calll use64 ; X86-NOBMI-NEXT: addl $16, %esp ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %esi ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %edi ; X86-NOBMI-NEXT: movl %esi, %eax ; X86-NOBMI-NEXT: movl %edi, %edx ; X86-NOBMI-NEXT: addl $4, %esp ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: popl %edi ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_c0: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %edi ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: pushl %eax ; X86-BMI1NOTBM-NEXT: movb $64, %cl ; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %esi ; X86-BMI1NOTBM-NEXT: movl $-1, %edi ; X86-BMI1NOTBM-NEXT: shrl %cl, %edi ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: je .LBB34_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %edi, %esi ; X86-BMI1NOTBM-NEXT: xorl %edi, %edi ; X86-BMI1NOTBM-NEXT: .LBB34_2: ; X86-BMI1NOTBM-NEXT: subl $8, %esp ; X86-BMI1NOTBM-NEXT: pushl %edi ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: calll use64 ; X86-BMI1NOTBM-NEXT: addl $16, %esp ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %esi ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %edi ; X86-BMI1NOTBM-NEXT: movl %esi, %eax ; X86-BMI1NOTBM-NEXT: movl %edi, %edx ; X86-BMI1NOTBM-NEXT: addl $4, %esp ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: popl %edi ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_c0: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %edi ; X86-BMI1BMI2-NEXT: pushl %esi 
; X86-BMI1BMI2-NEXT: pushl %eax ; X86-BMI1BMI2-NEXT: movb $64, %al ; X86-BMI1BMI2-NEXT: subb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: movl $-1, %edi ; X86-BMI1BMI2-NEXT: shrxl %eax, %edi, %esi ; X86-BMI1BMI2-NEXT: testb $32, %al ; X86-BMI1BMI2-NEXT: je .LBB34_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %esi, %edi ; X86-BMI1BMI2-NEXT: xorl %esi, %esi ; X86-BMI1BMI2-NEXT: .LBB34_2: ; X86-BMI1BMI2-NEXT: subl $8, %esp ; X86-BMI1BMI2-NEXT: pushl %esi ; X86-BMI1BMI2-NEXT: pushl %edi ; X86-BMI1BMI2-NEXT: calll use64 ; X86-BMI1BMI2-NEXT: addl $16, %esp ; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %edi ; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %esi ; X86-BMI1BMI2-NEXT: movl %edi, %eax ; X86-BMI1BMI2-NEXT: movl %esi, %edx ; X86-BMI1BMI2-NEXT: addl $4, %esp ; X86-BMI1BMI2-NEXT: popl %esi ; X86-BMI1BMI2-NEXT: popl %edi ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_c0: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: pushq %r14 ; X64-NOBMI-NEXT: pushq %rbx ; X64-NOBMI-NEXT: pushq %rax ; X64-NOBMI-NEXT: movq %rsi, %rcx ; X64-NOBMI-NEXT: movq %rdi, %r14 ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: movq $-1, %rbx ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-NOBMI-NEXT: shrq %cl, %rbx ; X64-NOBMI-NEXT: movq %rbx, %rdi ; X64-NOBMI-NEXT: callq use64 ; X64-NOBMI-NEXT: andq %r14, %rbx ; X64-NOBMI-NEXT: movq %rbx, %rax ; X64-NOBMI-NEXT: addq $8, %rsp ; X64-NOBMI-NEXT: popq %rbx ; X64-NOBMI-NEXT: popq %r14 ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_c0: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: pushq %r14 ; X64-BMI1NOTBM-NEXT: pushq %rbx ; X64-BMI1NOTBM-NEXT: pushq %rax ; X64-BMI1NOTBM-NEXT: movq %rsi, %rcx ; X64-BMI1NOTBM-NEXT: movq %rdi, %r14 ; X64-BMI1NOTBM-NEXT: negb %cl ; X64-BMI1NOTBM-NEXT: movq $-1, %rbx ; X64-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-BMI1NOTBM-NEXT: shrq %cl, %rbx ; X64-BMI1NOTBM-NEXT: movq %rbx, %rdi ; X64-BMI1NOTBM-NEXT: callq use64 ; X64-BMI1NOTBM-NEXT: andq %r14, %rbx ; X64-BMI1NOTBM-NEXT: movq %rbx, %rax ; X64-BMI1NOTBM-NEXT: addq $8, %rsp ; X64-BMI1NOTBM-NEXT: popq %rbx ; X64-BMI1NOTBM-NEXT: popq %r14 ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_c0: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: pushq %r14 ; X64-BMI1BMI2-NEXT: pushq %rbx ; X64-BMI1BMI2-NEXT: pushq %rax ; X64-BMI1BMI2-NEXT: movq %rsi, %rbx ; X64-BMI1BMI2-NEXT: movq %rdi, %r14 ; X64-BMI1BMI2-NEXT: movl %ebx, %eax ; X64-BMI1BMI2-NEXT: negb %al ; X64-BMI1BMI2-NEXT: movq $-1, %rcx ; X64-BMI1BMI2-NEXT: shrxq %rax, %rcx, %rdi ; X64-BMI1BMI2-NEXT: callq use64 ; X64-BMI1BMI2-NEXT: bzhiq %rbx, %r14, %rax ; X64-BMI1BMI2-NEXT: addq $8, %rsp ; X64-BMI1BMI2-NEXT: popq %rbx ; X64-BMI1BMI2-NEXT: popq %r14 ; X64-BMI1BMI2-NEXT: retq %numhighbits = sub i64 64, %numlowbits %mask = lshr i64 -1, %numhighbits call void @use64(i64 %mask) %masked = and i64 %mask, %val ret i64 %masked } define i64 @bzhi64_c1_indexzext(i64 %val, i8 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_c1_indexzext: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %edi ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: pushl %eax ; X86-NOBMI-NEXT: movb $64, %cl ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %esi ; X86-NOBMI-NEXT: movl $-1, %edi ; X86-NOBMI-NEXT: shrl %cl, %edi ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: je .LBB35_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %edi, %esi ; X86-NOBMI-NEXT: xorl %edi, %edi ; X86-NOBMI-NEXT: .LBB35_2: ; X86-NOBMI-NEXT: subl $8, %esp ; X86-NOBMI-NEXT: pushl %edi ; X86-NOBMI-NEXT: pushl %esi ; 
X86-NOBMI-NEXT: calll use64 ; X86-NOBMI-NEXT: addl $16, %esp ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %esi ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %edi ; X86-NOBMI-NEXT: movl %esi, %eax ; X86-NOBMI-NEXT: movl %edi, %edx ; X86-NOBMI-NEXT: addl $4, %esp ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: popl %edi ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_c1_indexzext: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %edi ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: pushl %eax ; X86-BMI1NOTBM-NEXT: movb $64, %cl ; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %esi ; X86-BMI1NOTBM-NEXT: movl $-1, %edi ; X86-BMI1NOTBM-NEXT: shrl %cl, %edi ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: je .LBB35_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %edi, %esi ; X86-BMI1NOTBM-NEXT: xorl %edi, %edi ; X86-BMI1NOTBM-NEXT: .LBB35_2: ; X86-BMI1NOTBM-NEXT: subl $8, %esp ; X86-BMI1NOTBM-NEXT: pushl %edi ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: calll use64 ; X86-BMI1NOTBM-NEXT: addl $16, %esp ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %esi ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %edi ; X86-BMI1NOTBM-NEXT: movl %esi, %eax ; X86-BMI1NOTBM-NEXT: movl %edi, %edx ; X86-BMI1NOTBM-NEXT: addl $4, %esp ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: popl %edi ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_c1_indexzext: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %edi ; X86-BMI1BMI2-NEXT: pushl %esi ; X86-BMI1BMI2-NEXT: pushl %eax ; X86-BMI1BMI2-NEXT: movb $64, %al ; X86-BMI1BMI2-NEXT: subb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: movl $-1, %edi ; X86-BMI1BMI2-NEXT: shrxl %eax, %edi, %esi ; X86-BMI1BMI2-NEXT: testb $32, %al ; X86-BMI1BMI2-NEXT: je .LBB35_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %esi, %edi ; X86-BMI1BMI2-NEXT: xorl %esi, %esi ; X86-BMI1BMI2-NEXT: .LBB35_2: ; X86-BMI1BMI2-NEXT: subl $8, %esp ; X86-BMI1BMI2-NEXT: pushl %esi ; X86-BMI1BMI2-NEXT: pushl %edi ; X86-BMI1BMI2-NEXT: calll use64 ; X86-BMI1BMI2-NEXT: addl $16, %esp ; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %edi ; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %esi ; X86-BMI1BMI2-NEXT: movl %edi, %eax ; X86-BMI1BMI2-NEXT: movl %esi, %edx ; X86-BMI1BMI2-NEXT: addl $4, %esp ; X86-BMI1BMI2-NEXT: popl %esi ; X86-BMI1BMI2-NEXT: popl %edi ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_c1_indexzext: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: pushq %r14 ; X64-NOBMI-NEXT: pushq %rbx ; X64-NOBMI-NEXT: pushq %rax ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movq %rdi, %r14 ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: movq $-1, %rbx ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shrq %cl, %rbx ; X64-NOBMI-NEXT: movq %rbx, %rdi ; X64-NOBMI-NEXT: callq use64 ; X64-NOBMI-NEXT: andq %r14, %rbx ; X64-NOBMI-NEXT: movq %rbx, %rax ; X64-NOBMI-NEXT: addq $8, %rsp ; X64-NOBMI-NEXT: popq %rbx ; X64-NOBMI-NEXT: popq %r14 ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_c1_indexzext: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: pushq %r14 ; X64-BMI1NOTBM-NEXT: pushq %rbx ; X64-BMI1NOTBM-NEXT: pushq %rax ; X64-BMI1NOTBM-NEXT: movl %esi, %ecx ; X64-BMI1NOTBM-NEXT: movq %rdi, %r14 ; X64-BMI1NOTBM-NEXT: negb %cl ; X64-BMI1NOTBM-NEXT: movq $-1, %rbx ; X64-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-BMI1NOTBM-NEXT: shrq %cl, %rbx ; X64-BMI1NOTBM-NEXT: movq %rbx, %rdi ; X64-BMI1NOTBM-NEXT: callq use64 ; X64-BMI1NOTBM-NEXT: andq %r14, %rbx ; X64-BMI1NOTBM-NEXT: movq %rbx, %rax 
; X64-BMI1NOTBM-NEXT: addq $8, %rsp ; X64-BMI1NOTBM-NEXT: popq %rbx ; X64-BMI1NOTBM-NEXT: popq %r14 ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_c1_indexzext: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: pushq %r14 ; X64-BMI1BMI2-NEXT: pushq %rbx ; X64-BMI1BMI2-NEXT: pushq %rax ; X64-BMI1BMI2-NEXT: movl %esi, %ebx ; X64-BMI1BMI2-NEXT: movq %rdi, %r14 ; X64-BMI1BMI2-NEXT: movl %ebx, %eax ; X64-BMI1BMI2-NEXT: negb %al ; X64-BMI1BMI2-NEXT: movq $-1, %rcx ; X64-BMI1BMI2-NEXT: shrxq %rax, %rcx, %rdi ; X64-BMI1BMI2-NEXT: callq use64 ; X64-BMI1BMI2-NEXT: bzhiq %rbx, %r14, %rax ; X64-BMI1BMI2-NEXT: addq $8, %rsp ; X64-BMI1BMI2-NEXT: popq %rbx ; X64-BMI1BMI2-NEXT: popq %r14 ; X64-BMI1BMI2-NEXT: retq %numhighbits = sub i8 64, %numlowbits %sh_prom = zext i8 %numhighbits to i64 %mask = lshr i64 -1, %sh_prom call void @use64(i64 %mask) %masked = and i64 %mask, %val ret i64 %masked } define i64 @bzhi64_c2_load(i64* %w, i64 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_c2_load: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %ebx ; X86-NOBMI-NEXT: pushl %edi ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-NOBMI-NEXT: movb $64, %cl ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %eax ; X86-NOBMI-NEXT: movl $-1, %ebx ; X86-NOBMI-NEXT: shrl %cl, %ebx ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: je .LBB36_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %ebx, %eax ; X86-NOBMI-NEXT: xorl %ebx, %ebx ; X86-NOBMI-NEXT: .LBB36_2: ; X86-NOBMI-NEXT: movl 4(%edx), %esi ; X86-NOBMI-NEXT: andl %ebx, %esi ; X86-NOBMI-NEXT: movl (%edx), %edi ; X86-NOBMI-NEXT: andl %eax, %edi ; X86-NOBMI-NEXT: subl $8, %esp ; X86-NOBMI-NEXT: pushl %ebx ; X86-NOBMI-NEXT: pushl %eax ; X86-NOBMI-NEXT: calll use64 ; X86-NOBMI-NEXT: addl $16, %esp ; X86-NOBMI-NEXT: movl %edi, %eax ; X86-NOBMI-NEXT: movl %esi, %edx ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: popl %edi ; X86-NOBMI-NEXT: popl %ebx ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_c2_load: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %ebx ; X86-BMI1NOTBM-NEXT: pushl %edi ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-BMI1NOTBM-NEXT: movb $64, %cl ; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %eax ; X86-BMI1NOTBM-NEXT: movl $-1, %ebx ; X86-BMI1NOTBM-NEXT: shrl %cl, %ebx ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: je .LBB36_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %ebx, %eax ; X86-BMI1NOTBM-NEXT: xorl %ebx, %ebx ; X86-BMI1NOTBM-NEXT: .LBB36_2: ; X86-BMI1NOTBM-NEXT: movl 4(%edx), %esi ; X86-BMI1NOTBM-NEXT: andl %ebx, %esi ; X86-BMI1NOTBM-NEXT: movl (%edx), %edi ; X86-BMI1NOTBM-NEXT: andl %eax, %edi ; X86-BMI1NOTBM-NEXT: subl $8, %esp ; X86-BMI1NOTBM-NEXT: pushl %ebx ; X86-BMI1NOTBM-NEXT: pushl %eax ; X86-BMI1NOTBM-NEXT: calll use64 ; X86-BMI1NOTBM-NEXT: addl $16, %esp ; X86-BMI1NOTBM-NEXT: movl %edi, %eax ; X86-BMI1NOTBM-NEXT: movl %esi, %edx ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: popl %edi ; X86-BMI1NOTBM-NEXT: popl %ebx ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_c2_load: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %ebx ; X86-BMI1BMI2-NEXT: pushl %edi ; X86-BMI1BMI2-NEXT: pushl %esi ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: movb $64, %bl ; X86-BMI1BMI2-NEXT: subb {{[0-9]+}}(%esp), %bl ; X86-BMI1BMI2-NEXT: movl $-1, %ecx ; X86-BMI1BMI2-NEXT: shrxl %ebx, %ecx, %edx ; X86-BMI1BMI2-NEXT: testb $32, %bl ; 
X86-BMI1BMI2-NEXT: je .LBB36_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %edx, %ecx ; X86-BMI1BMI2-NEXT: xorl %edx, %edx ; X86-BMI1BMI2-NEXT: .LBB36_2: ; X86-BMI1BMI2-NEXT: movl 4(%eax), %esi ; X86-BMI1BMI2-NEXT: andl %edx, %esi ; X86-BMI1BMI2-NEXT: movl (%eax), %edi ; X86-BMI1BMI2-NEXT: andl %ecx, %edi ; X86-BMI1BMI2-NEXT: subl $8, %esp ; X86-BMI1BMI2-NEXT: pushl %edx ; X86-BMI1BMI2-NEXT: pushl %ecx ; X86-BMI1BMI2-NEXT: calll use64 ; X86-BMI1BMI2-NEXT: addl $16, %esp ; X86-BMI1BMI2-NEXT: movl %edi, %eax ; X86-BMI1BMI2-NEXT: movl %esi, %edx ; X86-BMI1BMI2-NEXT: popl %esi ; X86-BMI1BMI2-NEXT: popl %edi ; X86-BMI1BMI2-NEXT: popl %ebx ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_c2_load: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: pushq %rbx ; X64-NOBMI-NEXT: movq %rsi, %rcx ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: movq $-1, %rax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-NOBMI-NEXT: shrq %cl, %rax ; X64-NOBMI-NEXT: movq (%rdi), %rbx ; X64-NOBMI-NEXT: andq %rax, %rbx ; X64-NOBMI-NEXT: movq %rax, %rdi ; X64-NOBMI-NEXT: callq use64 ; X64-NOBMI-NEXT: movq %rbx, %rax ; X64-NOBMI-NEXT: popq %rbx ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_c2_load: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: pushq %rbx ; X64-BMI1NOTBM-NEXT: movq %rsi, %rcx ; X64-BMI1NOTBM-NEXT: negb %cl ; X64-BMI1NOTBM-NEXT: movq $-1, %rax ; X64-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-BMI1NOTBM-NEXT: shrq %cl, %rax ; X64-BMI1NOTBM-NEXT: movq (%rdi), %rbx ; X64-BMI1NOTBM-NEXT: andq %rax, %rbx ; X64-BMI1NOTBM-NEXT: movq %rax, %rdi ; X64-BMI1NOTBM-NEXT: callq use64 ; X64-BMI1NOTBM-NEXT: movq %rbx, %rax ; X64-BMI1NOTBM-NEXT: popq %rbx ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_c2_load: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: pushq %rbx ; X64-BMI1BMI2-NEXT: bzhiq %rsi, (%rdi), %rbx ; X64-BMI1BMI2-NEXT: # kill: def $sil killed $sil killed $rsi def $rsi ; X64-BMI1BMI2-NEXT: negb %sil ; X64-BMI1BMI2-NEXT: movq $-1, %rax ; X64-BMI1BMI2-NEXT: shrxq %rsi, %rax, %rdi ; X64-BMI1BMI2-NEXT: callq use64 ; X64-BMI1BMI2-NEXT: movq %rbx, %rax ; X64-BMI1BMI2-NEXT: popq %rbx ; X64-BMI1BMI2-NEXT: retq %val = load i64, i64* %w %numhighbits = sub i64 64, %numlowbits %mask = lshr i64 -1, %numhighbits call void @use64(i64 %mask) %masked = and i64 %mask, %val ret i64 %masked } define i64 @bzhi64_c3_load_indexzext(i64* %w, i8 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_c3_load_indexzext: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %ebx ; X86-NOBMI-NEXT: pushl %edi ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-NOBMI-NEXT: movb $64, %cl ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %eax ; X86-NOBMI-NEXT: movl $-1, %ebx ; X86-NOBMI-NEXT: shrl %cl, %ebx ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: je .LBB37_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %ebx, %eax ; X86-NOBMI-NEXT: xorl %ebx, %ebx ; X86-NOBMI-NEXT: .LBB37_2: ; X86-NOBMI-NEXT: movl 4(%edx), %esi ; X86-NOBMI-NEXT: andl %ebx, %esi ; X86-NOBMI-NEXT: movl (%edx), %edi ; X86-NOBMI-NEXT: andl %eax, %edi ; X86-NOBMI-NEXT: subl $8, %esp ; X86-NOBMI-NEXT: pushl %ebx ; X86-NOBMI-NEXT: pushl %eax ; X86-NOBMI-NEXT: calll use64 ; X86-NOBMI-NEXT: addl $16, %esp ; X86-NOBMI-NEXT: movl %edi, %eax ; X86-NOBMI-NEXT: movl %esi, %edx ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: popl %edi ; X86-NOBMI-NEXT: popl %ebx ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_c3_load_indexzext: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: 
pushl %ebx ; X86-BMI1NOTBM-NEXT: pushl %edi ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-BMI1NOTBM-NEXT: movb $64, %cl ; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %eax ; X86-BMI1NOTBM-NEXT: movl $-1, %ebx ; X86-BMI1NOTBM-NEXT: shrl %cl, %ebx ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: je .LBB37_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %ebx, %eax ; X86-BMI1NOTBM-NEXT: xorl %ebx, %ebx ; X86-BMI1NOTBM-NEXT: .LBB37_2: ; X86-BMI1NOTBM-NEXT: movl 4(%edx), %esi ; X86-BMI1NOTBM-NEXT: andl %ebx, %esi ; X86-BMI1NOTBM-NEXT: movl (%edx), %edi ; X86-BMI1NOTBM-NEXT: andl %eax, %edi ; X86-BMI1NOTBM-NEXT: subl $8, %esp ; X86-BMI1NOTBM-NEXT: pushl %ebx ; X86-BMI1NOTBM-NEXT: pushl %eax ; X86-BMI1NOTBM-NEXT: calll use64 ; X86-BMI1NOTBM-NEXT: addl $16, %esp ; X86-BMI1NOTBM-NEXT: movl %edi, %eax ; X86-BMI1NOTBM-NEXT: movl %esi, %edx ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: popl %edi ; X86-BMI1NOTBM-NEXT: popl %ebx ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_c3_load_indexzext: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %ebx ; X86-BMI1BMI2-NEXT: pushl %edi ; X86-BMI1BMI2-NEXT: pushl %esi ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: movb $64, %bl ; X86-BMI1BMI2-NEXT: subb {{[0-9]+}}(%esp), %bl ; X86-BMI1BMI2-NEXT: movl $-1, %ecx ; X86-BMI1BMI2-NEXT: shrxl %ebx, %ecx, %edx ; X86-BMI1BMI2-NEXT: testb $32, %bl ; X86-BMI1BMI2-NEXT: je .LBB37_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %edx, %ecx ; X86-BMI1BMI2-NEXT: xorl %edx, %edx ; X86-BMI1BMI2-NEXT: .LBB37_2: ; X86-BMI1BMI2-NEXT: movl 4(%eax), %esi ; X86-BMI1BMI2-NEXT: andl %edx, %esi ; X86-BMI1BMI2-NEXT: movl (%eax), %edi ; X86-BMI1BMI2-NEXT: andl %ecx, %edi ; X86-BMI1BMI2-NEXT: subl $8, %esp ; X86-BMI1BMI2-NEXT: pushl %edx ; X86-BMI1BMI2-NEXT: pushl %ecx ; X86-BMI1BMI2-NEXT: calll use64 ; X86-BMI1BMI2-NEXT: addl $16, %esp ; X86-BMI1BMI2-NEXT: movl %edi, %eax ; X86-BMI1BMI2-NEXT: movl %esi, %edx ; X86-BMI1BMI2-NEXT: popl %esi ; X86-BMI1BMI2-NEXT: popl %edi ; X86-BMI1BMI2-NEXT: popl %ebx ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_c3_load_indexzext: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: pushq %rbx ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: movq $-1, %rax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shrq %cl, %rax ; X64-NOBMI-NEXT: movq (%rdi), %rbx ; X64-NOBMI-NEXT: andq %rax, %rbx ; X64-NOBMI-NEXT: movq %rax, %rdi ; X64-NOBMI-NEXT: callq use64 ; X64-NOBMI-NEXT: movq %rbx, %rax ; X64-NOBMI-NEXT: popq %rbx ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_c3_load_indexzext: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: pushq %rbx ; X64-BMI1NOTBM-NEXT: movl %esi, %ecx ; X64-BMI1NOTBM-NEXT: negb %cl ; X64-BMI1NOTBM-NEXT: movq $-1, %rax ; X64-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-BMI1NOTBM-NEXT: shrq %cl, %rax ; X64-BMI1NOTBM-NEXT: movq (%rdi), %rbx ; X64-BMI1NOTBM-NEXT: andq %rax, %rbx ; X64-BMI1NOTBM-NEXT: movq %rax, %rdi ; X64-BMI1NOTBM-NEXT: callq use64 ; X64-BMI1NOTBM-NEXT: movq %rbx, %rax ; X64-BMI1NOTBM-NEXT: popq %rbx ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_c3_load_indexzext: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: pushq %rbx ; X64-BMI1BMI2-NEXT: # kill: def $esi killed $esi def $rsi ; X64-BMI1BMI2-NEXT: bzhiq %rsi, (%rdi), %rbx ; X64-BMI1BMI2-NEXT: # kill: def $sil killed $sil killed $rsi def $rsi ; X64-BMI1BMI2-NEXT: negb %sil ; 
X64-BMI1BMI2-NEXT: movq $-1, %rax ; X64-BMI1BMI2-NEXT: shrxq %rsi, %rax, %rdi ; X64-BMI1BMI2-NEXT: callq use64 ; X64-BMI1BMI2-NEXT: movq %rbx, %rax ; X64-BMI1BMI2-NEXT: popq %rbx ; X64-BMI1BMI2-NEXT: retq %val = load i64, i64* %w %numhighbits = sub i8 64, %numlowbits %sh_prom = zext i8 %numhighbits to i64 %mask = lshr i64 -1, %sh_prom call void @use64(i64 %mask) %masked = and i64 %mask, %val ret i64 %masked } define i64 @bzhi64_c4_commutative(i64 %val, i64 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_c4_commutative: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %edi ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: pushl %eax ; X86-NOBMI-NEXT: movb $64, %cl ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %esi ; X86-NOBMI-NEXT: movl $-1, %edi ; X86-NOBMI-NEXT: shrl %cl, %edi ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: je .LBB38_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %edi, %esi ; X86-NOBMI-NEXT: xorl %edi, %edi ; X86-NOBMI-NEXT: .LBB38_2: ; X86-NOBMI-NEXT: subl $8, %esp ; X86-NOBMI-NEXT: pushl %edi ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: calll use64 ; X86-NOBMI-NEXT: addl $16, %esp ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %esi ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %edi ; X86-NOBMI-NEXT: movl %esi, %eax ; X86-NOBMI-NEXT: movl %edi, %edx ; X86-NOBMI-NEXT: addl $4, %esp ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: popl %edi ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_c4_commutative: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %edi ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: pushl %eax ; X86-BMI1NOTBM-NEXT: movb $64, %cl ; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %esi ; X86-BMI1NOTBM-NEXT: movl $-1, %edi ; X86-BMI1NOTBM-NEXT: shrl %cl, %edi ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: je .LBB38_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %edi, %esi ; X86-BMI1NOTBM-NEXT: xorl %edi, %edi ; X86-BMI1NOTBM-NEXT: .LBB38_2: ; X86-BMI1NOTBM-NEXT: subl $8, %esp ; X86-BMI1NOTBM-NEXT: pushl %edi ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: calll use64 ; X86-BMI1NOTBM-NEXT: addl $16, %esp ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %esi ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %edi ; X86-BMI1NOTBM-NEXT: movl %esi, %eax ; X86-BMI1NOTBM-NEXT: movl %edi, %edx ; X86-BMI1NOTBM-NEXT: addl $4, %esp ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: popl %edi ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_c4_commutative: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %edi ; X86-BMI1BMI2-NEXT: pushl %esi ; X86-BMI1BMI2-NEXT: pushl %eax ; X86-BMI1BMI2-NEXT: movb $64, %al ; X86-BMI1BMI2-NEXT: subb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: movl $-1, %edi ; X86-BMI1BMI2-NEXT: shrxl %eax, %edi, %esi ; X86-BMI1BMI2-NEXT: testb $32, %al ; X86-BMI1BMI2-NEXT: je .LBB38_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %esi, %edi ; X86-BMI1BMI2-NEXT: xorl %esi, %esi ; X86-BMI1BMI2-NEXT: .LBB38_2: ; X86-BMI1BMI2-NEXT: subl $8, %esp ; X86-BMI1BMI2-NEXT: pushl %esi ; X86-BMI1BMI2-NEXT: pushl %edi ; X86-BMI1BMI2-NEXT: calll use64 ; X86-BMI1BMI2-NEXT: addl $16, %esp ; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %edi ; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %esi ; X86-BMI1BMI2-NEXT: movl %edi, %eax ; X86-BMI1BMI2-NEXT: movl %esi, %edx ; X86-BMI1BMI2-NEXT: addl $4, %esp ; X86-BMI1BMI2-NEXT: popl %esi ; X86-BMI1BMI2-NEXT: popl %edi ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_c4_commutative: ; X64-NOBMI: # %bb.0: ; 
X64-NOBMI-NEXT: pushq %r14 ; X64-NOBMI-NEXT: pushq %rbx ; X64-NOBMI-NEXT: pushq %rax ; X64-NOBMI-NEXT: movq %rsi, %rcx ; X64-NOBMI-NEXT: movq %rdi, %r14 ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: movq $-1, %rbx ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-NOBMI-NEXT: shrq %cl, %rbx ; X64-NOBMI-NEXT: movq %rbx, %rdi ; X64-NOBMI-NEXT: callq use64 ; X64-NOBMI-NEXT: andq %r14, %rbx ; X64-NOBMI-NEXT: movq %rbx, %rax ; X64-NOBMI-NEXT: addq $8, %rsp ; X64-NOBMI-NEXT: popq %rbx ; X64-NOBMI-NEXT: popq %r14 ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_c4_commutative: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: pushq %r14 ; X64-BMI1NOTBM-NEXT: pushq %rbx ; X64-BMI1NOTBM-NEXT: pushq %rax ; X64-BMI1NOTBM-NEXT: movq %rsi, %rcx ; X64-BMI1NOTBM-NEXT: movq %rdi, %r14 ; X64-BMI1NOTBM-NEXT: negb %cl ; X64-BMI1NOTBM-NEXT: movq $-1, %rbx ; X64-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-BMI1NOTBM-NEXT: shrq %cl, %rbx ; X64-BMI1NOTBM-NEXT: movq %rbx, %rdi ; X64-BMI1NOTBM-NEXT: callq use64 ; X64-BMI1NOTBM-NEXT: andq %r14, %rbx ; X64-BMI1NOTBM-NEXT: movq %rbx, %rax ; X64-BMI1NOTBM-NEXT: addq $8, %rsp ; X64-BMI1NOTBM-NEXT: popq %rbx ; X64-BMI1NOTBM-NEXT: popq %r14 ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_c4_commutative: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: pushq %r14 ; X64-BMI1BMI2-NEXT: pushq %rbx ; X64-BMI1BMI2-NEXT: pushq %rax ; X64-BMI1BMI2-NEXT: movq %rsi, %rbx ; X64-BMI1BMI2-NEXT: movq %rdi, %r14 ; X64-BMI1BMI2-NEXT: movl %ebx, %eax ; X64-BMI1BMI2-NEXT: negb %al ; X64-BMI1BMI2-NEXT: movq $-1, %rcx ; X64-BMI1BMI2-NEXT: shrxq %rax, %rcx, %rdi ; X64-BMI1BMI2-NEXT: callq use64 ; X64-BMI1BMI2-NEXT: bzhiq %rbx, %r14, %rax ; X64-BMI1BMI2-NEXT: addq $8, %rsp ; X64-BMI1BMI2-NEXT: popq %rbx ; X64-BMI1BMI2-NEXT: popq %r14 ; X64-BMI1BMI2-NEXT: retq %numhighbits = sub i64 64, %numlowbits %mask = lshr i64 -1, %numhighbits call void @use64(i64 %mask) %masked = and i64 %val, %mask ; swapped order ret i64 %masked } ; 64-bit, but with 32-bit output ; Everything done in 64-bit, truncation happens last. 
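; A worked example for bzhi64_32_c0 below, with illustrative values chosen
; for this comment only (they are not part of the generated checks): given
; %numlowbits = 8 and %val = 0x0000000012345678, the IR evaluates to
; %numhighbits = 56, %mask = lshr i64 -1, 56 = 0xFF, %masked = 0x78, and the
; final trunc produces i32 0x78 -- the low 8 bits of %val, which is exactly
; what the bzhil lowering checked below computes.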
define i32 @bzhi64_32_c0(i64 %val, i64 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_32_c0: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movb $64, %cl ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl $-1, %eax ; X86-NOBMI-NEXT: shrl %cl, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: jne .LBB39_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl $-1, %eax ; X86-NOBMI-NEXT: .LBB39_2: ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_32_c0: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb $64, %cl ; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl $-1, %eax ; X86-BMI1NOTBM-NEXT: shrl %cl, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: jne .LBB39_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl $-1, %eax ; X86-BMI1NOTBM-NEXT: .LBB39_2: ; X86-BMI1NOTBM-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_32_c0: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb $64, %cl ; X86-BMI1BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: movl $-1, %eax ; X86-BMI1BMI2-NEXT: testb $32, %cl ; X86-BMI1BMI2-NEXT: je .LBB39_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: shrxl %ecx, %eax, %eax ; X86-BMI1BMI2-NEXT: .LBB39_2: ; X86-BMI1BMI2-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_32_c0: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movq %rsi, %rcx ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: movq $-1, %rax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-NOBMI-NEXT: shrq %cl, %rax ; X64-NOBMI-NEXT: andl %edi, %eax ; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_32_c0: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_32_c0: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %numhighbits = sub i64 64, %numlowbits %mask = lshr i64 -1, %numhighbits %masked = and i64 %mask, %val %res = trunc i64 %masked to i32 ret i32 %res } ; Shifting happens in 64-bit, then truncation. Masking is 32-bit. 
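; A worked example for bzhi64_32_c1 below, with illustrative values chosen
; for this comment only: given %numlowbits = 8 and %val = 0xAABBCCDD11223344,
; the IR evaluates to %truncval = 0x11223344, %numhighbits = 24,
; %mask = lshr i32 -1, 24 = 0xFF, and %masked = 0x44; the upper half of %val
; is discarded up front, so the 32-bit bextrl/bzhil lowerings checked below
; are sufficient.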
define i32 @bzhi64_32_c1(i64 %val, i32 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_32_c1: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: xorl %ecx, %ecx ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-NOBMI-NEXT: shrl %cl, %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_32_c1: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1NOTBM-NEXT: shll $8, %eax ; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_32_c1: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_32_c1: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movq %rdi, %rax ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shrl %cl, %eax ; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_32_c1: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_32_c1: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %truncval = trunc i64 %val to i32 %numhighbits = sub i32 32, %numlowbits %mask = lshr i32 -1, %numhighbits %masked = and i32 %mask, %truncval ret i32 %masked } ; Shifting happens in 64-bit. Mask is 32-bit, but extended to 64-bit. ; Masking is 64-bit. Then truncation. define i32 @bzhi64_32_c2(i64 %val, i32 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_32_c2: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: xorl %ecx, %ecx ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-NOBMI-NEXT: shrl %cl, %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_32_c2: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1NOTBM-NEXT: shll $8, %eax ; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_32_c2: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_32_c2: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movq %rdi, %rax ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shrl %cl, %eax ; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_32_c2: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_32_c2: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %numhighbits = sub i32 32, %numlowbits %mask = lshr i32 -1, %numhighbits %zextmask = zext i32 %mask to i64 %masked = and i64 %zextmask, %val %truncmasked = trunc i64 %masked to i32 ret i32 %truncmasked } ; Shifting happens in 64-bit. Mask is 32-bit, but calculated in 64-bit. ; Masking is 64-bit. Then truncation. 
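; A worked example for bzhi64_32_c3 below, with an illustrative value chosen
; for this comment only: given %numlowbits = 40, the IR evaluates to
; %numhighbits = 24 and %mask = lshr i64 4294967295, 24 = 0xFF, so only bits
; 7:0 of %val survive the and. Because the shifted constant is the 32-bit -1
; rather than the 64-bit -1, the result is not simply the low %numlowbits
; bits of %val, which is why the X64 checks below keep the explicit shift of
; the 0xFFFFFFFF immediate instead of forming bzhi.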
define i32 @bzhi64_32_c3(i64 %val, i64 %numlowbits) nounwind { ; X86-LABEL: bzhi64_32_c3: ; X86: # %bb.0: ; X86-NEXT: movb $64, %cl ; X86-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NEXT: xorl %eax, %eax ; X86-NEXT: movl $-1, %edx ; X86-NEXT: shrdl %cl, %eax, %edx ; X86-NEXT: testb $32, %cl ; X86-NEXT: jne .LBB42_2 ; X86-NEXT: # %bb.1: ; X86-NEXT: movl %edx, %eax ; X86-NEXT: .LBB42_2: ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_32_c3: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movq %rsi, %rcx ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: movl $4294967295, %eax # imm = 0xFFFFFFFF ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-NOBMI-NEXT: shrq %cl, %rax ; X64-NOBMI-NEXT: andl %edi, %eax ; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_32_c3: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: movq %rsi, %rcx ; X64-BMI1NOTBM-NEXT: negb %cl ; X64-BMI1NOTBM-NEXT: movl $4294967295, %eax # imm = 0xFFFFFFFF ; X64-BMI1NOTBM-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-BMI1NOTBM-NEXT: shrq %cl, %rax ; X64-BMI1NOTBM-NEXT: andl %edi, %eax ; X64-BMI1NOTBM-NEXT: # kill: def $eax killed $eax killed $rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_32_c3: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: negb %sil ; X64-BMI1BMI2-NEXT: movl $4294967295, %eax # imm = 0xFFFFFFFF ; X64-BMI1BMI2-NEXT: shrxq %rsi, %rax, %rax ; X64-BMI1BMI2-NEXT: andl %edi, %eax ; X64-BMI1BMI2-NEXT: # kill: def $eax killed $eax killed $rax ; X64-BMI1BMI2-NEXT: retq %numhighbits = sub i64 64, %numlowbits %mask = lshr i64 4294967295, %numhighbits %masked = and i64 %mask, %val %truncmasked = trunc i64 %masked to i32 ret i32 %truncmasked } ; ---------------------------------------------------------------------------- ; ; Pattern d. 32-bit. 
; ---------------------------------------------------------------------------- ; define i32 @bzhi32_d0(i32 %val, i32 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_d0: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: xorl %ecx, %ecx ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-NOBMI-NEXT: shrl %cl, %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_d0: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1NOTBM-NEXT: shll $8, %eax ; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_d0: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_d0: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl %edi, %eax ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shrl %cl, %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_d0: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_d0: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %numhighbits = sub i32 32, %numlowbits %highbitscleared = shl i32 %val, %numhighbits %masked = lshr i32 %highbitscleared, %numhighbits ret i32 %masked } define i32 @bzhi32_d1_indexzext(i32 %val, i8 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_d1_indexzext: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: xorl %ecx, %ecx ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-NOBMI-NEXT: shrl %cl, %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_d1_indexzext: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1NOTBM-NEXT: shll $8, %eax ; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_d1_indexzext: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_d1_indexzext: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl %edi, %eax ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shrl %cl, %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_d1_indexzext: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_d1_indexzext: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %numhighbits = sub i8 32, %numlowbits %sh_prom = zext i8 %numhighbits to i32 %highbitscleared = shl i32 %val, %sh_prom %masked = lshr i32 %highbitscleared, %sh_prom ret i32 %masked } define i32 @bzhi32_d2_load(i32* %w, i32 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_d2_load: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: movl (%eax), %eax ; X86-NOBMI-NEXT: xorl %ecx, %ecx ; X86-NOBMI-NEXT: subb 
{{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-NOBMI-NEXT: shrl %cl, %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_d2_load: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: shll $8, %ecx ; X86-BMI1NOTBM-NEXT: bextrl %ecx, (%eax), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_d2_load: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: bzhil %ecx, (%eax), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_d2_load: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl (%rdi), %eax ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shrl %cl, %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_d2_load: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, (%rdi), %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_d2_load: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, (%rdi), %eax ; X64-BMI1BMI2-NEXT: retq %val = load i32, i32* %w %numhighbits = sub i32 32, %numlowbits %highbitscleared = shl i32 %val, %numhighbits %masked = lshr i32 %highbitscleared, %numhighbits ret i32 %masked } define i32 @bzhi32_d3_load_indexzext(i32* %w, i8 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi32_d3_load_indexzext: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: movl (%eax), %eax ; X86-NOBMI-NEXT: xorl %ecx, %ecx ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-NOBMI-NEXT: shrl %cl, %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi32_d3_load_indexzext: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: shll $8, %ecx ; X86-BMI1NOTBM-NEXT: bextrl %ecx, (%eax), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi32_d3_load_indexzext: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: bzhil %ecx, (%eax), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi32_d3_load_indexzext: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movl (%rdi), %eax ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shrl %cl, %eax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi32_d3_load_indexzext: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, (%rdi), %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi32_d3_load_indexzext: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, (%rdi), %eax ; X64-BMI1BMI2-NEXT: retq %val = load i32, i32* %w %numhighbits = sub i8 32, %numlowbits %sh_prom = zext i8 %numhighbits to i32 %highbitscleared = shl i32 %val, %sh_prom %masked = lshr i32 %highbitscleared, %sh_prom ret i32 %masked } ; 64-bit. 
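; An illustrative sketch, kept commented out so the autogenerated assertions are
; left untouched: for 0 < %numlowbits < 64, the shift-left/shift-right pair used
; by the 64-bit pattern-d tests below clears exactly the bits that the pattern-a
; mask (1 << n) - 1 clears. The function name is hypothetical.
; define i64 @bzhi64_d0_pattern_a_sketch(i64 %val, i64 %numlowbits) nounwind {
;   %onebit = shl i64 1, %numlowbits
;   %mask = add nsw i64 %onebit, -1
;   %masked = and i64 %mask, %val
;   ret i64 %masked
; }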
define i64 @bzhi64_d0(i64 %val, i64 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_d0: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %ebx ; X86-NOBMI-NEXT: pushl %edi ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: movb $64, %cl ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl %edx, %esi ; X86-NOBMI-NEXT: shll %cl, %esi ; X86-NOBMI-NEXT: shldl %cl, %edx, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: movl %esi, %edi ; X86-NOBMI-NEXT: jne .LBB47_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %eax, %edi ; X86-NOBMI-NEXT: .LBB47_2: ; X86-NOBMI-NEXT: movl %edi, %eax ; X86-NOBMI-NEXT: shrl %cl, %eax ; X86-NOBMI-NEXT: xorl %ebx, %ebx ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: movl $0, %edx ; X86-NOBMI-NEXT: jne .LBB47_4 ; X86-NOBMI-NEXT: # %bb.3: ; X86-NOBMI-NEXT: movl %esi, %ebx ; X86-NOBMI-NEXT: movl %eax, %edx ; X86-NOBMI-NEXT: .LBB47_4: ; X86-NOBMI-NEXT: shrdl %cl, %edi, %ebx ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: jne .LBB47_6 ; X86-NOBMI-NEXT: # %bb.5: ; X86-NOBMI-NEXT: movl %ebx, %eax ; X86-NOBMI-NEXT: .LBB47_6: ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: popl %edi ; X86-NOBMI-NEXT: popl %ebx ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_d0: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %ebx ; X86-BMI1NOTBM-NEXT: pushl %edi ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: movb $64, %cl ; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl %edx, %esi ; X86-BMI1NOTBM-NEXT: shll %cl, %esi ; X86-BMI1NOTBM-NEXT: shldl %cl, %edx, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: movl %esi, %edi ; X86-BMI1NOTBM-NEXT: jne .LBB47_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %eax, %edi ; X86-BMI1NOTBM-NEXT: .LBB47_2: ; X86-BMI1NOTBM-NEXT: movl %edi, %eax ; X86-BMI1NOTBM-NEXT: shrl %cl, %eax ; X86-BMI1NOTBM-NEXT: xorl %ebx, %ebx ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: movl $0, %edx ; X86-BMI1NOTBM-NEXT: jne .LBB47_4 ; X86-BMI1NOTBM-NEXT: # %bb.3: ; X86-BMI1NOTBM-NEXT: movl %esi, %ebx ; X86-BMI1NOTBM-NEXT: movl %eax, %edx ; X86-BMI1NOTBM-NEXT: .LBB47_4: ; X86-BMI1NOTBM-NEXT: shrdl %cl, %edi, %ebx ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: jne .LBB47_6 ; X86-BMI1NOTBM-NEXT: # %bb.5: ; X86-BMI1NOTBM-NEXT: movl %ebx, %eax ; X86-BMI1NOTBM-NEXT: .LBB47_6: ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: popl %edi ; X86-BMI1NOTBM-NEXT: popl %ebx ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_d0: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %edi ; X86-BMI1BMI2-NEXT: pushl %esi ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi ; X86-BMI1BMI2-NEXT: movb $64, %cl ; X86-BMI1BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: shldl %cl, %eax, %esi ; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %edi ; X86-BMI1BMI2-NEXT: xorl %edx, %edx ; X86-BMI1BMI2-NEXT: testb $32, %cl ; X86-BMI1BMI2-NEXT: je .LBB47_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %edi, %esi ; X86-BMI1BMI2-NEXT: movl $0, %edi ; X86-BMI1BMI2-NEXT: .LBB47_2: ; X86-BMI1BMI2-NEXT: shrxl %ecx, %esi, %eax ; X86-BMI1BMI2-NEXT: jne .LBB47_4 ; X86-BMI1BMI2-NEXT: # %bb.3: ; X86-BMI1BMI2-NEXT: movl %eax, %edx ; X86-BMI1BMI2-NEXT: .LBB47_4: ; X86-BMI1BMI2-NEXT: shrdl %cl, %esi, %edi ; X86-BMI1BMI2-NEXT: testb $32, %cl ; 
X86-BMI1BMI2-NEXT: jne .LBB47_6 ; X86-BMI1BMI2-NEXT: # %bb.5: ; X86-BMI1BMI2-NEXT: movl %edi, %eax ; X86-BMI1BMI2-NEXT: .LBB47_6: ; X86-BMI1BMI2-NEXT: popl %esi ; X86-BMI1BMI2-NEXT: popl %edi ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_d0: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movq %rsi, %rcx ; X64-NOBMI-NEXT: movq %rdi, %rax ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-NOBMI-NEXT: shrq %cl, %rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_d0: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrq %rsi, %rdi, %rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_d0: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhiq %rsi, %rdi, %rax ; X64-BMI1BMI2-NEXT: retq %numhighbits = sub i64 64, %numlowbits %highbitscleared = shl i64 %val, %numhighbits %masked = lshr i64 %highbitscleared, %numhighbits ret i64 %masked } define i64 @bzhi64_d1_indexzext(i64 %val, i8 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_d1_indexzext: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %ebx ; X86-NOBMI-NEXT: pushl %edi ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: movb $64, %cl ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl %edx, %esi ; X86-NOBMI-NEXT: shll %cl, %esi ; X86-NOBMI-NEXT: shldl %cl, %edx, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: movl %esi, %edi ; X86-NOBMI-NEXT: jne .LBB48_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %eax, %edi ; X86-NOBMI-NEXT: .LBB48_2: ; X86-NOBMI-NEXT: movl %edi, %eax ; X86-NOBMI-NEXT: shrl %cl, %eax ; X86-NOBMI-NEXT: xorl %ebx, %ebx ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: movl $0, %edx ; X86-NOBMI-NEXT: jne .LBB48_4 ; X86-NOBMI-NEXT: # %bb.3: ; X86-NOBMI-NEXT: movl %esi, %ebx ; X86-NOBMI-NEXT: movl %eax, %edx ; X86-NOBMI-NEXT: .LBB48_4: ; X86-NOBMI-NEXT: shrdl %cl, %edi, %ebx ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: jne .LBB48_6 ; X86-NOBMI-NEXT: # %bb.5: ; X86-NOBMI-NEXT: movl %ebx, %eax ; X86-NOBMI-NEXT: .LBB48_6: ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: popl %edi ; X86-NOBMI-NEXT: popl %ebx ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_d1_indexzext: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %ebx ; X86-BMI1NOTBM-NEXT: pushl %edi ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: movb $64, %cl ; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl %edx, %esi ; X86-BMI1NOTBM-NEXT: shll %cl, %esi ; X86-BMI1NOTBM-NEXT: shldl %cl, %edx, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: movl %esi, %edi ; X86-BMI1NOTBM-NEXT: jne .LBB48_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %eax, %edi ; X86-BMI1NOTBM-NEXT: .LBB48_2: ; X86-BMI1NOTBM-NEXT: movl %edi, %eax ; X86-BMI1NOTBM-NEXT: shrl %cl, %eax ; X86-BMI1NOTBM-NEXT: xorl %ebx, %ebx ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: movl $0, %edx ; X86-BMI1NOTBM-NEXT: jne .LBB48_4 ; X86-BMI1NOTBM-NEXT: # %bb.3: ; X86-BMI1NOTBM-NEXT: movl %esi, %ebx ; X86-BMI1NOTBM-NEXT: movl %eax, %edx ; X86-BMI1NOTBM-NEXT: .LBB48_4: ; X86-BMI1NOTBM-NEXT: shrdl %cl, %edi, %ebx ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: jne .LBB48_6 ; X86-BMI1NOTBM-NEXT: # %bb.5: ; X86-BMI1NOTBM-NEXT: movl %ebx, %eax ; X86-BMI1NOTBM-NEXT: .LBB48_6: ; X86-BMI1NOTBM-NEXT: 
popl %esi ; X86-BMI1NOTBM-NEXT: popl %edi ; X86-BMI1NOTBM-NEXT: popl %ebx ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_d1_indexzext: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %edi ; X86-BMI1BMI2-NEXT: pushl %esi ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi ; X86-BMI1BMI2-NEXT: movb $64, %cl ; X86-BMI1BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: shldl %cl, %eax, %esi ; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %edi ; X86-BMI1BMI2-NEXT: xorl %edx, %edx ; X86-BMI1BMI2-NEXT: testb $32, %cl ; X86-BMI1BMI2-NEXT: je .LBB48_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %edi, %esi ; X86-BMI1BMI2-NEXT: movl $0, %edi ; X86-BMI1BMI2-NEXT: .LBB48_2: ; X86-BMI1BMI2-NEXT: shrxl %ecx, %esi, %eax ; X86-BMI1BMI2-NEXT: jne .LBB48_4 ; X86-BMI1BMI2-NEXT: # %bb.3: ; X86-BMI1BMI2-NEXT: movl %eax, %edx ; X86-BMI1BMI2-NEXT: .LBB48_4: ; X86-BMI1BMI2-NEXT: shrdl %cl, %esi, %edi ; X86-BMI1BMI2-NEXT: testb $32, %cl ; X86-BMI1BMI2-NEXT: jne .LBB48_6 ; X86-BMI1BMI2-NEXT: # %bb.5: ; X86-BMI1BMI2-NEXT: movl %edi, %eax ; X86-BMI1BMI2-NEXT: .LBB48_6: ; X86-BMI1BMI2-NEXT: popl %esi ; X86-BMI1BMI2-NEXT: popl %edi ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_d1_indexzext: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movq %rdi, %rax ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shrq %cl, %rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_d1_indexzext: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: # kill: def $esi killed $esi def $rsi ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrq %rsi, %rdi, %rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_d1_indexzext: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: # kill: def $esi killed $esi def $rsi ; X64-BMI1BMI2-NEXT: bzhiq %rsi, %rdi, %rax ; X64-BMI1BMI2-NEXT: retq %numhighbits = sub i8 64, %numlowbits %sh_prom = zext i8 %numhighbits to i64 %highbitscleared = shl i64 %val, %sh_prom %masked = lshr i64 %highbitscleared, %sh_prom ret i64 %masked } define i64 @bzhi64_d2_load(i64* %w, i64 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_d2_load: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %ebx ; X86-NOBMI-NEXT: pushl %edi ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: movl (%eax), %edx ; X86-NOBMI-NEXT: movl 4(%eax), %eax ; X86-NOBMI-NEXT: movb $64, %cl ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl %edx, %esi ; X86-NOBMI-NEXT: shll %cl, %esi ; X86-NOBMI-NEXT: shldl %cl, %edx, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: movl %esi, %edi ; X86-NOBMI-NEXT: jne .LBB49_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %eax, %edi ; X86-NOBMI-NEXT: .LBB49_2: ; X86-NOBMI-NEXT: movl %edi, %eax ; X86-NOBMI-NEXT: shrl %cl, %eax ; X86-NOBMI-NEXT: xorl %ebx, %ebx ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: movl $0, %edx ; X86-NOBMI-NEXT: jne .LBB49_4 ; X86-NOBMI-NEXT: # %bb.3: ; X86-NOBMI-NEXT: movl %esi, %ebx ; X86-NOBMI-NEXT: movl %eax, %edx ; X86-NOBMI-NEXT: .LBB49_4: ; X86-NOBMI-NEXT: shrdl %cl, %edi, %ebx ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: jne .LBB49_6 ; X86-NOBMI-NEXT: # %bb.5: ; X86-NOBMI-NEXT: movl %ebx, %eax ; X86-NOBMI-NEXT: .LBB49_6: ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: popl %edi ; X86-NOBMI-NEXT: popl %ebx ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_d2_load: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: 
pushl %ebx ; X86-BMI1NOTBM-NEXT: pushl %edi ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: movl (%eax), %edx ; X86-BMI1NOTBM-NEXT: movl 4(%eax), %eax ; X86-BMI1NOTBM-NEXT: movb $64, %cl ; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl %edx, %esi ; X86-BMI1NOTBM-NEXT: shll %cl, %esi ; X86-BMI1NOTBM-NEXT: shldl %cl, %edx, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: movl %esi, %edi ; X86-BMI1NOTBM-NEXT: jne .LBB49_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %eax, %edi ; X86-BMI1NOTBM-NEXT: .LBB49_2: ; X86-BMI1NOTBM-NEXT: movl %edi, %eax ; X86-BMI1NOTBM-NEXT: shrl %cl, %eax ; X86-BMI1NOTBM-NEXT: xorl %ebx, %ebx ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: movl $0, %edx ; X86-BMI1NOTBM-NEXT: jne .LBB49_4 ; X86-BMI1NOTBM-NEXT: # %bb.3: ; X86-BMI1NOTBM-NEXT: movl %esi, %ebx ; X86-BMI1NOTBM-NEXT: movl %eax, %edx ; X86-BMI1NOTBM-NEXT: .LBB49_4: ; X86-BMI1NOTBM-NEXT: shrdl %cl, %edi, %ebx ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: jne .LBB49_6 ; X86-BMI1NOTBM-NEXT: # %bb.5: ; X86-BMI1NOTBM-NEXT: movl %ebx, %eax ; X86-BMI1NOTBM-NEXT: .LBB49_6: ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: popl %edi ; X86-BMI1NOTBM-NEXT: popl %ebx ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_d2_load: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %edi ; X86-BMI1BMI2-NEXT: pushl %esi ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: movl (%eax), %edx ; X86-BMI1BMI2-NEXT: movl 4(%eax), %esi ; X86-BMI1BMI2-NEXT: movb $64, %cl ; X86-BMI1BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: shldl %cl, %edx, %esi ; X86-BMI1BMI2-NEXT: shlxl %ecx, %edx, %edi ; X86-BMI1BMI2-NEXT: xorl %edx, %edx ; X86-BMI1BMI2-NEXT: testb $32, %cl ; X86-BMI1BMI2-NEXT: je .LBB49_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %edi, %esi ; X86-BMI1BMI2-NEXT: movl $0, %edi ; X86-BMI1BMI2-NEXT: .LBB49_2: ; X86-BMI1BMI2-NEXT: shrxl %ecx, %esi, %eax ; X86-BMI1BMI2-NEXT: jne .LBB49_4 ; X86-BMI1BMI2-NEXT: # %bb.3: ; X86-BMI1BMI2-NEXT: movl %eax, %edx ; X86-BMI1BMI2-NEXT: .LBB49_4: ; X86-BMI1BMI2-NEXT: shrdl %cl, %esi, %edi ; X86-BMI1BMI2-NEXT: testb $32, %cl ; X86-BMI1BMI2-NEXT: jne .LBB49_6 ; X86-BMI1BMI2-NEXT: # %bb.5: ; X86-BMI1BMI2-NEXT: movl %edi, %eax ; X86-BMI1BMI2-NEXT: .LBB49_6: ; X86-BMI1BMI2-NEXT: popl %esi ; X86-BMI1BMI2-NEXT: popl %edi ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_d2_load: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movq %rsi, %rcx ; X64-NOBMI-NEXT: movq (%rdi), %rax ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-NOBMI-NEXT: shrq %cl, %rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_d2_load: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrq %rsi, (%rdi), %rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_d2_load: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhiq %rsi, (%rdi), %rax ; X64-BMI1BMI2-NEXT: retq %val = load i64, i64* %w %numhighbits = sub i64 64, %numlowbits %highbitscleared = shl i64 %val, %numhighbits %masked = lshr i64 %highbitscleared, %numhighbits ret i64 %masked } define i64 @bzhi64_d3_load_indexzext(i64* %w, i8 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_d3_load_indexzext: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %ebx ; X86-NOBMI-NEXT: pushl %edi ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: movl 
(%eax), %edx ; X86-NOBMI-NEXT: movl 4(%eax), %eax ; X86-NOBMI-NEXT: movb $64, %cl ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl %edx, %esi ; X86-NOBMI-NEXT: shll %cl, %esi ; X86-NOBMI-NEXT: shldl %cl, %edx, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: movl %esi, %edi ; X86-NOBMI-NEXT: jne .LBB50_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %eax, %edi ; X86-NOBMI-NEXT: .LBB50_2: ; X86-NOBMI-NEXT: movl %edi, %eax ; X86-NOBMI-NEXT: shrl %cl, %eax ; X86-NOBMI-NEXT: xorl %ebx, %ebx ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: movl $0, %edx ; X86-NOBMI-NEXT: jne .LBB50_4 ; X86-NOBMI-NEXT: # %bb.3: ; X86-NOBMI-NEXT: movl %esi, %ebx ; X86-NOBMI-NEXT: movl %eax, %edx ; X86-NOBMI-NEXT: .LBB50_4: ; X86-NOBMI-NEXT: shrdl %cl, %edi, %ebx ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: jne .LBB50_6 ; X86-NOBMI-NEXT: # %bb.5: ; X86-NOBMI-NEXT: movl %ebx, %eax ; X86-NOBMI-NEXT: .LBB50_6: ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: popl %edi ; X86-NOBMI-NEXT: popl %ebx ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_d3_load_indexzext: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %ebx ; X86-BMI1NOTBM-NEXT: pushl %edi ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: movl (%eax), %edx ; X86-BMI1NOTBM-NEXT: movl 4(%eax), %eax ; X86-BMI1NOTBM-NEXT: movb $64, %cl ; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl %edx, %esi ; X86-BMI1NOTBM-NEXT: shll %cl, %esi ; X86-BMI1NOTBM-NEXT: shldl %cl, %edx, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: movl %esi, %edi ; X86-BMI1NOTBM-NEXT: jne .LBB50_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %eax, %edi ; X86-BMI1NOTBM-NEXT: .LBB50_2: ; X86-BMI1NOTBM-NEXT: movl %edi, %eax ; X86-BMI1NOTBM-NEXT: shrl %cl, %eax ; X86-BMI1NOTBM-NEXT: xorl %ebx, %ebx ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: movl $0, %edx ; X86-BMI1NOTBM-NEXT: jne .LBB50_4 ; X86-BMI1NOTBM-NEXT: # %bb.3: ; X86-BMI1NOTBM-NEXT: movl %esi, %ebx ; X86-BMI1NOTBM-NEXT: movl %eax, %edx ; X86-BMI1NOTBM-NEXT: .LBB50_4: ; X86-BMI1NOTBM-NEXT: shrdl %cl, %edi, %ebx ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: jne .LBB50_6 ; X86-BMI1NOTBM-NEXT: # %bb.5: ; X86-BMI1NOTBM-NEXT: movl %ebx, %eax ; X86-BMI1NOTBM-NEXT: .LBB50_6: ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: popl %edi ; X86-BMI1NOTBM-NEXT: popl %ebx ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_d3_load_indexzext: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: pushl %edi ; X86-BMI1BMI2-NEXT: pushl %esi ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: movl (%eax), %edx ; X86-BMI1BMI2-NEXT: movl 4(%eax), %esi ; X86-BMI1BMI2-NEXT: movb $64, %cl ; X86-BMI1BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: shldl %cl, %edx, %esi ; X86-BMI1BMI2-NEXT: shlxl %ecx, %edx, %edi ; X86-BMI1BMI2-NEXT: xorl %edx, %edx ; X86-BMI1BMI2-NEXT: testb $32, %cl ; X86-BMI1BMI2-NEXT: je .LBB50_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %edi, %esi ; X86-BMI1BMI2-NEXT: movl $0, %edi ; X86-BMI1BMI2-NEXT: .LBB50_2: ; X86-BMI1BMI2-NEXT: shrxl %ecx, %esi, %eax ; X86-BMI1BMI2-NEXT: jne .LBB50_4 ; X86-BMI1BMI2-NEXT: # %bb.3: ; X86-BMI1BMI2-NEXT: movl %eax, %edx ; X86-BMI1BMI2-NEXT: .LBB50_4: ; X86-BMI1BMI2-NEXT: shrdl %cl, %esi, %edi ; X86-BMI1BMI2-NEXT: testb $32, %cl ; X86-BMI1BMI2-NEXT: jne .LBB50_6 ; X86-BMI1BMI2-NEXT: # %bb.5: ; X86-BMI1BMI2-NEXT: movl %edi, %eax ; X86-BMI1BMI2-NEXT: .LBB50_6: ; X86-BMI1BMI2-NEXT: popl 
%esi ; X86-BMI1BMI2-NEXT: popl %edi ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_d3_load_indexzext: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movq (%rdi), %rax ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shrq %cl, %rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_d3_load_indexzext: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: # kill: def $esi killed $esi def $rsi ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrq %rsi, (%rdi), %rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_d3_load_indexzext: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: # kill: def $esi killed $esi def $rsi ; X64-BMI1BMI2-NEXT: bzhiq %rsi, (%rdi), %rax ; X64-BMI1BMI2-NEXT: retq %val = load i64, i64* %w %numhighbits = sub i8 64, %numlowbits %sh_prom = zext i8 %numhighbits to i64 %highbitscleared = shl i64 %val, %sh_prom %masked = lshr i64 %highbitscleared, %sh_prom ret i64 %masked } ; 64-bit, but with 32-bit output ; Everything done in 64-bit, truncation happens last. define i32 @bzhi64_32_d0(i64 %val, i64 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_32_d0: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: pushl %esi ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: movb $64, %cl ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: movl %esi, %edx ; X86-NOBMI-NEXT: shll %cl, %edx ; X86-NOBMI-NEXT: shldl %cl, %esi, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: je .LBB51_2 ; X86-NOBMI-NEXT: # %bb.1: ; X86-NOBMI-NEXT: movl %edx, %eax ; X86-NOBMI-NEXT: xorl %edx, %edx ; X86-NOBMI-NEXT: .LBB51_2: ; X86-NOBMI-NEXT: shrdl %cl, %eax, %edx ; X86-NOBMI-NEXT: shrl %cl, %eax ; X86-NOBMI-NEXT: testb $32, %cl ; X86-NOBMI-NEXT: jne .LBB51_4 ; X86-NOBMI-NEXT: # %bb.3: ; X86-NOBMI-NEXT: movl %edx, %eax ; X86-NOBMI-NEXT: .LBB51_4: ; X86-NOBMI-NEXT: popl %esi ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_32_d0: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: pushl %esi ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %esi ; X86-BMI1NOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: movb $64, %cl ; X86-BMI1NOTBM-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1NOTBM-NEXT: movl %esi, %edx ; X86-BMI1NOTBM-NEXT: shll %cl, %edx ; X86-BMI1NOTBM-NEXT: shldl %cl, %esi, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: je .LBB51_2 ; X86-BMI1NOTBM-NEXT: # %bb.1: ; X86-BMI1NOTBM-NEXT: movl %edx, %eax ; X86-BMI1NOTBM-NEXT: xorl %edx, %edx ; X86-BMI1NOTBM-NEXT: .LBB51_2: ; X86-BMI1NOTBM-NEXT: shrdl %cl, %eax, %edx ; X86-BMI1NOTBM-NEXT: shrl %cl, %eax ; X86-BMI1NOTBM-NEXT: testb $32, %cl ; X86-BMI1NOTBM-NEXT: jne .LBB51_4 ; X86-BMI1NOTBM-NEXT: # %bb.3: ; X86-BMI1NOTBM-NEXT: movl %edx, %eax ; X86-BMI1NOTBM-NEXT: .LBB51_4: ; X86-BMI1NOTBM-NEXT: popl %esi ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_32_d0: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx ; X86-BMI1BMI2-NEXT: movb $64, %cl ; X86-BMI1BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-BMI1BMI2-NEXT: shldl %cl, %eax, %edx ; X86-BMI1BMI2-NEXT: shlxl %ecx, %eax, %eax ; X86-BMI1BMI2-NEXT: testb $32, %cl ; X86-BMI1BMI2-NEXT: je .LBB51_2 ; X86-BMI1BMI2-NEXT: # %bb.1: ; X86-BMI1BMI2-NEXT: movl %eax, %edx ; X86-BMI1BMI2-NEXT: xorl %eax, %eax ; X86-BMI1BMI2-NEXT: .LBB51_2: ; X86-BMI1BMI2-NEXT: shrdl %cl, %edx, %eax ; X86-BMI1BMI2-NEXT: testb $32, %cl ; 
X86-BMI1BMI2-NEXT: je .LBB51_4 ; X86-BMI1BMI2-NEXT: # %bb.3: ; X86-BMI1BMI2-NEXT: shrxl %ecx, %edx, %eax ; X86-BMI1BMI2-NEXT: .LBB51_4: ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_32_d0: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movq %rsi, %rcx ; X64-NOBMI-NEXT: movq %rdi, %rax ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: shlq %cl, %rax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx ; X64-NOBMI-NEXT: shrq %cl, %rax ; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_32_d0: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrq %rsi, %rdi, %rax ; X64-BMI1NOTBM-NEXT: # kill: def $eax killed $eax killed $rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_32_d0: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhiq %rsi, %rdi, %rax ; X64-BMI1BMI2-NEXT: # kill: def $eax killed $eax killed $rax ; X64-BMI1BMI2-NEXT: retq %numhighbits = sub i64 64, %numlowbits %highbitscleared = shl i64 %val, %numhighbits %masked = lshr i64 %highbitscleared, %numhighbits %res = trunc i64 %masked to i32 ret i32 %res } ; Shifting happens in 64-bit, then truncation. Masking is 32-bit. define i32 @bzhi64_32_d1(i64 %val, i32 %numlowbits) nounwind { ; X86-NOBMI-LABEL: bzhi64_32_d1: ; X86-NOBMI: # %bb.0: ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NOBMI-NEXT: xorl %ecx, %ecx ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl ; X86-NOBMI-NEXT: shll %cl, %eax ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X86-NOBMI-NEXT: shrl %cl, %eax ; X86-NOBMI-NEXT: retl ; ; X86-BMI1NOTBM-LABEL: bzhi64_32_d1: ; X86-BMI1NOTBM: # %bb.0: ; X86-BMI1NOTBM-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1NOTBM-NEXT: shll $8, %eax ; X86-BMI1NOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1NOTBM-NEXT: retl ; ; X86-BMI1BMI2-LABEL: bzhi64_32_d1: ; X86-BMI1BMI2: # %bb.0: ; X86-BMI1BMI2-NEXT: movb {{[0-9]+}}(%esp), %al ; X86-BMI1BMI2-NEXT: bzhil %eax, {{[0-9]+}}(%esp), %eax ; X86-BMI1BMI2-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_32_d1: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movl %esi, %ecx ; X64-NOBMI-NEXT: movq %rdi, %rax ; X64-NOBMI-NEXT: negb %cl ; X64-NOBMI-NEXT: shll %cl, %eax ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx ; X64-NOBMI-NEXT: shrl %cl, %eax ; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_32_d1: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: shll $8, %esi ; X64-BMI1NOTBM-NEXT: bextrl %esi, %edi, %eax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1BMI2-LABEL: bzhi64_32_d1: ; X64-BMI1BMI2: # %bb.0: ; X64-BMI1BMI2-NEXT: bzhil %esi, %edi, %eax ; X64-BMI1BMI2-NEXT: retq %truncval = trunc i64 %val to i32 %numhighbits = sub i32 32, %numlowbits %highbitscleared = shl i32 %truncval, %numhighbits %masked = lshr i32 %highbitscleared, %numhighbits ret i32 %masked } ; ---------------------------------------------------------------------------- ; ; Constant mask ; ---------------------------------------------------------------------------- ; ; 32-bit define i32 @bzhi32_constant_mask32(i32 %val) nounwind { ; X86-LABEL: bzhi32_constant_mask32: ; X86: # %bb.0: ; X86-NEXT: movl $2147483647, %eax # imm = 0x7FFFFFFF ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NEXT: retl ; ; X64-LABEL: bzhi32_constant_mask32: ; X64: # %bb.0: ; X64-NEXT: movl %edi, %eax ; X64-NEXT: andl $2147483647, %eax # imm = 0x7FFFFFFF ; X64-NEXT: retq %masked = and i32 %val, 2147483647 ret i32 %masked } define i32 @bzhi32_constant_mask32_load(i32* %val) nounwind { ; 
X86-LABEL: bzhi32_constant_mask32_load: ; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl $2147483647, %eax # imm = 0x7FFFFFFF ; X86-NEXT: andl (%ecx), %eax ; X86-NEXT: retl ; ; X64-LABEL: bzhi32_constant_mask32_load: ; X64: # %bb.0: ; X64-NEXT: movl $2147483647, %eax # imm = 0x7FFFFFFF ; X64-NEXT: andl (%rdi), %eax ; X64-NEXT: retq %val1 = load i32, i32* %val %masked = and i32 %val1, 2147483647 ret i32 %masked } define i32 @bzhi32_constant_mask16(i32 %val) nounwind { ; X86-LABEL: bzhi32_constant_mask16: ; X86: # %bb.0: ; X86-NEXT: movl $32767, %eax # imm = 0x7FFF ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NEXT: retl ; ; X64-LABEL: bzhi32_constant_mask16: ; X64: # %bb.0: ; X64-NEXT: movl %edi, %eax ; X64-NEXT: andl $32767, %eax # imm = 0x7FFF ; X64-NEXT: retq %masked = and i32 %val, 32767 ret i32 %masked } define i32 @bzhi32_constant_mask16_load(i32* %val) nounwind { ; X86-LABEL: bzhi32_constant_mask16_load: ; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl $32767, %eax # imm = 0x7FFF ; X86-NEXT: andl (%ecx), %eax ; X86-NEXT: retl ; ; X64-LABEL: bzhi32_constant_mask16_load: ; X64: # %bb.0: ; X64-NEXT: movl $32767, %eax # imm = 0x7FFF ; X64-NEXT: andl (%rdi), %eax ; X64-NEXT: retq %val1 = load i32, i32* %val %masked = and i32 %val1, 32767 ret i32 %masked } define i32 @bzhi32_constant_mask8(i32 %val) nounwind { ; X86-LABEL: bzhi32_constant_mask8: ; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: andl $127, %eax ; X86-NEXT: retl ; ; X64-LABEL: bzhi32_constant_mask8: ; X64: # %bb.0: ; X64-NEXT: movl %edi, %eax ; X64-NEXT: andl $127, %eax ; X64-NEXT: retq %masked = and i32 %val, 127 ret i32 %masked } define i32 @bzhi32_constant_mask8_load(i32* %val) nounwind { ; X86-LABEL: bzhi32_constant_mask8_load: ; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl (%eax), %eax ; X86-NEXT: andl $127, %eax ; X86-NEXT: retl ; ; X64-LABEL: bzhi32_constant_mask8_load: ; X64: # %bb.0: ; X64-NEXT: movl (%rdi), %eax ; X64-NEXT: andl $127, %eax ; X64-NEXT: retq %val1 = load i32, i32* %val %masked = and i32 %val1, 127 ret i32 %masked } ; 64-bit define i64 @bzhi64_constant_mask64(i64 %val) nounwind { ; X86-LABEL: bzhi64_constant_mask64: ; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl $1073741823, %edx # imm = 0x3FFFFFFF ; X86-NEXT: andl {{[0-9]+}}(%esp), %edx ; X86-NEXT: retl ; ; X64-NOBMI-LABEL: bzhi64_constant_mask64: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movabsq $4611686018427387903, %rax # imm = 0x3FFFFFFFFFFFFFFF ; X64-NOBMI-NEXT: andq %rdi, %rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_constant_mask64: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: movl $15872, %eax # imm = 0x3E00 ; X64-BMI1NOTBM-NEXT: bextrq %rax, %rdi, %rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1TBM-LABEL: bzhi64_constant_mask64: ; X64-BMI1TBM: # %bb.0: ; X64-BMI1TBM-NEXT: bextrq $15872, %rdi, %rax # imm = 0x3E00 ; X64-BMI1TBM-NEXT: retq ; ; X64-BMI1NOTBMBMI2-LABEL: bzhi64_constant_mask64: ; X64-BMI1NOTBMBMI2: # %bb.0: ; X64-BMI1NOTBMBMI2-NEXT: movb $62, %al ; X64-BMI1NOTBMBMI2-NEXT: bzhiq %rax, %rdi, %rax ; X64-BMI1NOTBMBMI2-NEXT: retq %masked = and i64 %val, 4611686018427387903 ret i64 %masked } define i64 @bzhi64_constant_mask64_load(i64* %val) nounwind { ; X86-LABEL: bzhi64_constant_mask64_load: ; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl (%ecx), %eax ; X86-NEXT: movl $1073741823, %edx # imm = 0x3FFFFFFF ; X86-NEXT: andl 4(%ecx), %edx ; X86-NEXT: retl ; ; X64-NOBMI-LABEL: 
bzhi64_constant_mask64_load: ; X64-NOBMI: # %bb.0: ; X64-NOBMI-NEXT: movabsq $4611686018427387903, %rax # imm = 0x3FFFFFFFFFFFFFFF ; X64-NOBMI-NEXT: andq (%rdi), %rax ; X64-NOBMI-NEXT: retq ; ; X64-BMI1NOTBM-LABEL: bzhi64_constant_mask64_load: ; X64-BMI1NOTBM: # %bb.0: ; X64-BMI1NOTBM-NEXT: movl $15872, %eax # imm = 0x3E00 ; X64-BMI1NOTBM-NEXT: bextrq %rax, (%rdi), %rax ; X64-BMI1NOTBM-NEXT: retq ; ; X64-BMI1TBM-LABEL: bzhi64_constant_mask64_load: ; X64-BMI1TBM: # %bb.0: ; X64-BMI1TBM-NEXT: bextrq $15872, (%rdi), %rax # imm = 0x3E00 ; X64-BMI1TBM-NEXT: retq ; ; X64-BMI1NOTBMBMI2-LABEL: bzhi64_constant_mask64_load: ; X64-BMI1NOTBMBMI2: # %bb.0: ; X64-BMI1NOTBMBMI2-NEXT: movb $62, %al ; X64-BMI1NOTBMBMI2-NEXT: bzhiq %rax, (%rdi), %rax ; X64-BMI1NOTBMBMI2-NEXT: retq %val1 = load i64, i64* %val %masked = and i64 %val1, 4611686018427387903 ret i64 %masked } define i64 @bzhi64_constant_mask32(i64 %val) nounwind { ; X86-LABEL: bzhi64_constant_mask32: ; X86: # %bb.0: ; X86-NEXT: movl $2147483647, %eax # imm = 0x7FFFFFFF ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NEXT: xorl %edx, %edx ; X86-NEXT: retl ; ; X64-LABEL: bzhi64_constant_mask32: ; X64: # %bb.0: ; X64-NEXT: movq %rdi, %rax ; X64-NEXT: andl $2147483647, %eax # imm = 0x7FFFFFFF ; X64-NEXT: retq %masked = and i64 %val, 2147483647 ret i64 %masked } define i64 @bzhi64_constant_mask32_load(i64* %val) nounwind { ; X86-LABEL: bzhi64_constant_mask32_load: ; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl $2147483647, %eax # imm = 0x7FFFFFFF ; X86-NEXT: andl (%ecx), %eax ; X86-NEXT: xorl %edx, %edx ; X86-NEXT: retl ; ; X64-LABEL: bzhi64_constant_mask32_load: ; X64: # %bb.0: ; X64-NEXT: movq (%rdi), %rax ; X64-NEXT: andl $2147483647, %eax # imm = 0x7FFFFFFF ; X64-NEXT: retq %val1 = load i64, i64* %val %masked = and i64 %val1, 2147483647 ret i64 %masked } define i64 @bzhi64_constant_mask16(i64 %val) nounwind { ; X86-LABEL: bzhi64_constant_mask16: ; X86: # %bb.0: ; X86-NEXT: movl $32767, %eax # imm = 0x7FFF ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax ; X86-NEXT: xorl %edx, %edx ; X86-NEXT: retl ; ; X64-LABEL: bzhi64_constant_mask16: ; X64: # %bb.0: ; X64-NEXT: movq %rdi, %rax ; X64-NEXT: andl $32767, %eax # imm = 0x7FFF ; X64-NEXT: retq %masked = and i64 %val, 32767 ret i64 %masked } define i64 @bzhi64_constant_mask16_load(i64* %val) nounwind { ; X86-LABEL: bzhi64_constant_mask16_load: ; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X86-NEXT: movl $32767, %eax # imm = 0x7FFF ; X86-NEXT: andl (%ecx), %eax ; X86-NEXT: xorl %edx, %edx ; X86-NEXT: retl ; ; X64-LABEL: bzhi64_constant_mask16_load: ; X64: # %bb.0: ; X64-NEXT: movq (%rdi), %rax ; X64-NEXT: andl $32767, %eax # imm = 0x7FFF ; X64-NEXT: retq %val1 = load i64, i64* %val %masked = and i64 %val1, 32767 ret i64 %masked } define i64 @bzhi64_constant_mask8(i64 %val) nounwind { ; X86-LABEL: bzhi64_constant_mask8: ; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: andl $127, %eax ; X86-NEXT: xorl %edx, %edx ; X86-NEXT: retl ; ; X64-LABEL: bzhi64_constant_mask8: ; X64: # %bb.0: ; X64-NEXT: movq %rdi, %rax ; X64-NEXT: andl $127, %eax ; X64-NEXT: retq %masked = and i64 %val, 127 ret i64 %masked } define i64 @bzhi64_constant_mask8_load(i64* %val) nounwind { ; X86-LABEL: bzhi64_constant_mask8_load: ; X86: # %bb.0: ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax ; X86-NEXT: movl (%eax), %eax ; X86-NEXT: andl $127, %eax ; X86-NEXT: xorl %edx, %edx ; X86-NEXT: retl ; ; X64-LABEL: bzhi64_constant_mask8_load: ; X64: # %bb.0: ; X64-NEXT: movq (%rdi), %rax ; X64-NEXT: andl 
$127, %eax ; X64-NEXT: retq %val1 = load i64, i64* %val %masked = and i64 %val1, 127 ret i64 %masked }
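; Illustrative note (no assertions are attached to it): every constant mask above
; has the form (1 << k) - 1. For bzhi64_constant_mask64, 0x3FFFFFFFFFFFFFFF is
; (1 << 62) - 1, which is why the TBM lowering encodes it as bextrq $15872
; (imm = 0x3E00: start 0, length 62) and the BMI2 lowering as bzhiq with 62 in
; the index register.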