; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown | FileCheck %s -check-prefix=X64
; RUN: llc < %s -mtriple=i686-unknown | FileCheck %s -check-prefix=X86

define i32 @foo(i32 %a, i32 %b) local_unnamed_addr #0 {
; X64-LABEL: foo:
; X64:       # %bb.0: # %entry
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal 4(%rdi,%rsi,2), %ecx
; X64-NEXT:    leal 4(%rdi,%rsi,4), %eax
; X64-NEXT:    imull %ecx, %eax
; X64-NEXT:    retq
;
; X86-LABEL: foo:
; X86:       # %bb.0: # %entry
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    leal 4(%ecx,%eax,2), %edx
; X86-NEXT:    leal 4(%ecx,%eax,4), %eax
; X86-NEXT:    imull %edx, %eax
; X86-NEXT:    retl
entry:
  %mul = shl i32 %b, 1
  %add = add i32 %a, 4
  %add1 = add i32 %add, %mul
  %mul2 = shl i32 %b, 2
  %add4 = add i32 %add, %mul2
  %mul5 = mul nsw i32 %add1, %add4
  ret i32 %mul5
}

define i32 @foo1(i32 %a, i32 %b) local_unnamed_addr #0 {
; X64-LABEL: foo1:
; X64:       # %bb.0: # %entry
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal 4(%rdi,%rsi,4), %ecx
; X64-NEXT:    leal 4(%rdi,%rsi,8), %eax
; X64-NEXT:    imull %ecx, %eax
; X64-NEXT:    retq
;
; X86-LABEL: foo1:
; X86:       # %bb.0: # %entry
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    leal 4(%ecx,%eax,4), %edx
; X86-NEXT:    leal 4(%ecx,%eax,8), %eax
; X86-NEXT:    imull %edx, %eax
; X86-NEXT:    retl
entry:
  %mul = shl i32 %b, 2
  %add = add i32 %a, 4
  %add1 = add i32 %add, %mul
  %mul2 = shl i32 %b, 3
  %add4 = add i32 %add, %mul2
  %mul5 = mul nsw i32 %add1, %add4
  ret i32 %mul5
}

define i32 @foo1_mult_basic_blocks(i32 %a, i32 %b) local_unnamed_addr #0 {
; X64-LABEL: foo1_mult_basic_blocks:
; X64:       # %bb.0: # %entry
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal 4(%rdi,%rsi,4), %ecx
; X64-NEXT:    xorl %eax, %eax
; X64-NEXT:    cmpl $10, %ecx
; X64-NEXT:    je .LBB2_2
; X64-NEXT:  # %bb.1: # %mid
; X64-NEXT:    leal 4(%rdi,%rsi,8), %eax
; X64-NEXT:    imull %eax, %ecx
; X64-NEXT:    movl %ecx, %eax
; X64-NEXT:  .LBB2_2: # %exit
; X64-NEXT:    retq
;
; X86-LABEL: foo1_mult_basic_blocks:
; X86:       # %bb.0: # %entry
; X86-NEXT:    pushl %esi
; X86-NEXT:    .cfi_def_cfa_offset 8
; X86-NEXT:    .cfi_offset %esi, -8
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    leal 4(%esi,%edx,4), %ecx
; X86-NEXT:    xorl %eax, %eax
; X86-NEXT:    cmpl $10, %ecx
; X86-NEXT:    je .LBB2_2
; X86-NEXT:  # %bb.1: # %mid
; X86-NEXT:    leal 4(%esi,%edx,8), %eax
; X86-NEXT:    imull %eax, %ecx
; X86-NEXT:    movl %ecx, %eax
; X86-NEXT:  .LBB2_2: # %exit
; X86-NEXT:    popl %esi
; X86-NEXT:    .cfi_def_cfa_offset 4
; X86-NEXT:    retl
entry:
  %mul = shl i32 %b, 2
  %add = add i32 %a, 4
  %add1 = add i32 %add, %mul
  %cmp = icmp ne i32 %add1, 10
  br i1 %cmp, label %mid, label %exit
mid:
  %addn = add i32 %a, 4
  %mul2 = shl i32 %b, 3
  %add4 = add i32 %addn, %mul2
  %mul5 = mul nsw i32 %add1, %add4
  br label %exit

exit:
  %retmul = phi i32 [ %mul5, %mid ], [ 0, %entry ]
  ret i32 %retmul
}

define i32 @foo1_mult_basic_blocks_illegal_scale(i32 %a, i32 %b) local_unnamed_addr #0 {
; X64-LABEL: foo1_mult_basic_blocks_illegal_scale:
; X64:       # %bb.0: # %entry
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal 4(%rdi,%rsi,2), %ecx
; X64-NEXT:    xorl %eax, %eax
; X64-NEXT:    cmpl $10, %ecx
; X64-NEXT:    je .LBB3_2
; X64-NEXT:  # %bb.1: # %mid
; X64-NEXT:    leal 4(%rdi,%rsi,8), %eax
; X64-NEXT:    imull %eax, %ecx
; X64-NEXT:    movl %ecx, %eax
; X64-NEXT:  .LBB3_2: # %exit
; X64-NEXT:    retq
;
; X86-LABEL: foo1_mult_basic_blocks_illegal_scale:
; X86:       # %bb.0: # %entry
; X86-NEXT:    pushl %esi
; X86-NEXT:    .cfi_def_cfa_offset 8
; X86-NEXT:    .cfi_offset %esi, -8
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    leal 4(%esi,%edx,2), %ecx
; X86-NEXT:    xorl %eax, %eax
; X86-NEXT:    cmpl $10, %ecx
; X86-NEXT:    je .LBB3_2
; X86-NEXT:  # %bb.1: # %mid
; X86-NEXT:    leal 4(%esi,%edx,8), %eax
; X86-NEXT:    imull %eax, %ecx
; X86-NEXT:    movl %ecx, %eax
; X86-NEXT:  .LBB3_2: # %exit
; X86-NEXT:    popl %esi
; X86-NEXT:    .cfi_def_cfa_offset 4
; X86-NEXT:    retl
entry:
  %mul = shl i32 %b, 1
  %add = add i32 %a, 4
  %add1 = add i32 %add, %mul
  %cmp = icmp ne i32 %add1, 10
  br i1 %cmp, label %mid, label %exit
mid:
  %addn = add i32 %a, 4
  %mul2 = shl i32 %b, 3
  %add4 = add i32 %addn, %mul2
  %mul5 = mul nsw i32 %add1, %add4
  br label %exit
exit:
  %retmul = phi i32 [ %mul5, %mid ], [ 0, %entry ]
  ret i32 %retmul
}