; NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
; RUN: llc -mtriple=x86_64-linux -stop-after=early-tailduplication < %s | FileCheck %s

; Ensure that we don't duplicate a block with an "INLINEASM_BR" instruction
; during code gen.
declare dso_local void @foo()

define i8* @test1(i8** %arg1, i8* %arg2) {
  ; CHECK-LABEL: name: test1
  ; CHECK: bb.0.bb:
  ; CHECK:   successors: %bb.1(0x50000000), %bb.2(0x30000000)
  ; CHECK:   liveins: $rdi, $rsi
  ; CHECK:   [[COPY:%[0-9]+]]:gr64 = COPY $rsi
  ; CHECK:   [[COPY1:%[0-9]+]]:gr64 = COPY $rdi
  ; CHECK:   [[MOV64rm:%[0-9]+]]:gr64 = MOV64rm [[COPY1]], 1, $noreg, 0, $noreg :: (load 8 from %ir.arg1)
  ; CHECK:   [[SUB64rr:%[0-9]+]]:gr64 = SUB64rr [[MOV64rm]], [[COPY]], implicit-def $eflags
  ; CHECK:   JCC_1 %bb.2, 4, implicit $eflags
  ; CHECK:   JMP_1 %bb.1
  ; CHECK: bb.1.bb100:
  ; CHECK:   successors: %bb.3(0x80000000)
  ; CHECK:   MOV64mi32 [[COPY1]], 1, $noreg, 0, $noreg, 0 :: (store 8 into %ir.arg1)
  ; CHECK:   JMP_1 %bb.3
  ; CHECK: bb.2.bb106:
  ; CHECK:   successors: %bb.3(0x80000000)
  ; CHECK:   ADJCALLSTACKDOWN64 0, 0, 0, implicit-def dead $rsp, implicit-def dead $eflags, implicit-def dead $ssp, implicit $rsp, implicit $ssp
  ; CHECK:   CALL64pcrel32 @foo, csr_64, implicit $rsp, implicit $ssp, implicit-def $rsp, implicit-def $ssp
  ; CHECK:   ADJCALLSTACKUP64 0, 0, implicit-def dead $rsp, implicit-def dead $eflags, implicit-def dead $ssp, implicit $rsp, implicit $ssp
  ; CHECK: bb.3.bb110:
  ; CHECK:   successors: %bb.5(0x80000000), %bb.4(0x00000000)
  ; CHECK:   [[PHI:%[0-9]+]]:gr64 = PHI [[COPY]], %bb.2, [[MOV64rm]], %bb.1
  ; CHECK:   INLINEASM_BR &"#$0 $1 $2", 9 /* sideeffect mayload attdialect */, 13 /* imm */, 42, 13 /* imm */, 0, 13 /* imm */, blockaddress(@test1, %ir-block.bb17.i.i.i), 12 /* clobber */, implicit-def early-clobber $df, 12 /* clobber */, implicit-def early-clobber $fpsw, 12 /* clobber */, implicit-def early-clobber $eflags
  ; CHECK:   JMP_1 %bb.5
  ; CHECK: bb.4.bb17.i.i.i (address-taken):
  ; CHECK:   successors: %bb.5(0x80000000)
  ; CHECK: bb.5.kmem_cache_has_cpu_partial.exit:
  ; CHECK:   $rax = COPY [[PHI]]
  ; CHECK:   RET 0, $rax
bb:
  %i28.i = load i8*, i8** %arg1, align 8
  %if = icmp ne i8* %i28.i, %arg2
  br i1 %if, label %bb100, label %bb106

bb100:                                            ; preds = %bb
  store i8* null, i8** %arg1, align 8
  br label %bb110

bb106:                                            ; preds = %bb
  call void @foo()
  br label %bb110

bb110:                                            ; preds = %bb106, %bb100
  %i10.1 = phi i8* [ %arg2, %bb106 ], [ %i28.i, %bb100 ]
  ; The callbr lowers to INLINEASM_BR; the CHECK lines above verify that
  ; bb110 (bb.3) is left intact by early tail duplication.
  callbr void asm sideeffect "#$0 $1 $2", "i,i,X,~{dirflag},~{fpsr},~{flags}"(i32 42, i1 false, i8* blockaddress(@test1, %bb17.i.i.i))
          to label %kmem_cache_has_cpu_partial.exit [label %bb17.i.i.i]

bb17.i.i.i:                                       ; preds = %bb110
  br label %kmem_cache_has_cpu_partial.exit

kmem_cache_has_cpu_partial.exit:                  ; preds = %bb110
  ret i8* %i10.1
}