; RUN: opt -objc-arc -S < %s | FileCheck %s

%struct.__objcFastEnumerationState = type { i64, i8**, i64*, [5 x i64] }

@"\01L_OBJC_METH_VAR_NAME_" = internal global [43 x i8] c"countByEnumeratingWithState:objects:count:\00", section "__TEXT,__objc_methname,cstring_literals", align 1
@"\01L_OBJC_SELECTOR_REFERENCES_" = internal global i8* getelementptr inbounds ([43 x i8], [43 x i8]* @"\01L_OBJC_METH_VAR_NAME_", i64 0, i64 0), section "__DATA, __objc_selrefs, literal_pointers, no_dead_strip"
@g = common global i8* null, align 8
@"\01L_OBJC_IMAGE_INFO" = internal constant [2 x i32] [i32 0, i32 16], section "__DATA, __objc_imageinfo, regular, no_dead_strip"

declare void @callee()
declare i8* @returner()
declare i8* @objc_retainAutoreleasedReturnValue(i8*)
declare i8* @objc_retain(i8*)
declare void @objc_enumerationMutation(i8*)
declare void @llvm.memset.p0i8.i64(i8* nocapture, i8, i64, i1) nounwind
declare i8* @objc_msgSend(i8*, i8*, ...) nonlazybind
declare void @use(i8*)
declare void @objc_release(i8*)
declare i8* @def()
declare void @__crasher_block_invoke(i8* nocapture)
declare i8* @objc_retainBlock(i8*)
declare void @__crasher_block_invoke1(i8* nocapture)

!0 = !{}

; Delete a nested retain+release pair.

; CHECK-LABEL: define void @test0(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test0(i8* %a) nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %0 = call i8* @objc_retain(i8* %a) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* align 8 %tmp, i8 0, i64 64, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call, %forcoll.loopinit ], [ %call6, %forcoll.refetch ]
  %tmp7 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp7, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr3 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr3, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8**, i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8*, i8** %stateitems, i64 %forcoll.index
  %3 = load i8*, i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp5 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call6 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp5, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call6, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; Delete a nested retain+release pair.
; CHECK-LABEL: define void @test2(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test2() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* align 8 %tmp, i8 0, i64 64, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call3 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call3, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call3, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp8 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp8, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr4 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr4, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8**, i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8*, i8** %stateitems, i64 %forcoll.index
  %3 = load i8*, i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call7, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; Delete a nested retain+release pair.
; CHECK-LABEL: define void @test4(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test4() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %tmp = load i8*, i8** @g, align 8
  %0 = call i8* @objc_retain(i8* %tmp) nounwind
  %tmp2 = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* align 8 %tmp2, i8 0, i64 64, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp4 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp4, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call, %forcoll.loopinit ], [ %call8, %forcoll.refetch ]
  %tmp9 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp9, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr5 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr5, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8**, i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8*, i8** %stateitems, i64 %forcoll.index
  %3 = load i8*, i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp7 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call8 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp7, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call8, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; Delete a nested retain+release pair.
; CHECK-LABEL: define void @test5(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test5() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* align 8 %tmp, i8 0, i64 64, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call3 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call3, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call3, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp8 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp8, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr4 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr4, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8**, i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8*, i8** %stateitems, i64 %forcoll.index
  %3 = load i8*, i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call7, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; We handle this now due to the fact that a release just needs a post dominating
; use.
;
; CHECK-LABEL: define void @test6(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test6() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* align 8 %tmp, i8 0, i64 64, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call3 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call3, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call3, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp8 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp8, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr4 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr4, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8**, i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8*, i8** %stateitems, i64 %forcoll.index
  %3 = load i8*, i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call7, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @callee()
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; TODO: Delete a nested retain+release pair.
; The optimizer currently can't do this, because it isn't sophisticated enough in
; reasoning about nesting.
; CHECK-LABEL: define void @test7(
; CHECK: call i8* @objc_retain
; CHECK: @objc_retain
; CHECK: }
define void @test7() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  call void @callee()
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* align 8 %tmp, i8 0, i64 64, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call3 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call3, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call3, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp8 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp8, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr4 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr4, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8**, i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8*, i8** %stateitems, i64 %forcoll.index
  %3 = load i8*, i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call7, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @callee()
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; Delete a nested retain+release pair.
; CHECK-LABEL: define void @test8(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test8() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* align 8 %tmp, i8 0, i64 64, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call3 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call3, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call3, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp8 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp8, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.next ]
  %mutationsptr4 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr4, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8**, i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8*, i8** %stateitems, i64 %forcoll.index
  %3 = load i8*, i8** %currentitem.ptr, align 8
  %tobool = icmp eq i8* %3, null
  br i1 %tobool, label %forcoll.next, label %if.then

if.then:
  call void @callee()
  br label %forcoll.next

forcoll.next:
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call7, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; TODO: Delete a nested retain+release pair.
; The optimizer currently can't do this, because of a split loop backedge.
; See test9b for the same testcase without a split backedge.
; CHECK-LABEL: define void @test9(
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK: }
define void @test9() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %call1 = call i8* @returner()
  %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call1) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* align 8 %tmp, i8 0, i64 64, i1 false)
  %2 = call i8* @objc_retain(i8* %0) nounwind
  %tmp3 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call4 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp3, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call4, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call4, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp9 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp9, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ %phitmp, %forcoll.notmutated.forcoll.loopbody_crit_edge ], [ 1, %forcoll.loopbody.outer ]
  %mutationsptr5 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr5, align 8
  %3 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %3, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %2)
  br label %forcoll.notmutated

forcoll.notmutated:
  %exitcond = icmp eq i64 %forcoll.index, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.notmutated.forcoll.loopbody_crit_edge

forcoll.notmutated.forcoll.loopbody_crit_edge:
  %phitmp = add i64 %forcoll.index, 1
  br label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %4 = icmp eq i64 %call7, 0
  br i1 %4, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %2) nounwind
  call void @objc_release(i8* %1) nounwind, !clang.imprecise_release !0
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; Like test9, but without a split backedge. TODO: optimize this.
; CHECK-LABEL: define void @test9b(
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK: @objc_retain
; CHECK: }
define void @test9b() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %call1 = call i8* @returner()
  %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call1) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* align 8 %tmp, i8 0, i64 64, i1 false)
  %2 = call i8* @objc_retain(i8* %0) nounwind
  %tmp3 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call4 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp3, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call4, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call4, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp9 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp9, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ %phitmp, %forcoll.notmutated ], [ 0, %forcoll.loopbody.outer ]
  %mutationsptr5 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr5, align 8
  %3 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %3, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %2)
  br label %forcoll.notmutated

forcoll.notmutated:
  %phitmp = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %phitmp, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %4 = icmp eq i64 %call7, 0
  br i1 %4, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %2) nounwind
  call void @objc_release(i8* %1) nounwind, !clang.imprecise_release !0
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; TODO: Delete a nested retain+release pair.
; The optimizer currently can't do this, because of a split loop backedge.
; See test10b for the same testcase without a split backedge.
; CHECK-LABEL: define void @test10(
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK: }
define void @test10() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %call1 = call i8* @returner()
  %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call1) nounwind
  call void @callee()
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* align 8 %tmp, i8 0, i64 64, i1 false)
  %2 = call i8* @objc_retain(i8* %0) nounwind
  %tmp3 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call4 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp3, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call4, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call4, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp9 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp9, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ %phitmp, %forcoll.notmutated.forcoll.loopbody_crit_edge ], [ 1, %forcoll.loopbody.outer ]
  %mutationsptr5 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr5, align 8
  %3 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %3, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %2)
  br label %forcoll.notmutated

forcoll.notmutated:
  %exitcond = icmp eq i64 %forcoll.index, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.notmutated.forcoll.loopbody_crit_edge

forcoll.notmutated.forcoll.loopbody_crit_edge:
  %phitmp = add i64 %forcoll.index, 1
  br label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %4 = icmp eq i64 %call7, 0
  br i1 %4, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %2) nounwind
  call void @objc_release(i8* %1) nounwind, !clang.imprecise_release !0
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; Like test10, but without a split backedge. TODO: optimize this.
697 698; CHECK-LABEL: define void @test10b( 699; CHECK: call i8* @objc_retain 700; CHECK: call i8* @objc_retain 701; CHECK: @objc_retain 702; CHECK: } 703define void @test10b() nounwind { 704entry: 705 %state.ptr = alloca %struct.__objcFastEnumerationState, align 8 706 %items.ptr = alloca [16 x i8*], align 8 707 %call = call i8* @returner() 708 %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind 709 %call1 = call i8* @returner() 710 %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call1) nounwind 711 call void @callee() 712 %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8* 713 call void @llvm.memset.p0i8.i64(i8* align 8 %tmp, i8 0, i64 64, i1 false) 714 %2 = call i8* @objc_retain(i8* %0) nounwind 715 %tmp3 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8 716 %call4 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp3, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16) 717 %iszero = icmp eq i64 %call4, 0 718 br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit 719 720forcoll.loopinit: 721 %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2 722 %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8 723 %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8 724 br label %forcoll.loopbody.outer 725 726forcoll.loopbody.outer: 727 %forcoll.count.ph = phi i64 [ %call4, %forcoll.loopinit ], [ %call7, %forcoll.refetch ] 728 %tmp9 = icmp ugt i64 %forcoll.count.ph, 1 729 %umax = select i1 %tmp9, i64 %forcoll.count.ph, i64 1 730 br label %forcoll.loopbody 731 732forcoll.loopbody: 733 %forcoll.index = phi i64 [ %phitmp, %forcoll.notmutated ], [ 0, %forcoll.loopbody.outer ] 734 %mutationsptr5 = load i64*, i64** %mutationsptr.ptr, align 8 735 %statemutations = load i64, i64* %mutationsptr5, 
align 8 736 %3 = icmp eq i64 %statemutations, %forcoll.initial-mutations 737 br i1 %3, label %forcoll.notmutated, label %forcoll.mutated 738 739forcoll.mutated: 740 call void @objc_enumerationMutation(i8* %2) 741 br label %forcoll.notmutated 742 743forcoll.notmutated: 744 %phitmp = add i64 %forcoll.index, 1 745 %exitcond = icmp eq i64 %phitmp, %umax 746 br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody 747 748forcoll.refetch: 749 %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8 750 %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16) 751 %4 = icmp eq i64 %call7, 0 752 br i1 %4, label %forcoll.empty, label %forcoll.loopbody.outer 753 754forcoll.empty: 755 call void @objc_release(i8* %2) nounwind 756 call void @objc_release(i8* %1) nounwind, !clang.imprecise_release !0 757 call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0 758 ret void 759} 760 761; Pointers to strong pointers can obscure provenance relationships. Be conservative 762; in the face of escaping pointers. rdar://12150909. 
%struct.__block_d = type { i64, i64 }

@_NSConcreteStackBlock = external global i8*
@__block_d_tmp = external hidden constant { i64, i64, i8*, i8*, i8*, i8* }
@__block_d_tmp5 = external hidden constant { i64, i64, i8*, i8*, i8*, i8* }

; Two stack blocks each capture the strong pointer %call in their last field
; (%foo / %foo10) and escape via @objc_retainBlock + @use, so the pass must
; keep both retains of %call and the final imprecise release, as the CHECK
; lines require.

; CHECK-LABEL: define void @test11(
; CHECK: tail call i8* @objc_retain(i8* %call) [[NUW:#[0-9]+]]
; CHECK: tail call i8* @objc_retain(i8* %call) [[NUW]]
; CHECK: call void @objc_release(i8* %call) [[NUW]], !clang.imprecise_release !0
; CHECK: }
define void @test11() {
entry:
  %block = alloca <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, align 8
  %block9 = alloca <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, align 8
  %call = call i8* @def(), !clang.arc.no_objc_arc_exceptions !0
  ; %foo is the captured-variable slot (field 5) of the first stack block.
  %foo = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 5
  %block.isa = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 0
  store i8* bitcast (i8** @_NSConcreteStackBlock to i8*), i8** %block.isa, align 8
  %block.flags = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 1
  store i32 1107296256, i32* %block.flags, align 8
  %block.reserved = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 2
  store i32 0, i32* %block.reserved, align 4
  %block.invoke = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 3
  store i8* bitcast (void (i8*)* @__crasher_block_invoke to i8*), i8** %block.invoke, align 8
  %block.d = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 4
  store %struct.__block_d* bitcast ({ i64, i64, i8*, i8*, i8*, i8* }* @__block_d_tmp to %struct.__block_d*), %struct.__block_d** %block.d, align 8
  ; The retain's result is what gets stored into the escaping slot here.
  %foo2 = tail call i8* @objc_retain(i8* %call) nounwind
  store i8* %foo2, i8** %foo, align 8
  %foo4 = bitcast <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block to i8*
  %foo5 = call i8* @objc_retainBlock(i8* %foo4) nounwind
  call void @use(i8* %foo5), !clang.arc.no_objc_arc_exceptions !0
  call void @objc_release(i8* %foo5) nounwind
  ; The strong slot is reloaded and released; after @use the slot's contents
  ; can no longer be assumed to be %foo2.
  %strongdestroy = load i8*, i8** %foo, align 8
  call void @objc_release(i8* %strongdestroy) nounwind, !clang.imprecise_release !0
  ; Second stack block, same layout, different invoke function/descriptor.
  %foo10 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 5
  %block.isa11 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 0
  store i8* bitcast (i8** @_NSConcreteStackBlock to i8*), i8** %block.isa11, align 8
  %block.flags12 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 1
  store i32 1107296256, i32* %block.flags12, align 8
  %block.reserved13 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 2
  store i32 0, i32* %block.reserved13, align 4
  %block.invoke14 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 3
  store i8* bitcast (void (i8*)* @__crasher_block_invoke1 to i8*), i8** %block.invoke14, align 8
  %block.d15 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 4
  store %struct.__block_d* bitcast ({ i64, i64, i8*, i8*, i8*, i8* }* @__block_d_tmp5 to %struct.__block_d*), %struct.__block_d** %block.d15, align 8
  ; Note: unlike the first block, the retain result %foo18 is unused and the
  ; original pointer %call is what gets stored into the slot — the retain
  ; still pins %call while the block escapes.
  %foo18 = call i8* @objc_retain(i8* %call) nounwind
  store i8* %call, i8** %foo10, align 8
  %foo20 = bitcast <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9 to i8*
  %foo21 = call i8* @objc_retainBlock(i8* %foo20) nounwind
  call void @use(i8* %foo21), !clang.arc.no_objc_arc_exceptions !0
  call void @objc_release(i8* %foo21) nounwind
  %strongdestroy25 = load i8*, i8** %foo10, align 8
  call void @objc_release(i8* %strongdestroy25) nounwind, !clang.imprecise_release !0
  ; Final release balancing the ownership of %call itself.
  call void @objc_release(i8* %call) nounwind, !clang.imprecise_release !0
  ret void
}


; CHECK: attributes #0 = { argmemonly nounwind }
; CHECK: attributes #1 = { nonlazybind }
; CHECK: attributes [[NUW]] = { nounwind }