; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -S -early-cse -earlycse-debug-hash < %s | FileCheck %s --check-prefixes=CHECK,NO_ASSUME
; RUN: opt -S -basic-aa -early-cse-memssa < %s | FileCheck %s --check-prefixes=CHECK,NO_ASSUME
; RUN: opt -S -basic-aa -early-cse-memssa --enable-knowledge-retention < %s | FileCheck %s --check-prefixes=CHECK,USE_ASSUME

declare void @clobber_and_use(i32)

define void @f_0(i32* %ptr) {
; NO_ASSUME-LABEL: @f_0(
; NO_ASSUME-NEXT:    [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4, !invariant.load !0
; NO_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT:    ret void
;
; USE_ASSUME-LABEL: @f_0(
; USE_ASSUME-NEXT:    [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4, !invariant.load !0
; USE_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[PTR]], i64 4), "nonnull"(i32* [[PTR]]), "align"(i32* [[PTR]], i64 4) ]
; USE_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT:    ret void
;

  %val0 = load i32, i32* %ptr, !invariant.load !{}
  call void @clobber_and_use(i32 %val0)
  %val1 = load i32, i32* %ptr, !invariant.load !{}
  call void @clobber_and_use(i32 %val1)
  %val2 = load i32, i32* %ptr, !invariant.load !{}
  call void @clobber_and_use(i32 %val2)
  ret void
}

define void @f_1(i32* %ptr) {
; We can forward invariant loads to non-invariant loads.
; NO_ASSUME-LABEL: @f_1(
; NO_ASSUME-NEXT:    [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4, !invariant.load !0
; NO_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT:    ret void
;
; USE_ASSUME-LABEL: @f_1(
; USE_ASSUME-NEXT:    [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4, !invariant.load !0
; USE_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[PTR]], i64 4), "nonnull"(i32* [[PTR]]), "align"(i32* [[PTR]], i64 4) ]
; USE_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT:    ret void
;

  %val0 = load i32, i32* %ptr, !invariant.load !{}
  call void @clobber_and_use(i32 %val0)
  %val1 = load i32, i32* %ptr
  call void @clobber_and_use(i32 %val1)
  ret void
}

define void @f_2(i32* %ptr) {
; We can forward a non-invariant load into an invariant load.
; NO_ASSUME-LABEL: @f_2(
; NO_ASSUME-NEXT:    [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4
; NO_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT:    ret void
;
; USE_ASSUME-LABEL: @f_2(
; USE_ASSUME-NEXT:    [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4
; USE_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[PTR]], i64 4), "nonnull"(i32* [[PTR]]), "align"(i32* [[PTR]], i64 4) ]
; USE_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT:    ret void
;

  %val0 = load i32, i32* %ptr
  call void @clobber_and_use(i32 %val0)
  %val1 = load i32, i32* %ptr, !invariant.load !{}
  call void @clobber_and_use(i32 %val1)
  ret void
}

define void @f_3(i1 %cond, i32* %ptr) {
; NO_ASSUME-LABEL: @f_3(
; NO_ASSUME-NEXT:    [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4, !invariant.load !0
; NO_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT:    br i1 [[COND:%.*]], label [[LEFT:%.*]], label [[RIGHT:%.*]]
; NO_ASSUME:       left:
; NO_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT:    ret void
; NO_ASSUME:       right:
; NO_ASSUME-NEXT:    ret void
;
; USE_ASSUME-LABEL: @f_3(
; USE_ASSUME-NEXT:    [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4, !invariant.load !0
; USE_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT:    br i1 [[COND:%.*]], label [[LEFT:%.*]], label [[RIGHT:%.*]]
; USE_ASSUME:       left:
; USE_ASSUME-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[PTR]], i64 4), "nonnull"(i32* [[PTR]]), "align"(i32* [[PTR]], i64 4) ]
; USE_ASSUME-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT:    ret void
; USE_ASSUME:       right:
; USE_ASSUME-NEXT:    ret void
;
  %val0 = load i32, i32* %ptr, !invariant.load !{}
  call void @clobber_and_use(i32 %val0)
  br i1 %cond, label %left, label %right


left:
  %val1 = load i32, i32* %ptr
  call void @clobber_and_use(i32 %val1)
  ret void

right:
  ret void
}

define void @f_4(i1 %cond, i32* %ptr) {
; Negative test -- can't forward %val0 to %val1 because that would break
; def-dominates-use.
; CHECK-LABEL: @f_4(
; CHECK-NEXT:    br i1 [[COND:%.*]], label [[LEFT:%.*]], label [[MERGE:%.*]]
; CHECK:       left:
; CHECK-NEXT:    [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4, !invariant.load !0
; CHECK-NEXT:    call void @clobber_and_use(i32 [[VAL0]])
; CHECK-NEXT:    br label [[MERGE]]
; CHECK:       merge:
; CHECK-NEXT:    [[VAL1:%.*]] = load i32, i32* [[PTR]], align 4
; CHECK-NEXT:    call void @clobber_and_use(i32 [[VAL1]])
; CHECK-NEXT:    ret void
;
  br i1 %cond, label %left, label %merge

left:

  %val0 = load i32, i32* %ptr, !invariant.load !{}
  call void @clobber_and_use(i32 %val0)
  br label %merge

merge:

  %val1 = load i32, i32* %ptr
  call void @clobber_and_use(i32 %val1)
  ret void
}

; By assumption, the call can't change the contents of p.
; LangRef is a bit unclear about whether the store is reachable, so for
; the moment we choose to be conservative and just assume it's valid to
; restore the same unchanging value.
define void @test_dse1(i32* %p) {
; NO_ASSUME-LABEL: @test_dse1(
; NO_ASSUME-NEXT:    [[V1:%.*]] = load i32, i32* [[P:%.*]], align 4, !invariant.load !0
; NO_ASSUME-NEXT:    call void @clobber_and_use(i32 [[V1]])
; NO_ASSUME-NEXT:    ret void
;
; USE_ASSUME-LABEL: @test_dse1(
; USE_ASSUME-NEXT:    [[V1:%.*]] = load i32, i32* [[P:%.*]], align 4, !invariant.load !0
; USE_ASSUME-NEXT:    call void @clobber_and_use(i32 [[V1]])
; USE_ASSUME-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 4) ]
; USE_ASSUME-NEXT:    ret void
;
  %v1 = load i32, i32* %p, !invariant.load !{}
  call void @clobber_and_use(i32 %v1)
  store i32 %v1, i32* %p
  ret void
}

; By assumption, v1 must equal v2 (TODO)
define void @test_false_negative_dse2(i32* %p, i32 %v2) {
; CHECK-LABEL: @test_false_negative_dse2(
; CHECK-NEXT:    [[V1:%.*]] = load i32, i32* [[P:%.*]], align 4, !invariant.load !0
; CHECK-NEXT:    call void @clobber_and_use(i32 [[V1]])
; CHECK-NEXT:    store i32 [[V2:%.*]], i32* [[P]], align 4
; CHECK-NEXT:    ret void
;
  %v1 = load i32, i32* %p, !invariant.load !{}
  call void @clobber_and_use(i32 %v1)
  store i32 %v2, i32* %p
  ret void
}

; Even if we remove the load, we still start an invariant scope, since
; it lets us remove later loads not explicitly marked invariant.
define void @test_scope_start_without_load(i32* %p) {
; NO_ASSUME-LABEL: @test_scope_start_without_load(
; NO_ASSUME-NEXT:    [[V1:%.*]] = load i32, i32* [[P:%.*]], align 4
; NO_ASSUME-NEXT:    [[ADD:%.*]] = add i32 [[V1]], [[V1]]
; NO_ASSUME-NEXT:    call void @clobber_and_use(i32 [[ADD]])
; NO_ASSUME-NEXT:    call void @clobber_and_use(i32 [[V1]])
; NO_ASSUME-NEXT:    ret void
;
; USE_ASSUME-LABEL: @test_scope_start_without_load(
; USE_ASSUME-NEXT:    [[V1:%.*]] = load i32, i32* [[P:%.*]], align 4
; USE_ASSUME-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 4) ]
; USE_ASSUME-NEXT:    [[ADD:%.*]] = add i32 [[V1]], [[V1]]
; USE_ASSUME-NEXT:    call void @clobber_and_use(i32 [[ADD]])
; USE_ASSUME-NEXT:    call void @clobber_and_use(i32 [[V1]])
; USE_ASSUME-NEXT:    ret void
;
  %v1 = load i32, i32* %p
  %v2 = load i32, i32* %p, !invariant.load !{}
  %add = add i32 %v1, %v2
  call void @clobber_and_use(i32 %add)
  %v3 = load i32, i32* %p
  call void @clobber_and_use(i32 %v3)
  ret void
}

; If we already have an invariant scope, we don't want to start a new one
; with a potentially greater generation, as that would hide the earlier
; invariant load.
define void @test_scope_restart(i32* %p) {
; NO_ASSUME-LABEL: @test_scope_restart(
; NO_ASSUME-NEXT:    [[V1:%.*]] = load i32, i32* [[P:%.*]], align 4, !invariant.load !0
; NO_ASSUME-NEXT:    call void @clobber_and_use(i32 [[V1]])
; NO_ASSUME-NEXT:    [[ADD:%.*]] = add i32 [[V1]], [[V1]]
; NO_ASSUME-NEXT:    call void @clobber_and_use(i32 [[ADD]])
; NO_ASSUME-NEXT:    call void @clobber_and_use(i32 [[V1]])
; NO_ASSUME-NEXT:    ret void
;
; USE_ASSUME-LABEL: @test_scope_restart(
; USE_ASSUME-NEXT:    [[V1:%.*]] = load i32, i32* [[P:%.*]], align 4, !invariant.load !0
; USE_ASSUME-NEXT:    call void @clobber_and_use(i32 [[V1]])
; USE_ASSUME-NEXT:    call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 4) ]
; USE_ASSUME-NEXT:    [[ADD:%.*]] = add i32 [[V1]], [[V1]]
; USE_ASSUME-NEXT:    call void @clobber_and_use(i32 [[ADD]])
; USE_ASSUME-NEXT:    call void @clobber_and_use(i32 [[V1]])
; USE_ASSUME-NEXT:    ret void
;
  %v1 = load i32, i32* %p, !invariant.load !{}
  call void @clobber_and_use(i32 %v1)
  %v2 = load i32, i32* %p, !invariant.load !{}
  %add = add i32 %v1, %v2
  call void @clobber_and_use(i32 %add)
  %v3 = load i32, i32* %p
  call void @clobber_and_use(i32 %v3)
  ret void
}