/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "prepare_for_register_allocation.h"

#include "dex/dex_file_types.h"
#include "driver/compiler_options.h"
#include "jni/jni_internal.h"
#include "nodes.h"
#include "optimizing_compiler_stats.h"
#include "well_known_classes.h"

namespace art HIDDEN {

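// Visitor implementing the per-instruction rewrites done just before register allocation:
// uses of null/div-zero/bounds checks are redirected to the checked values so the checks do
// not create extra live ranges, `HBoundType` is removed, class initialization checks are
// merged into their users where possible, and single-use conditions are moved next to the
// branch/select/deoptimize that consumes them so they can be emitted at the use site.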
class PrepareForRegisterAllocationVisitor final : public HGraphDelegateVisitor {
 public:
  PrepareForRegisterAllocationVisitor(HGraph* graph,
                                      const CompilerOptions& compiler_options,
                                      OptimizingCompilerStats* stats)
      : HGraphDelegateVisitor(graph, stats),
        compiler_options_(compiler_options) {}

 private:
  void VisitCheckCast(HCheckCast* check_cast) override;
  void VisitInstanceOf(HInstanceOf* instance_of) override;
  void VisitNullCheck(HNullCheck* check) override;
  void VisitDivZeroCheck(HDivZeroCheck* check) override;
  void VisitBoundsCheck(HBoundsCheck* check) override;
  void VisitBoundType(HBoundType* bound_type) override;
  void VisitArraySet(HArraySet* instruction) override;
  void VisitClinitCheck(HClinitCheck* check) override;
  void VisitIf(HIf* if_instr) override;
  void VisitSelect(HSelect* select) override;
  void VisitConstructorFence(HConstructorFence* constructor_fence) override;
  void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) override;
  void VisitDeoptimize(HDeoptimize* deoptimize) override;
  void VisitTypeConversion(HTypeConversion* instruction) override;

  bool CanMoveClinitCheck(HInstruction* input, HInstruction* user) const;
  bool CanEmitConditionAt(HCondition* condition, HInstruction* user) const;
  void TryToMoveConditionToUser(HInstruction* maybe_condition, HInstruction* user);

  const CompilerOptions& compiler_options_;
};

bool PrepareForRegisterAllocation::Run() {
  PrepareForRegisterAllocationVisitor visitor(graph_, compiler_options_, stats_);
  // Order does not matter.
  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    // No need to visit the phis.
    for (HInstructionIteratorHandleChanges inst_it(block->GetInstructions()); !inst_it.Done();
         inst_it.Advance()) {
      inst_it.Current()->Accept(&visitor);
    }
  }
  return true;
}

void PrepareForRegisterAllocationVisitor::VisitCheckCast(HCheckCast* check_cast) {
  // Record only those bitstring type checks that make it to the codegen stage.
  if (check_cast->GetTypeCheckKind() == TypeCheckKind::kBitstringCheck) {
    MaybeRecordStat(stats_, MethodCompilationStat::kBitstringTypeCheck);
  }
}

void PrepareForRegisterAllocationVisitor::VisitInstanceOf(HInstanceOf* instance_of) {
  // Record only those bitstring type checks that make it to the codegen stage.
  if (instance_of->GetTypeCheckKind() == TypeCheckKind::kBitstringCheck) {
    MaybeRecordStat(stats_, MethodCompilationStat::kBitstringTypeCheck);
  }
}

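// Redirect all users of the null check to the checked reference itself. The check stays in the
// graph so that the code generator can still emit it, either as an explicit test or implicitly
// at the use site when the following instruction can fault on the same reference.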
void PrepareForRegisterAllocationVisitor::VisitNullCheck(HNullCheck* check) {
  check->ReplaceWith(check->InputAt(0));
  if (compiler_options_.GetImplicitNullChecks()) {
    HInstruction* next = check->GetNext();

    // The `PrepareForRegisterAllocation` pass removes `HBoundType` from the graph, so remove
    // any `HBoundType` following the check right away, so that it does not get in the way of
    // the implicit null check below.
    while (next->IsBoundType()) {
      next = next->GetNext();
      VisitBoundType(next->GetPrevious()->AsBoundType());
    }
    if (next->CanDoImplicitNullCheckOn(check->InputAt(0))) {
      check->MarkEmittedAtUseSite();
    }
  }
}

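// As with the null check, only redirect the uses; the check itself remains in the graph and the
// code generator emits the actual zero test.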
void PrepareForRegisterAllocationVisitor::VisitDivZeroCheck(HDivZeroCheck* check) {
  check->ReplaceWith(check->InputAt(0));
}

void PrepareForRegisterAllocationVisitor::VisitDeoptimize(HDeoptimize* deoptimize) {
  if (deoptimize->GuardsAnInput()) {
    // Replace the uses with the actual guarded instruction.
    deoptimize->ReplaceWith(deoptimize->GuardedInput());
    deoptimize->RemoveGuard();
  }
  TryToMoveConditionToUser(deoptimize->InputAt(0), deoptimize);
}

void PrepareForRegisterAllocationVisitor::VisitBoundsCheck(HBoundsCheck* check) {
  check->ReplaceWith(check->InputAt(0));
  if (check->IsStringCharAt()) {
    // Add a fake environment for String.charAt() inline info as we want the exception
    // to appear as being thrown from there. Skip if we're compiling String.charAt() itself.
    ArtMethod* char_at_method = WellKnownClasses::java_lang_String_charAt;
    if (GetGraph()->GetArtMethod() != char_at_method) {
      ArenaAllocator* allocator = GetGraph()->GetAllocator();
      HEnvironment* environment = HEnvironment::Create(allocator,
                                                       /* number_of_vregs= */ 0u,
                                                       char_at_method,
                                                       /* dex_pc= */ dex::kDexNoIndex,
                                                       check);
      check->InsertRawEnvironment(environment);
    }
  }
}

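// `HBoundType` only narrows the static type information used by earlier optimization passes and
// generates no code, so redirect its uses to its input and drop it from the graph.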
void PrepareForRegisterAllocationVisitor::VisitBoundType(HBoundType* bound_type) {
  bound_type->ReplaceWith(bound_type->InputAt(0));
  bound_type->GetBlock()->RemoveInstruction(bound_type);
}

void PrepareForRegisterAllocationVisitor::VisitArraySet(HArraySet* instruction) {
  HInstruction* value = instruction->GetValue();
  // PrepareForRegisterAllocationVisitor::VisitBoundType may have replaced a
  // BoundType (as value input of this ArraySet) with a NullConstant.
  // If so, this ArraySet no longer needs a type check.
  if (value->IsNullConstant()) {
    DCHECK_EQ(value->GetType(), DataType::Type::kReference);
    if (instruction->NeedsTypeCheck()) {
      instruction->ClearTypeCheck();
    }
  }
}

void PrepareForRegisterAllocationVisitor::VisitClinitCheck(HClinitCheck* check) {
  // Try to find a static invoke or a new-instance from which this check originated.
  HInstruction* implicit_clinit = nullptr;
  for (const HUseListNode<HInstruction*>& use : check->GetUses()) {
    HInstruction* user = use.GetUser();
    if ((user->IsInvokeStaticOrDirect() || user->IsNewInstance()) &&
        CanMoveClinitCheck(check, user)) {
      implicit_clinit = user;
      if (user->IsInvokeStaticOrDirect()) {
        DCHECK(user->AsInvokeStaticOrDirect()->IsStaticWithExplicitClinitCheck());
        user->AsInvokeStaticOrDirect()->RemoveExplicitClinitCheck(
            HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit);
      } else {
        DCHECK(user->IsNewInstance());
        // We delegate the initialization duty to the allocation.
        if (user->AsNewInstance()->GetEntrypoint() == kQuickAllocObjectInitialized) {
          user->AsNewInstance()->SetEntrypoint(kQuickAllocObjectResolved);
        }
      }
      break;
    }
  }
  // If we found a static invoke or new-instance for merging, remove the check
  // from dominated static invokes.
  if (implicit_clinit != nullptr) {
    const HUseList<HInstruction*>& uses = check->GetUses();
    for (auto it = uses.begin(), end = uses.end(); it != end; /* ++it below */) {
      HInstruction* user = it->GetUser();
      // All other uses must be dominated.
      DCHECK(implicit_clinit->StrictlyDominates(user) || (implicit_clinit == user));
      ++it;  // Advance before we remove the node, reference to the next node is preserved.
      if (user->IsInvokeStaticOrDirect()) {
        user->AsInvokeStaticOrDirect()->RemoveExplicitClinitCheck(
            HInvokeStaticOrDirect::ClinitCheckRequirement::kNone);
      }
    }
  }

  HLoadClass* load_class = check->GetLoadClass();
  bool can_merge_with_load_class = CanMoveClinitCheck(load_class, check);

  check->ReplaceWith(load_class);

  if (implicit_clinit != nullptr) {
    // Remove the check from the graph. It has been merged into the invoke or new-instance.
    check->GetBlock()->RemoveInstruction(check);
    // Check if we can merge the load class as well, or whether the LoadClass is now dead.
    if ((can_merge_with_load_class || !load_class->CanThrow()) && !load_class->HasUses()) {
      load_class->GetBlock()->RemoveInstruction(load_class);
    }
  } else if (can_merge_with_load_class &&
             load_class->GetLoadKind() != HLoadClass::LoadKind::kRuntimeCall) {
    // Pass the initialization duty to the `HLoadClass` instruction,
    // and remove the instruction from the graph.
    DCHECK(load_class->HasEnvironment());
    load_class->SetMustGenerateClinitCheck(true);
    check->GetBlock()->RemoveInstruction(check);
  }
}

// Determine if moving `condition` to `user` would observably extend the lifetime of a reference.
// By "observably" we understand that the reference would need to be visible to the GC for longer.
// We're not concerned with the lifetime for the purposes of register allocation here.
static bool ConditionMoveWouldExtendReferenceLifetime(HCondition* condition, HInstruction* user) {
  HInstruction* lhs = condition->InputAt(0);
  if (lhs->GetType() != DataType::Type::kReference) {
    return false;
  }
  HInstruction* rhs = condition->InputAt(1);
  DCHECK_EQ(rhs->GetType(), DataType::Type::kReference);
  if (lhs->IsNullConstant() && rhs->IsNullConstant()) {
    return false;
  }
  // Check if the last instruction with environment before `user` has all non-null
  // inputs in the environment. If so, we would not be extending the lifetime.
  HInstruction* instruction_with_env = user->GetPrevious();
  while (instruction_with_env != nullptr &&
         instruction_with_env != condition &&
         instruction_with_env->GetEnvironment() == nullptr) {
    DCHECK(!instruction_with_env->GetSideEffects().Includes(SideEffects::CanTriggerGC()));
    instruction_with_env = instruction_with_env->GetPrevious();
  }
  if (instruction_with_env == nullptr) {
    // No env use in the user's block. Do not search other blocks. Conservatively assume that
    // moving the `condition` to the `user` would indeed extend the lifetime of a reference.
    return true;
  }
  if (instruction_with_env == condition) {
    // There is no instruction with an environment between `condition` and `user`, so moving
    // the condition before the user shall not observably extend the lifetime of the reference.
    return false;
  }
  DCHECK(instruction_with_env->HasEnvironment());
  auto env_inputs = instruction_with_env->GetEnvironment()->GetEnvInputs();
  auto extends_lifetime = [&](HInstruction* instruction) {
    return !instruction->IsNullConstant() &&
           std::find(env_inputs.begin(), env_inputs.end(), instruction) == env_inputs.end();
  };
  return extends_lifetime(lhs) || extends_lifetime(rhs);
}

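// A condition whose only use is the branch, deoptimization guard or select that consumes it can
// be generated directly as part of that user without materializing a boolean value, provided it
// is safe to move the condition next to the user.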
bool PrepareForRegisterAllocationVisitor::CanEmitConditionAt(HCondition* condition,
                                                             HInstruction* user) const {
  DCHECK(user->IsIf() || user->IsDeoptimize() || user->IsSelect());

  if (GetGraph()->IsCompilingBaseline() && compiler_options_.ProfileBranches()) {
    // To do branch profiling, we cannot emit conditions at use site.
    return false;
  }

  // Move only a single-user `HCondition` to the `user`.
  if (!condition->HasOnlyOneNonEnvironmentUse()) {
    return false;
  }
  DCHECK(condition->GetUses().front().GetUser() == user);

  if (condition->GetNext() != user) {
    // Avoid moving across blocks if the graph has any irreducible loops.
    if (condition->GetBlock() != user->GetBlock() && GetGraph()->HasIrreducibleLoops()) {
      return false;
    }
    // Avoid extending the lifetime of references by moving the condition.
    if (ConditionMoveWouldExtendReferenceLifetime(condition, user)) {
      return false;
    }
  }

  return true;
}

void PrepareForRegisterAllocationVisitor::TryToMoveConditionToUser(HInstruction* maybe_condition,
                                                                   HInstruction* user) {
  DCHECK(user->IsIf() || user->IsDeoptimize() || user->IsSelect());
  if (maybe_condition->IsCondition() && CanEmitConditionAt(maybe_condition->AsCondition(), user)) {
    if (maybe_condition->GetNext() != user) {
      maybe_condition->MoveBefore(user);
#ifdef ART_ENABLE_CODEGEN_x86
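      // On x86, inputs loaded from the constant table are emitted at their use site and depend
      // on the base method address computation; move them (and, if solely used here, the base
      // address computation) along with the condition so that they remain next to their user.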
      for (HInstruction* input : maybe_condition->GetInputs()) {
        if (input->IsEmittedAtUseSite()) {
          DCHECK(input->IsX86LoadFromConstantTable());
          input->MoveBefore(maybe_condition);
          HInstruction* inputs_input = input->InputAt(0);
          DCHECK(inputs_input->IsX86ComputeBaseMethodAddress());
          if (inputs_input->HasOnlyOneNonEnvironmentUse()) {
            inputs_input->MoveBefore(input);
          }
        }
      }
#else  // ART_ENABLE_CODEGEN_x86
      if (kIsDebugBuild) {
        for (HInstruction* input : maybe_condition->GetInputs()) {
          CHECK(!input->IsEmittedAtUseSite()) << input->DebugName() << "#" << input->GetId();
        }
      }
#endif
    }
    maybe_condition->MarkEmittedAtUseSite();
  }
}

void PrepareForRegisterAllocationVisitor::VisitIf(HIf* if_instr) {
  TryToMoveConditionToUser(if_instr->InputAt(0), if_instr);
}

void PrepareForRegisterAllocationVisitor::VisitSelect(HSelect* select) {
  TryToMoveConditionToUser(select->GetCondition(), select);
}

void PrepareForRegisterAllocationVisitor::VisitConstructorFence(
    HConstructorFence* constructor_fence) {
  // Trivially remove redundant HConstructorFence when it immediately follows an HNewInstance
  // to an uninitialized class. In this special case, the art_quick_alloc_object_resolved
  // will already have the 'dmb' which is strictly stronger than an HConstructorFence.
  //
  // The instruction builder always emits "x = HNewInstance; HConstructorFence(x)" so this
  // is effectively pattern-matching that particular case and undoing the redundancy the builder
  // had introduced.
  //
  // TODO: Move this to a separate pass.
  HInstruction* allocation_inst = constructor_fence->GetAssociatedAllocation();
  if (allocation_inst != nullptr && allocation_inst->IsNewInstance()) {
    HNewInstance* new_inst = allocation_inst->AsNewInstance();
    // This relies on the entrypoint already being set to the more optimized version;
    // as that happens in this pass, this redundancy removal also cannot happen any earlier.
    if (new_inst != nullptr && new_inst->GetEntrypoint() == kQuickAllocObjectResolved) {
      // If this was done in an earlier pass, we would want to match that `previous` was an input
      // to the `constructor_fence`. However, since this pass removes the inputs to the fence,
      // we can ignore the inputs and just remove the instruction from its block.
      DCHECK_EQ(1u, constructor_fence->InputCount());
      // TODO: GetAssociatedAllocation should not care about multiple inputs
      // if we are in prepare_for_register_allocation pass only.
      constructor_fence->GetBlock()->RemoveInstruction(constructor_fence);
      MaybeRecordStat(stats_,
                      MethodCompilationStat::kConstructorFenceRemovedPFRA);
      return;
    }

    // HNewArray does not need this check because the art_quick_alloc_array does not itself
    // have a dmb in any normal situation (i.e. the array class is never exactly in the
    // "resolved" state). If the array class is not yet loaded, it will always go from
    // Unloaded->Initialized state.
  }

  // Remove all the inputs to the constructor fence;
  // they aren't used by the InstructionCodeGenerator and this lets us avoid creating a
  // LocationSummary in the LocationsBuilder.
  constructor_fence->RemoveAllInputs();
}

void PrepareForRegisterAllocationVisitor::VisitInvokeStaticOrDirect(
    HInvokeStaticOrDirect* invoke) {
  if (invoke->IsStaticWithExplicitClinitCheck()) {
    HInstruction* last_input = invoke->GetInputs().back();
    DCHECK(last_input->IsLoadClass())
        << "Last input is not HLoadClass. It is " << last_input->DebugName();

    // Detach the explicit class initialization check from the invoke.
    // Keeping track of the initializing instruction is no longer required
    // at this stage (i.e., after inlining has been performed).
    invoke->RemoveExplicitClinitCheck(HInvokeStaticOrDirect::ClinitCheckRequirement::kNone);

    // Merging with load class should have happened in VisitClinitCheck().
    DCHECK(!CanMoveClinitCheck(last_input, invoke));
  }
}

bool PrepareForRegisterAllocationVisitor::CanMoveClinitCheck(HInstruction* input,
                                                             HInstruction* user) const {
  // Determine if input and user come from the same dex instruction, so that we can move
  // the clinit check responsibility from one to the other, i.e. from HClinitCheck (user)
  // to HLoadClass (input), or from HClinitCheck (input) to HInvokeStaticOrDirect (user),
  // or from HLoadClass (input) to HNewInstance (user).

  // Start with a quick dex pc check.
  if (user->GetDexPc() != input->GetDexPc()) {
    return false;
  }

  if (user->IsNewInstance() && user->AsNewInstance()->IsPartialMaterialization()) {
    return false;
  }

  // Now do a thorough environment check that this is really coming from the same instruction in
  // the same inlined graph. Unfortunately, we have to go through the whole environment chain.
  HEnvironment* user_environment = user->GetEnvironment();
  HEnvironment* input_environment = input->GetEnvironment();
  while (user_environment != nullptr || input_environment != nullptr) {
    if (user_environment == nullptr || input_environment == nullptr) {
      // Different environment chain length. This happens when a method is called
      // once directly and once indirectly through another inlined method.
      return false;
    }
    if (user_environment->GetDexPc() != input_environment->GetDexPc() ||
        user_environment->GetMethod() != input_environment->GetMethod()) {
      return false;
    }
    user_environment = user_environment->GetParent();
    input_environment = input_environment->GetParent();
  }

  // Check for code motion taking the input to a different block.
  if (user->GetBlock() != input->GetBlock()) {
    return false;
  }

  // If there's an instruction between them that can throw or has side effects, we cannot move
  // the responsibility.
  for (HInstruction* between = input->GetNext(); between != user; between = between->GetNext()) {
    DCHECK(between != nullptr) << " User must be after input in the same block. input: " << *input
                               << ", user: " << *user;
    if (between->CanThrow() || between->HasSideEffects()) {
      return false;
    }
  }

  return true;
}

void PrepareForRegisterAllocationVisitor::VisitTypeConversion(HTypeConversion* instruction) {
  // For simplicity, our code generators don't handle implicit type conversion, so ensure
  // there are none before hitting codegen.
  if (instruction->IsImplicitConversion()) {
    instruction->ReplaceWith(instruction->GetInput());
    instruction->GetBlock()->RemoveInstruction(instruction);
  }
}

}  // namespace art