/*
 * Copyright (c) 2021-2025 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef COMPILER_ANALYSIS_ESCAPE_H
#define COMPILER_ANALYSIS_ESCAPE_H

#include <algorithm>
#include <cstdint>
#include <limits>
#include <variant>

#include "compiler_options.h"
#include "optimizer/ir/constants.h"
#include "optimizer/ir/graph.h"
#include "optimizer/ir/graph_visitor.h"
#include "optimizer/pass.h"
#include "optimizer/analysis/live_in_analysis.h"
#include "optimizer/analysis/loop_analyzer.h"
#include "optimizer/ir/runtime_interface.h"
#include "mem/arena_allocator.h"
#include "utils/bit_vector.h"
#include "compiler_logger.h"
#include "utils/logger.h"

namespace ark::compiler {
using FieldId = uint64_t;
using InstId = uint32_t;
using BlockId = uint32_t;
using StateId = uint32_t;
using FieldPtr = RuntimeInterface::FieldPtr;
using ClassPtr = RuntimeInterface::ClassPtr;

class VirtualState;
class PhiState;
class ScalarReplacement;
class ZeroInst;
struct Index;

using StateOwner = std::variant<Inst *, PhiState *, const ZeroInst *>;
using MaterializationSite = std::variant<Inst *, BasicBlock *>;
using Field = std::variant<FieldPtr, Index>;

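// Index identifies an array element: the array's class plus a constant element
// index. As an alternative of the Field variant it lets object fields and
// array elements be tracked uniformly.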
struct Index {
    ClassPtr klass;  // NOLINT(misc-non-private-member-variables-in-classes)
    uint64_t index;  // NOLINT(misc-non-private-member-variables-in-classes)

    bool operator==(const Index &other) const
    {
        return klass == other.klass && index == other.index;
    }

    bool operator<(const Index &other) const
    {
        return klass < other.klass || (klass == other.klass && index < other.index);
    }
};

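// Orders Field values by collapsing each variant alternative into a single
// 64-bit key: a FieldPtr is compared by its address, an Index by packing the
// low 32 bits of the class pointer above the element index. Distinct keys
// therefore rely on class pointers differing in their low 32 bits and on
// element indices fitting into 32 bits.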
struct FieldComporator {
    bool operator()(const Field &field1, const Field &field2) const
    {
        return GetUniqVal(field1) < GetUniqVal(field2);
    }

private:
    uint64_t GetUniqVal(const Field &field) const
    {
        if (std::holds_alternative<FieldPtr>(field)) {
            return reinterpret_cast<uint64_t>(std::get<FieldPtr>(field));
        }
        auto index = std::get<Index>(field);
        // NOLINTNEXTLINE(readability-magic-numbers)
        return (reinterpret_cast<uint64_t>(index.klass) << 32U) | index.index;
    }
};

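// Escape analysis (the PEA log channel suggests partial escape analysis):
// determines which allocations stay local to the compiled method ("virtual"
// objects) and where they escape, collecting aliases, field phis and
// materialization sites for ScalarReplacement to consume.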
// NOLINTNEXTLINE(fuchsia-multiple-inheritance)
class EscapeAnalysis : public Optimization, public GraphVisitor {
public:
    class BasicBlockState;

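    // State id 0 is reserved for the materialized (escaped) state; ids of
    // virtual states are allocated starting from MATERIALIZED_ID + 1.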
    static constexpr StateId MATERIALIZED_ID = 0;

    explicit EscapeAnalysis(Graph *graph)
        : Optimization(graph),
          blockStates_(graph->GetLocalAllocator()->Adapter()),
          aliases_(graph->GetLocalAllocator()->Adapter()),
          materializationInfo_(graph->GetLocalAllocator()->Adapter()),
          phis_(graph->GetLocalAllocator()->Adapter()),
          saveStateInfo_(graph->GetLocalAllocator()->Adapter()),
          virtualizableAllocations_(graph->GetLocalAllocator()->Adapter()),
          mergeProcessor_(this),
          liveIns_(graph)
    {
    }

    NO_MOVE_SEMANTIC(EscapeAnalysis);
    NO_COPY_SEMANTIC(EscapeAnalysis);
    ~EscapeAnalysis() override = default;

    bool RunImpl() override;

    const char *GetPassName() const override
    {
        return "EscapeAnalysis";
    }

    bool IsEnable() const override
    {
        return g_options.IsCompilerScalarReplacement();
    }

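// The macros below generate the static visitor entry points required by
// GraphVisitor's dispatch table; each generated function downcasts the visitor
// to EscapeAnalysis and forwards to the matching member handler.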
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DEFINE_VISIT(InstName)                                   \
    static void Visit##InstName(GraphVisitor *v, Inst *inst)     \
    {                                                            \
        static_cast<EscapeAnalysis *>(v)->Visit##InstName(inst); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DEFINE_VISIT_WITH_CALLBACK(InstName, Callback)       \
    static void Visit##InstName(GraphVisitor *v, Inst *inst) \
    {                                                        \
        static_cast<EscapeAnalysis *>(v)->Callback(inst);    \
    }

    DEFINE_VISIT(NewObject);
    DEFINE_VISIT(NewArray);
    DEFINE_VISIT(LoadObject);
    DEFINE_VISIT(LoadArray);
    DEFINE_VISIT(StoreObject);
    DEFINE_VISIT(StoreArray);
    DEFINE_VISIT(NullCheck);
    DEFINE_VISIT(SaveState);
    DEFINE_VISIT(SafePoint);
    // NOTE(schernykh): support it
    // DEFINE_VISIT(SaveStateDeoptimize)
    DEFINE_VISIT(GetInstanceClass);

    DEFINE_VISIT_WITH_CALLBACK(Deoptimize, MaterializeDeoptSaveState);
    DEFINE_VISIT_WITH_CALLBACK(DeoptimizeIf, MaterializeDeoptSaveState);
    DEFINE_VISIT_WITH_CALLBACK(DeoptimizeCompare, MaterializeDeoptSaveState);
    DEFINE_VISIT_WITH_CALLBACK(DeoptimizeCompareImm, MaterializeDeoptSaveState);
    DEFINE_VISIT_WITH_CALLBACK(LoadAndInitClass, VisitSaveStateUser);
    DEFINE_VISIT_WITH_CALLBACK(LoadClass, VisitSaveStateUser);

#undef DEFINE_VISIT
#undef DEFINE_VISIT_WITH_CALLBACK

    static void VisitCallStatic(GraphVisitor *v, Inst *inst)
    {
        static_cast<EscapeAnalysis *>(v)->VisitCall(inst->CastToCallStatic());
    }
    static void VisitCallVirtual(GraphVisitor *v, Inst *inst)
    {
        static_cast<EscapeAnalysis *>(v)->VisitCall(inst->CastToCallVirtual());
    }
    static void VisitCompare(GraphVisitor *v, Inst *inst)
    {
        static_cast<EscapeAnalysis *>(v)->VisitCmpRef(inst, inst->CastToCompare()->GetCc());
    }
    static void VisitIf(GraphVisitor *v, Inst *inst)
    {
        static_cast<EscapeAnalysis *>(v)->VisitCmpRef(inst, inst->CastToIf()->GetCc());
    }
    static void VisitPhi([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst)
    {
        // Phis are handled during the merge of block states
    }
    static void VisitReturnInlined([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst)
    {
        // do nothing
    }
    // By default all reference inputs are materialized and the materialized
    // state is recorded for the reference output.
    void VisitDefault(Inst *inst) override
    {
        VisitSaveStateUser(inst);
        Materialize(inst);
    }
    void VisitAllocation(Inst *inst);

    const ArenaVector<BasicBlock *> &GetBlocksToVisit() const override
    {
        UNREACHABLE();
    }

    // only for testing
    void RelaxClassRestrictions()
    {
        relaxClassRestrictions_ = true;
    }

#include "optimizer/ir/visitor.inc"
private:
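    // ZERO_INST_ID sits just below the maximal InstId so it cannot collide
    // with a real instruction id; it presumably identifies the synthetic
    // ZeroInst placeholder. MAX_NESTNESS bounds the loop nesting depth
    // accepted by ProcessBlock/ProcessLoop (see their depth parameter).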
    static constexpr InstId ZERO_INST_ID = std::numeric_limits<InstId>::max() - 1U;
    static constexpr size_t MAX_NESTNESS = 5;

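    // Merges the states of a block's predecessors into the block's initial
    // state at control-flow joins: conflicting virtual states force
    // materialization at the beginning of the block, and per-field phis are
    // created where the incoming values differ.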
    class MergeProcessor {
    public:
        explicit MergeProcessor(EscapeAnalysis *parent)
            : parent_(parent),
              fieldsMergeBuffer_(parent->GetLocalAllocator()->Adapter()),
              statesMergeBuffer_(parent->GetLocalAllocator()->Adapter()),
              allFields_(parent->GetLocalAllocator()->Adapter()),
              pendingInsts_(parent->GetLocalAllocator()->Adapter())
        {
        }
        ~MergeProcessor() = default;
        NO_COPY_SEMANTIC(MergeProcessor);
        NO_MOVE_SEMANTIC(MergeProcessor);

        // Compute the initial state of a block by merging the states of its predecessors.
        void MergeStates(BasicBlock *block);

    private:
        EscapeAnalysis *parent_;
        ArenaVector<StateOwner> fieldsMergeBuffer_;
        ArenaVector<StateOwner> statesMergeBuffer_;
        ArenaVector<Field> allFields_;
        ArenaVector<StateOwner> pendingInsts_;

        bool MergeFields(BasicBlock *block, BasicBlockState *blockState, StateId stateToMerge, VirtualState *vstate,
                         ArenaVector<StateOwner> &mergeQueue);
        bool MergeState(StateOwner inst, BasicBlock *block, BasicBlockState *newState);
        void CheckStatesAndInsertIntoBuffer(StateOwner inst, BasicBlock *block);
        void MaterializeObjectsAtTheBeginningOfBlock(BasicBlock *block);
        void CollectInstructionsToMerge(BasicBlock *block);
        void ProcessPhis(BasicBlock *block, BasicBlockState *newState);
    };

    Marker visited_ {UNDEF_MARKER};
    ArenaVector<BasicBlockState *> blockStates_;
    ArenaMap<Inst *, StateOwner> aliases_;
    // Instructions with their corresponding virtual states, bound to a save
    // state or an allocation site.
    ArenaUnorderedMap<MaterializationSite, ArenaMap<Inst *, VirtualState *>> materializationInfo_;
    ArenaVector<ArenaMap<Field, PhiState *, FieldComporator>> phis_;
    ArenaUnorderedMap<Inst *, ArenaBitVector> saveStateInfo_;
    ArenaSet<Inst *> virtualizableAllocations_;
    MergeProcessor mergeProcessor_;
    LiveInAnalysis liveIns_;
    // 0 is the materialized state
    StateId stateId_ = MATERIALIZED_ID + 1;
    bool virtualizationChanged_ {false};
    bool relaxClassRestrictions_ {false};

    void VisitCmpRef(Inst *inst, ConditionCode cc);
    void VisitSaveStateUser(Inst *inst);
    void VisitCall(CallInst *inst);
    void VisitNewObject(Inst *inst);
    void VisitLoadObject(Inst *inst);
    void VisitStoreObject(Inst *inst);
    void VisitNewArray(Inst *inst);
    void VisitStoreArray(Inst *inst);
    void VisitLoadArray(Inst *inst);
    void VisitNullCheck(Inst *inst);
    void VisitSaveState(Inst *inst);
    void VisitSaveStateDeoptimize(Inst *inst);
    void VisitSafePoint(Inst *inst);
    void VisitGetInstanceClass(Inst *inst);
    void VisitBlockInternal(BasicBlock *block);
    void HandleMaterializationSite(Inst *inst);

    void MaterializeDeoptSaveState(Inst *inst);

    void Initialize();
    void PropagateLoadObjectsUpwards();
    void CreateTemporaryLoads(Inst *phi);
    bool FindVirtualizableAllocations();
    bool AllPredecessorsVisited(const BasicBlock *block);
#ifndef NDEBUG
    Marker removableLoads_ {UNDEF_MARKER};
    bool EnsureLoadsRemoval();
#endif

    bool IsRefField(FieldPtr field) const
    {
        return IsReference(GetGraph()->GetRuntime()->GetFieldType(field));
    }

    BasicBlockState *GetState(const BasicBlock *block) const
    {
        return blockStates_[block->GetId()];
    }

    std::pair<PhiState *, bool> CreatePhi(BasicBlock *targetBlock, BasicBlockState *blockState, Field field,
                                          ArenaVector<StateOwner> &inputs, VirtualState *vstate);
    VirtualState *CreateVState(Inst *inst);
    VirtualState *CreateVState(Inst *inst, StateId id);

    void MaterializeInBlock(StateOwner inst, BasicBlock *block);
    void Materialize(StateOwner inst, BasicBlock *block);
    void Materialize(StateOwner inst, Inst *before);
    void Materialize(Inst *inst);
    void RegisterMaterialization(MaterializationSite site, Inst *inst);
    void RegisterVirtualObjectFieldsForMaterialization(Inst *ss);
    bool RegisterFieldsMaterialization(Inst *site, VirtualState *state, BasicBlockState *blockState,
                                       const ArenaMap<Inst *, VirtualState *> &states);

    bool ProcessBlock(BasicBlock *block, size_t depth = 0);
    bool ProcessLoop(BasicBlock *header, size_t depth = 0);
    void FillVirtualInputs(Inst *inst);
    void AddVirtualizableAllocation(Inst *inst)
    {
        COMPILER_LOG(DEBUG, PEA) << "Candidate for virtualization: " << *inst;
        virtualizableAllocations_.insert(inst);
    }
    void RemoveVirtualizableAllocation(Inst *inst)
    {
        virtualizableAllocations_.erase(inst);
    }

    ArenaAllocator *GetLocalAllocator()
    {
        return GetGraph()->GetLocalAllocator();
    }

    void DumpVStates();
    void DumpMStates();
    void DumpAliases();
    void Dump();
};

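// Applies the results of EscapeAnalysis: replaces uses of virtualized
// allocations with the scalar values and phis recorded in aliases_ and phis_,
// recreates objects at the collected materialization sites, patches the
// affected save states, and finally removes the dead allocations.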
class ScalarReplacement {
public:
    ScalarReplacement(Graph *graph, ArenaMap<Inst *, StateOwner> &aliases,
                      ArenaVector<ArenaMap<Field, PhiState *, FieldComporator>> &phis,
                      ArenaUnorderedMap<MaterializationSite, ArenaMap<Inst *, VirtualState *>> &materializationSites,
                      ArenaUnorderedMap<Inst *, ArenaBitVector> &saveStateLiveness)
        : graph_(graph),
          aliases_(aliases),
          phis_(phis),
          materializationSites_(materializationSites),
          saveStateLiveness_(saveStateLiveness),
          allocatedPhis_(graph_->GetLocalAllocator()->Adapter()),
          materializedObjects_(graph_->GetLocalAllocator()->Adapter()),
          removalQueue_(graph_->GetLocalAllocator()->Adapter())
    {
    }

    ~ScalarReplacement() = default;
    NO_COPY_SEMANTIC(ScalarReplacement);
    NO_MOVE_SEMANTIC(ScalarReplacement);

    void Apply(ArenaSet<Inst *> &candidates);

private:
    Graph *graph_;
    ArenaMap<Inst *, StateOwner> &aliases_;
    ArenaVector<ArenaMap<Field, PhiState *, FieldComporator>> &phis_;
    ArenaUnorderedMap<MaterializationSite, ArenaMap<Inst *, VirtualState *>> &materializationSites_;
    ArenaUnorderedMap<Inst *, ArenaBitVector> &saveStateLiveness_;

    ArenaMap<PhiState *, PhiInst *> allocatedPhis_;
    ArenaMap<Inst *, ArenaVector<Inst *>> materializedObjects_;

    ArenaVector<Inst *> removalQueue_;
    Marker removeInstMarker_ {UNDEF_MARKER};

    void ProcessRemovalQueue();
    bool IsEnqueuedForRemoval(Inst *inst) const;
    void EnqueueForRemoval(Inst *inst);
    void UpdateAllocationUsers();
    void UpdateSaveStates();
    Inst *ResolveAllocation(Inst *inst, BasicBlock *block);
    void ResolvePhiInputs();
    void ReplaceAliases();
    Inst *ResolveAlias(const StateOwner &alias, const Inst *inst);
    Inst *CreateNewObject(Inst *originalInst, Inst *saveState);
    Inst *CreateNewArray(Inst *originalInst, Inst *saveState);
    void InitializeObject(Inst *alloc, Inst *instBefore, VirtualState *state);
    void MaterializeObjects();
    void Materialize(Inst *originalInst, Inst *ssAlloc, Inst *ssInit, VirtualState *state);
    void MaterializeAtNewSaveState(Inst *site, ArenaMap<Inst *, VirtualState *> &state);
    void MaterializeInEmptyBlock(BasicBlock *block, ArenaMap<Inst *, VirtualState *> &state);
    void MaterializeAtExistingSaveState(SaveStateInst *saveState, ArenaMap<Inst *, VirtualState *> &state);
    void CreatePhis();
    SaveStateInst *CopySaveState(Inst *inst, VirtualState *except);
    void PatchSaveStates();
    void PatchSaveStatesInBlock(BasicBlock *block, ArenaVector<ArenaSet<Inst *>> &liveness);
    void PatchSaveStatesInLoop(Loop *loop, ArenaSet<Inst *> &loopLiveIns, ArenaVector<ArenaSet<Inst *>> &liveness);
    void FillLiveInsts(BasicBlock *block, ArenaSet<Inst *> &liveIns, ArenaVector<ArenaSet<Inst *>> &liveness);
    void PatchSaveState(SaveStateInst *saveState, ArenaSet<Inst *> &liveInstructions);
    void AddLiveInputs(Inst *inst, ArenaSet<Inst *> &liveIns);
    CallInst *FindCallerInst(BasicBlock *target, Inst *start = nullptr);
    void FixPhiInputTypes();
    bool HasUsageOutsideBlock(Inst *inst, BasicBlock *initialBlock);
};
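
// A minimal usage sketch (an illustration inferred from this header, not a
// verbatim quote of the pass): EscapeAnalysis::RunImpl() is expected to run
// the analysis and, when virtualizable allocations are found, to construct a
// ScalarReplacement from the collected maps and invoke Apply() on them:
//
//   EscapeAnalysis pea(graph);
//   bool graphChanged = pea.RunImpl();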
}  // namespace ark::compiler

#endif  // COMPILER_ANALYSIS_ESCAPE_H