/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MAPLEBE_INCLUDE_CG_SCHEDULE_H
#define MAPLEBE_INCLUDE_CG_SCHEDULE_H

#include "insn.h"
#include "live.h"
#include "mad.h"
#include "dependence.h"

namespace maplebe {
#define LIST_SCHED_DUMP_NEWPM CG_DEBUG_FUNC(f)
#define LIST_SCHED_DUMP_REF CG_DEBUG_FUNC(cgFunc)

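/* Per-pass bookkeeping for list scheduling: the ready list of candidate
 * nodes, the nodes already scheduled, and the cycle counters used while
 * issuing instructions. */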
class ScheduleProcessInfo {
public:
    explicit ScheduleProcessInfo(uint32 size)
    {
        availableReadyList.reserve(size);
        scheduledNodes.reserve(size);
    }

    virtual ~ScheduleProcessInfo() = default;

    uint32 GetLastUpdateCycle() const
    {
        return lastUpdateCycle;
    }

    void SetLastUpdateCycle(uint32 updateCycle)
    {
        lastUpdateCycle = updateCycle;
    }

    uint32 GetCurrCycle() const
    {
        return currCycle;
    }

    void IncCurrCycle()
    {
        ++currCycle;
    }

    void DecAdvanceCycle()
    {
        advanceCycle--;
    }

    uint32 GetAdvanceCycle() const
    {
        return advanceCycle;
    }

    void SetAdvanceCycle(uint32 cycle)
    {
        advanceCycle = cycle;
    }

    void ClearAvailableReadyList()
    {
        availableReadyList.clear();
    }

    void PushElemIntoAvailableReadyList(DepNode *node)
    {
        availableReadyList.emplace_back(node);
    }

    size_t SizeOfAvailableReadyList() const
    {
        return availableReadyList.size();
    }

    bool AvailableReadyListIsEmpty() const
    {
        return availableReadyList.empty();
    }

    void SetAvailableReadyList(const std::vector<DepNode *> &tempReadyList)
    {
        availableReadyList = tempReadyList;
    }

    const std::vector<DepNode *> &GetAvailableReadyList() const
    {
        return availableReadyList;
    }

    const std::vector<DepNode *> &GetAvailableReadyList()
    {
        return availableReadyList;
    }

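    /* Mark the node as scheduled at the current cycle, occupy the functional
     * units it requires, and append it to scheduledNodes. */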
    void PushElemIntoScheduledNodes(DepNode *node)
    {
        node->SetState(kScheduled);
        node->SetSchedCycle(currCycle);
        node->OccupyRequiredUnits();
        scheduledNodes.emplace_back(node);
    }

    bool IsFirstSeparator() const
    {
        return isFirstSeparator;
    }

    void ResetIsFirstSeparator()
    {
        isFirstSeparator = false;
    }

    size_t SizeOfScheduledNodes() const
    {
        return scheduledNodes.size();
    }

    const std::vector<DepNode *> &GetScheduledNodes() const
    {
        return scheduledNodes;
    }

private:
    std::vector<DepNode *> availableReadyList;
    std::vector<DepNode *> scheduledNodes;
    uint32 lastUpdateCycle = 0;
    uint32 currCycle = 0;
    uint32 advanceCycle = 0;
    bool isFirstSeparator = true;
};

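/* Schedules the dependence nodes of a basic block with register pressure in
 * mind: it tracks live registers, compares the register pressure of the
 * original and the rescheduled series, and provides heuristic, partial and
 * brute-force scheduling strategies (see HeuristicScheduling,
 * PartialScheduling and BruteForceScheduling). */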
class RegPressureSchedule {
public:
    RegPressureSchedule(CGFunc &func, MapleAllocator &alloc)
        : cgFunc(func),
          liveReg(alloc.Adapter()),
          scheduledNode(alloc.Adapter()),
          originalNodeSeries(alloc.Adapter()),
          readyList(alloc.Adapter()),
          partialList(alloc.Adapter()),
          partialSet(alloc.Adapter()),
          partialScheduledNode(alloc.Adapter()),
          optimisticScheduledNodes(alloc.Adapter()),
          splitterIndexes(alloc.Adapter()),
          integerRegisterPressureList(alloc.Adapter()),
          liveInRegNO(alloc.Adapter()),
          liveOutRegNO(alloc.Adapter())
    {
    }
    virtual ~RegPressureSchedule() = default;

    void InitBBInfo(BB &b, MemPool &memPool, const MapleVector<DepNode *> &nodes);
    void BuildPhyRegInfo(const std::vector<int32> &regNumVec) const;
    void InitPartialSplitters(const MapleVector<DepNode *> &nodes);
    void Init(const MapleVector<DepNode *> &nodes);
    void UpdateBBPressure(const DepNode &node);
    void CalculatePressure(const DepNode &node, regno_t reg, bool def) const;
    void SortReadyList();
    static bool IsLastUse(const DepNode &node, regno_t regNO);
    void ReCalculateDepNodePressure(const DepNode &node) const;
    void UpdateLiveReg(const DepNode &node, regno_t reg, bool def);
    bool CanSchedule(const DepNode &node) const;
    void UpdateReadyList(const DepNode &node);
    void BruteUpdateReadyList(const DepNode &node, std::vector<bool> &changedToReady);
    void RestoreReadyList(DepNode &node, std::vector<bool> &changedToReady);
    void UpdatePriority(DepNode &node);
    void CalculateMaxDepth(const MapleVector<DepNode *> &nodes) const;
    void CalculateNear(const DepNode &node) const;
    static bool DepNodePriorityCmp(const DepNode *node1, const DepNode *node2);
    DepNode *ChooseNode();
    void DoScheduling(MapleVector<DepNode *> &nodes);
    void HeuristicScheduling(MapleVector<DepNode *> &nodes);
    int CalculateRegisterPressure(MapleVector<DepNode *> &nodes);
    void PartialScheduling(MapleVector<DepNode *> &nodes);
    void BruteForceScheduling();
    void CalculatePredSize(DepNode &node);
    void InitBruteForceScheduling(MapleVector<DepNode *> &nodes);
    void EmitSchedulingSeries(MapleVector<DepNode *> &nodes);

private:
    void DumpBBPressureInfo() const;
    void DumpBBLiveInfo() const;
    void DumpReadyList() const;
    void DumpSelectInfo(const DepNode &node) const;
    static void DumpDependencyInfo(const MapleVector<DepNode *> &nodes);
    void ReportScheduleError() const;
    void ReportScheduleOutput() const;
    RegType GetRegisterType(regno_t reg) const;

    CGFunc &cgFunc;
    BB *bb = nullptr;
    int32 *maxPressure = nullptr;
    int32 *curPressure = nullptr;
    MapleUnorderedSet<regno_t> liveReg;
    /* save nodes that have been scheduled. */
    MapleVector<DepNode *> scheduledNode;
    MapleVector<DepNode *> originalNodeSeries;
    MapleVector<DepNode *> readyList;
    /* save partial nodes to be scheduled */
    MapleVector<DepNode *> partialList;
    MapleSet<DepNode *> partialSet;
    /* save partial nodes which have been scheduled. */
    MapleVector<DepNode *> partialScheduledNode;
    /* optimistic schedule series with minimum register pressure */
    MapleVector<DepNode *> optimisticScheduledNodes;
    /* save split points */
    MapleVector<int> splitterIndexes;
    /* save integer register pressure */
    MapleVector<int> integerRegisterPressureList;
    /* save the number of physical registers of each type. */
    int32 *physicalRegNum = nullptr;
    int32 maxPriority = 0;
    int32 scheduleSeriesCount = 0;
    /* live-in register set */
    MapleSet<regno_t> liveInRegNO;
    /* live-out register set */
    MapleSet<regno_t> liveOutRegNO;
    /* register pressure without pre-scheduling */
    int originalPressure = 0;
    /* register pressure after pre-scheduling */
    int scheduledPressure = 0;
    /* minimum pressure seen so far */
    int minPressure = -1;
};

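/* Scheduling simulation modes; the enumerators correspond to the DoSchedule,
 * DoBruteForceSchedule and SimulateOnly entry points of Schedule below. */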
enum SimulateType : uint8 { kListSchedule, kBruteForce, kSimulateOnly };

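/* Abstract base class of the target-specific schedulers. It owns the
 * dependence graph and ready list, holds the MAD and dependence-analysis
 * handles, and declares the hooks a backend must implement (DoSchedule,
 * DoBruteForceSchedule, SimulateOnly, ListScheduling, ...). */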
class Schedule {
public:
    Schedule(CGFunc &func, MemPool &memPool, LiveAnalysis &liveAnalysis, const std::string &phase)
        : phaseName(phase),
          cgFunc(func),
          memPool(memPool),
          alloc(&memPool),
          live(liveAnalysis),
          considerRegPressure(false),
          nodes(alloc.Adapter()),
          readyList(alloc.Adapter()),
          liveInRegNo(alloc.Adapter()),
          liveOutRegNo(alloc.Adapter())
    {
    }

    virtual ~Schedule() = default;
    virtual void MemoryAccessPairOpt() = 0;
    virtual void ClinitPairOpt() = 0;
    virtual uint32 DoSchedule() = 0;
    virtual uint32 DoBruteForceSchedule() = 0;
    virtual uint32 SimulateOnly() = 0;
    virtual void UpdateBruteForceSchedCycle() = 0;
    virtual void IterateBruteForce(DepNode &targetNode, MapleVector<DepNode *> &readyList, uint32 currCycle,
                                   MapleVector<DepNode *> &scheduledNodes, uint32 &maxCycleCount,
                                   MapleVector<DepNode *> &optimizedScheduledNodes) = 0;
    virtual void UpdateReadyList(DepNode &targetNode, MapleVector<DepNode *> &readyList, bool updateEStart) = 0;
    virtual void ListScheduling(bool beforeRA) = 0;
    virtual void FinalizeScheduling(BB &bb, const DepAnalysis &depAnalysis) = 0;

protected:
    virtual void Init() = 0;
    virtual uint32 ComputeEstart(uint32 cycle) = 0;
    virtual void ComputeLstart(uint32 maxEstart) = 0;
    virtual void UpdateELStartsOnCycle(uint32 cycle) = 0;
    virtual void RandomTest() = 0;
    virtual void EraseNodeFromReadyList(const DepNode &target) = 0;
    virtual void EraseNodeFromNodeList(const DepNode &target, MapleVector<DepNode *> &nodeList) = 0;
    virtual uint32 GetNextSepIndex() const = 0;
    virtual void CountUnitKind(const DepNode &depNode, uint32 array[], const uint32 arraySize) const = 0;
    virtual void FindAndCombineMemoryAccessPair(const std::vector<DepNode *> &memList) = 0;
    virtual void RegPressureScheduling(BB &bb, MapleVector<DepNode *> &nodes) = 0;
    virtual bool CanCombine(const Insn &insn) const = 0;

    void SetConsiderRegPressure()
    {
        considerRegPressure = true;
    }

    bool GetConsiderRegPressure() const
    {
        return considerRegPressure;
    }

    void InitIDAndLoc();
    void RestoreFirstLoc();

    std::string PhaseName() const
    {
        return phaseName;
    }

    const std::string phaseName;
    CGFunc &cgFunc;
    MemPool &memPool;
    MapleAllocator alloc;
    LiveAnalysis &live;
    DepAnalysis *depAnalysis = nullptr;
    MAD *mad = nullptr;
    uint32 lastSeparatorIndex = 0;
    uint32 nodeSize = 0;
    bool considerRegPressure = false;
    MapleVector<DepNode *> nodes;     /* Dependence graph */
    MapleVector<DepNode *> readyList; /* Ready list. */
    MapleSet<regno_t> liveInRegNo;
    MapleSet<regno_t> liveOutRegNo;
};

MAPLE_FUNC_PHASE_DECLARE(CgPreScheduling, maplebe::CGFunc)
MAPLE_FUNC_PHASE_DECLARE(CgScheduling, maplebe::CGFunc)
} /* namespace maplebe */

#endif /* MAPLEBE_INCLUDE_CG_SCHEDULE_H */