/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MAPLEBE_INCLUDE_BE_LOWERER_H
#define MAPLEBE_INCLUDE_BE_LOWERER_H
/* C++ headers. */
#include <vector>
#include <unordered_map>
#include <utility>
#include <cstddef>
#include <cstdarg>
#include <regex>
#include "intrinsics.h" /* For IntrinDesc. This includes 'intrinsic_op.h' as well */
#include "becommon.h"
#include "cg.h"
#include "bbt.h"
/* MapleIR headers. */
#include "mir_nodes.h"
#include "mir_module.h"
#include "mir_function.h"
#include "mir_lower.h"
#include "simplify.h"

namespace maplebe {
class CGLowerer {
    enum Option : uint64 {
        kUndefined = 0,
        kGenEh = 1ULL << 0,
        kVerboseCG = 1ULL << 1,
    };

    using BuiltinFunctionID = uint32;
    using OptionFlag = uint64;

public:
    CGLowerer(MIRModule &mod, BECommon &common, MIRFunction *func = nullptr) : mirModule(mod), beCommon(common)
    {
        SetOptions(kGenEh);
        mirBuilder = mod.GetMIRBuilder();
        SetCurrentFunc(func);
    }

    CGLowerer(MIRModule &mod, BECommon &common, bool genEh, bool verboseCG) : mirModule(mod), beCommon(common)
    {
        OptionFlag option = 0;
        if (genEh) {
            option |= kGenEh;
        }
        if (verboseCG) {
            option |= kVerboseCG;
        }
        SetOptions(option);
        mirBuilder = mod.GetMIRBuilder();
        SetCurrentFunc(nullptr);
    }

    ~CGLowerer()
    {
        mirBuilder = nullptr;
        currentBlock = nullptr;
    }

    MIRFunction *RegisterFunctionVoidStarToVoid(BuiltinFunctionID id, const std::string &name,
                                                const std::string &paramName);

    void RegisterBuiltIns();

    void LowerFunc(MIRFunction &func);
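
    /*
     * Illustrative usage sketch only (the exact driving sequence lives in the CG phase
     * manager and may differ); it shows how the entry points above are intended to fit
     * together:
     *
     *   CGLowerer lowerer(mirModule, beCommon, genEh, verboseCG);
     *   lowerer.RegisterBuiltIns();                  // map intrinsics to built-in functions
     *   lowerer.RegisterExternalLibraryFunctions();  // register external library functions
     *   lowerer.LowerFunc(*mirFunc);                 // lower one MIRFunction before instruction selection
     */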

    BaseNode *LowerIntrinsicop(const BaseNode &, IntrinsicopNode &, BlockNode &);

    BaseNode *LowerIntrinsicopwithtype(const BaseNode &, IntrinsicopNode &, BlockNode &);

    StmtNode *LowerIntrinsicMplClearStack(const IntrinsiccallNode &intrinCall, BlockNode &newBlk);

    StmtNode *LowerIntrinsicRCCall(const IntrinsiccallNode &intrinCall);

    void LowerArrayStore(const IntrinsiccallNode &intrinCall, BlockNode &newBlk);

    StmtNode *LowerDefaultIntrinsicCall(IntrinsiccallNode &intrinCall, MIRSymbol &st, MIRFunction &fn);

    StmtNode *LowerIntrinsicMplCleanupLocalRefVarsSkip(IntrinsiccallNode &intrinCall);

    StmtNode *LowerIntrinsiccall(IntrinsiccallNode &intrinCall, BlockNode &);

    StmtNode *LowerSyncEnterSyncExit(StmtNode &stmt);

    MIRFunction *GetCurrentFunc() const
    {
        return mirModule.CurFunction();
    }

    BaseNode *LowerExpr(BaseNode &, BaseNode &, BlockNode &);

    BaseNode *LowerDread(DreadNode &dread, const BlockNode &block);

    BaseNode *LowerIread(IreadNode &iread)
    {
        /* use PTY_u8 for boolean type in dread/iread */
        if (iread.GetPrimType() == PTY_u1) {
            iread.SetPrimType(PTY_u8);
        }
        return (iread.GetFieldID() == 0 ? &iread : LowerIreadBitfield(iread));
    }

    BaseNode *LowerCastExpr(BaseNode &expr);

    BaseNode *ExtractSymbolAddress(const StIdx &stIdx);
    BaseNode *LowerDreadToThreadLocal(BaseNode &expr, const BlockNode &block);
    StmtNode *LowerDassignToThreadLocal(StmtNode &stmt, const BlockNode &block);

    void LowerDassign(DassignNode &dassign, BlockNode &block);

    void LowerResetStmt(StmtNode &stmt, BlockNode &block);

    void LowerIassign(IassignNode &iassign, BlockNode &block);

    void LowerRegassign(RegassignNode &regAssign, BlockNode &block);

    void AddElemToPrintf(MapleVector<BaseNode *> &argsPrintf, int num, ...) const;

    std::string AssertBoundaryGetFileName(StmtNode &stmt)
    {
        size_t pos = mirModule.GetFileNameFromFileNum(stmt.GetSrcPos().FileNum()).rfind('/');
        return mirModule.GetFileNameFromFileNum(stmt.GetSrcPos().FileNum()).substr(pos + 1);
    }

    std::string GetFileNameSymbolName(const std::string &fileName) const;

    void LowerAssertBoundary(StmtNode &stmt, BlockNode &block, BlockNode &newBlk, std::vector<StmtNode *> &abortNode);

    StmtNode *LowerIntrinsicopDassign(const DassignNode &dassign, IntrinsicopNode &intrinsic, BlockNode &block);

    void LowerGCMalloc(const BaseNode &node, const GCMallocNode &gcNode, BlockNode &blkNode, bool perm = false);

    std::string GetNewArrayFuncName(const uint32 elemSize, const bool perm) const;

    BaseNode *LowerAddrof(AddrofNode &addrof) const
    {
        return &addrof;
    }

    BaseNode *LowerIaddrof(const IreadNode &iaddrof);
    BaseNode *SplitBinaryNodeOpnd1(BinaryNode &bNode, BlockNode &blkNode);
    BaseNode *SplitTernaryNodeResult(TernaryNode &tNode, BaseNode &parent, BlockNode &blkNode);
    bool IsComplexSelect(const TernaryNode &tNode) const;
    int32 FindTheCurrentStmtFreq(const StmtNode *stmt) const;
    BaseNode *LowerComplexSelect(const TernaryNode &tNode, BaseNode &parent, BlockNode &blkNode);
    BaseNode *LowerFarray(ArrayNode &array);
    BaseNode *LowerArrayDim(ArrayNode &array, int32 dim);
    BaseNode *LowerArrayForLazyBiding(BaseNode &baseNode, BaseNode &offsetNode, const BaseNode &parent);
    BaseNode *LowerArray(ArrayNode &array, const BaseNode &parent);

    DassignNode *SaveReturnValueInLocal(StIdx, uint16);
    BaseNode *NeedRetypeWhenLowerCallAssigned(PrimType pType);
    void LowerCallStmt(StmtNode &, StmtNode *&, BlockNode &, MIRType *retty = nullptr, bool uselvar = false,
                       bool isIntrinAssign = false);
    BlockNode *LowerIntrinsiccallAassignedToAssignStmt(IntrinsiccallNode &intrinsicCall);
    BlockNode *LowerCallAssignedStmt(StmtNode &stmt, bool uselvar = false);
    /* Intrinsiccall processes the return value and vector as a call separately.
     * To be able to handle them in a unified manner, we lower intrinsiccall to intrinsicop.
     */
    BlockNode *LowerIntrinsiccallToIntrinsicop(StmtNode &stmt);
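    /*
     * Illustrative sketch of that lowering (pseudo-MIR with a hypothetical intrinsic
     * name; the exact textual form depends on the IR dump format): a call-style
     * intrinsic with an assigned return, e.g.
     *
     *   intrinsiccallassigned INTRN_FOO (dread i32 %a, dread i32 %b) { dassign %res 0 }
     *
     * is rewritten into an ordinary assignment whose right-hand side is an intrinsicop:
     *
     *   dassign %res 0 (intrinsicop i32 INTRN_FOO (dread i32 %a, dread i32 %b))
     */
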
    bool LowerStructReturnInRegs(BlockNode &newBlk, StmtNode &stmt, const MIRSymbol &retSym);
    void LowerStructReturnInGpRegs(BlockNode &newBlk, const StmtNode &stmt, const MIRSymbol &symbol);
    void LowerStructReturnInFpRegs(BlockNode &newBlk, const StmtNode &stmt, const MIRSymbol &symbol, PrimType primType,
                                   size_t elemNum);
    bool LowerStructReturn(BlockNode &newBlk, StmtNode &stmt, bool &lvar);
    BlockNode *LowerMemop(StmtNode &);

    BaseNode *LowerRem(BaseNode &rem, BlockNode &block);

    void LowerStmt(StmtNode &stmt, BlockNode &block);

    void LowerSwitchOpnd(StmtNode &stmt, BlockNode &block);

    MIRSymbol *CreateNewRetVar(const MIRType &ty, const std::string &prefix);

    void RegisterExternalLibraryFunctions();

    BlockNode *LowerBlock(BlockNode &block);

    void SimplifyBlock(BlockNode &block) const;

    void LowerTryCatchBlocks(BlockNode &body);

#if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
    BlockNode *LowerReturnStructUsingFakeParm(NaryStmtNode &retNode);
#endif
    BlockNode *LowerReturn(NaryStmtNode &retNode);
    void LowerEntry(MIRFunction &func);

    void SplitCallArg(CallNode &callNode, BaseNode *newOpnd, size_t i, BlockNode &newBlk);

    void CleanupBranches(MIRFunction &func) const;

    void LowerTypePtr(BaseNode &expr) const;

    BaseNode *GetBitField(int32 byteOffset, BaseNode *baseAddr, PrimType fieldPrimType);
    StmtNode *WriteBitField(const std::pair<int32, int32> &byteBitOffsets, const MIRBitFieldType *fieldType,
                            BaseNode *baseAddr, BaseNode *rhs, BlockNode *block);
    BaseNode *ReadBitField(const std::pair<int32, int32> &byteBitOffsets, const MIRBitFieldType *fieldType,
                           BaseNode *baseAddr);
    BaseNode *LowerDreadBitfield(DreadNode &dread);
    BaseNode *LowerIreadBitfield(IreadNode &iread);
    StmtNode *LowerDassignBitfield(DassignNode &dassign, BlockNode &block);
    StmtNode *LowerIassignBitfield(IassignNode &iassign, BlockNode &block);

    void LowerAsmStmt(AsmNode *asmNode, BlockNode *blk);

    bool ShouldOptarray() const
    {
        DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "nullptr check");
        return MIRLower::ShouldOptArrayMrt(*mirModule.CurFunction());
    }

    BaseNode *NodeConvert(PrimType mtype, BaseNode &expr);
    /* Lower pointer/reference types if found in pseudo registers. */
    void LowerPseudoRegs(const MIRFunction &func) const;

    /* A pseudo register refers to a symbol when DreadNode is converted to RegreadNode. */
    StIdx GetSymbolReferredToByPseudoRegister(PregIdx regNO) const
    {
        (void)regNO;
        return StIdx();
    }

    void SetOptions(OptionFlag option)
    {
        options = option;
    }

    void SetCheckLoadStore(bool value)
    {
        checkLoadStore = value;
    }

    /* If a built-in function is registered for the given intrinsic, return its PUIdx;
     * otherwise, return kFuncNotFound. */
    PUIdx GetBuiltinToUse(BuiltinFunctionID id) const;
    void InitArrayClassCacheTableIndex();

    MIRModule &mirModule;
    BECommon &beCommon;
    BlockNode *currentBlock = nullptr; /* current block for lowered statements to be inserted into */
    bool checkLoadStore = false;
    int64 seed = 0;
    SimplifyMemOp simplifyMemOp;
    static const std::string kIntrnRetValPrefix;
    static const std::string kUserRetValPrefix;

    static constexpr PUIdx kFuncNotFound = PUIdx(-1);
    static constexpr int kThreeDimArray = 3;
    static constexpr int kNodeThirdOpnd = 2;
    static constexpr int kMCCSyncEnterFast0 = 0;
    static constexpr int kMCCSyncEnterFast1 = 1;
    static constexpr int kMCCSyncEnterFast2 = 2;
    static constexpr int kMCCSyncEnterFast3 = 3;

protected:
    /*
     * Returns true if the lower level (e.g. mplcg) can handle the intrinsic directly.
     * For example, INTRN_MPL_ATOMIC_EXCHANGE_PTR can be handled directly by mplcg,
     * which generates a machine code sequence not containing any function calls.
     * Such intrinsics bypass the lowering of "assigned",
     * and let mplcg handle the intrinsic results which are not return values.
     */
    bool IsIntrinsicCallHandledAtLowerLevel(MIRIntrinsicID intrinsic) const;

    bool IsIntrinsicOpHandledAtLowerLevel(MIRIntrinsicID intrinsic) const;

private:
    void SetCurrentFunc(MIRFunction *func)
    {
        mirModule.SetCurFunction(func);
        simplifyMemOp.SetFunction(func);
        if (func != nullptr) {
            const std::string &dumpFunc = CGOptions::GetDumpFunc();
            const bool debug = CGOptions::GetDumpPhases().find("cglower") != CGOptions::GetDumpPhases().end() &&
                               (dumpFunc == "*" || dumpFunc == func->GetName());
            simplifyMemOp.SetDebug(debug);
        }
    }

    bool ShouldAddAdditionalComment() const
    {
        return (options & kVerboseCG) != 0;
    }

    bool GenerateExceptionHandlingCode() const
    {
        return (options & kGenEh) != 0;
    }

    BaseNode *MergeToCvtType(PrimType dtyp, PrimType styp, BaseNode &src) const;
    StmtNode *CreateStmtCallWithReturnValue(const IntrinsicopNode &intrinNode, const MIRSymbol &ret, PUIdx bFunc,
                                            BaseNode *extraInfo = nullptr) const;
    StmtNode *CreateStmtCallWithReturnValue(const IntrinsicopNode &intrinNode, PregIdx retPregIdx, PUIdx bFunc,
                                            BaseNode *extraInfo = nullptr) const;

    MIRType *GetArrayNodeType(BaseNode &baseNode);
    IreadNode &GetLenNode(BaseNode &opnd0);
    LabelIdx GetLabelIdx(MIRFunction &curFunc) const;
    void ProcessArrayExpr(BaseNode &expr, BlockNode &blkNode);
    void ProcessClassInfo(MIRType &classType, bool &classInfoFromRt, std::string &classInfo) const;
    StmtNode *GenCallNode(const StmtNode &stmt, PUIdx &funcCalled, CallNode &origCall);
    StmtNode *GenIntrinsiccallNode(const StmtNode &stmt, PUIdx &funcCalled, bool &handledAtLowerLevel,
                                   IntrinsiccallNode &origCall);
    StmtNode *GenIcallNode(PUIdx &funcCalled, IcallNode &origCall);
    BlockNode *GenBlockNode(StmtNode &newCall, const CallReturnVector &p2nRets, const Opcode &opcode,
                            const PUIdx &funcCalled, bool handledAtLowerLevel, bool uselvar);
    BaseNode *GetClassInfoExpr(const std::string &classInfo) const;
    BaseNode *GetBaseNodeFromCurFunc(MIRFunction &curFunc, bool isJarray);

    OptionFlag options = 0;
    bool needBranchCleanup = false;
    bool hasTry = false;

    static std::vector<std::pair<BuiltinFunctionID, PUIdx>> builtinFuncIDs;
    MIRBuilder *mirBuilder = nullptr;
    uint32 labelIdx = 0;
    static std::unordered_map<IntrinDesc *, PUIdx> intrinFuncIDs;
    static std::unordered_map<std::string, size_t> arrayClassCacheIndex;
};
} /* namespace maplebe */

#endif /* MAPLEBE_INCLUDE_BE_LOWERER_H */