• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2023 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "x64_cg.h"
17 
18 namespace maplebe {
19 using namespace maple;
20 
/* Entry point of the argument-lowering pass: first copy formals bound to
 * pseudo-registers into their virtual registers (MoveVRegisterArgs), then
 * store named-symbol formals from their parameter registers into their
 * stack homes (MoveRegisterArgs). */
void X64MoveRegArgs::Run()
{
    MoveVRegisterArgs();
    MoveRegisterArgs();
}
26 
/* Scan the function's formals and record, for each formal that is passed (at
 * least partly) in a physical register and is bound to a named symbol (not a
 * pseudo-register), which register(s) carry it.
 *   argsList  : formal index -> first physical register (ploc.reg0)
 *   indexList : recorded formal indices, in declaration order
 *   pairReg   : formal index -> second physical register (ploc.reg1) for
 *               two-register aggregates not split across pure FP registers
 *   numFpRegs : formal index -> number of pure FP registers used
 *   fpSize    : formal index -> size of each FP piece for that formal
 */
void X64MoveRegArgs::CollectRegisterArgs(std::map<uint32, X64reg> &argsList, std::vector<uint32> &indexList,
                                         std::map<uint32, X64reg> &pairReg, std::vector<uint32> &numFpRegs,
                                         std::vector<uint32> &fpSize) const
{
    CHECK_FATAL(cgFunc != nullptr, "nullptr check");
    X64CGFunc *x64CGFunc = static_cast<X64CGFunc *>(cgFunc);
    uint32 numFormal = static_cast<uint32>(x64CGFunc->GetFunction().GetFormalCount());
    numFpRegs.resize(numFormal);
    fpSize.resize(numFormal);
    X64CallConvImpl parmlocator(x64CGFunc->GetBecommon());
    CCLocInfo ploc;
    uint32 start = 0;
    if (numFormal) {
        /* When the function has a recorded return type of <= 16 bytes, the
         * first formal is skipped — presumably it carries the return value
         * rather than a user parameter (TODO confirm against X64CallConvImpl). */
        MIRFunction *func = const_cast<MIRFunction *>(x64CGFunc->GetBecommon().GetMIRModule().CurFunction());
        if (x64CGFunc->GetBecommon().HasFuncReturnType(*func)) {
            TyIdx tyIdx = x64CGFunc->GetBecommon().GetFuncReturnType(*func);
            if (GlobalTables::GetTypeTable().GetTypeFromTyIdx(tyIdx)->GetSize() <= k16ByteSize) {
                start = 1;
            }
        }
    }
    for (uint32 i = start; i < numFormal; ++i) {
        /* parmlocator is stateful: LocateNextParm must run for every formal
         * (even ones skipped below) so register/stack assignment stays in
         * sync with the calling convention. */
        MIRType *ty = x64CGFunc->GetFunction().GetNthParamType(i);
        parmlocator.LocateNextParm(*ty, ploc, i == 0, &x64CGFunc->GetFunction());
        if (ploc.reg0 == kRinvalid) {
            /* Passed entirely on the stack; nothing to record. */
            continue;
        }
        X64reg reg0 = static_cast<X64reg>(ploc.reg0);
        MIRSymbol *sym = x64CGFunc->GetFunction().GetFormal(i);
        if (sym->IsPreg()) {
            /* Pseudo-register formals are handled by MoveVRegisterArgs. */
            continue;
        }
        argsList[i] = reg0;
        indexList.emplace_back(i);
        if (ploc.reg1 == kRinvalid) {
            /* Single-register formal: no pair information to record. */
            continue;
        }
        if (ploc.numFpPureRegs) {
            /* Aggregate split across multiple FP registers: record the count
             * and per-piece size instead of a pair register. */
            uint32 index = i;
            numFpRegs[index] = ploc.numFpPureRegs;
            fpSize[index] = ploc.fpSize;
            continue;
        }
        pairReg[i] = static_cast<X64reg>(ploc.reg1);
    }
}
73 
GetArgInfo(std::map<uint32,X64reg> & argsList,uint32 argIndex,std::vector<uint32> & numFpRegs,std::vector<uint32> & fpSize) const74 X64ArgInfo X64MoveRegArgs::GetArgInfo(std::map<uint32, X64reg> &argsList, uint32 argIndex,
75                                       std::vector<uint32> &numFpRegs, std::vector<uint32> &fpSize) const
76 {
77     X64CGFunc *x64CGFunc = static_cast<X64CGFunc *>(cgFunc);
78     X64ArgInfo argInfo;
79     argInfo.reg = argsList[argIndex];
80     argInfo.mirTy = x64CGFunc->GetFunction().GetNthParamType(argIndex);
81     argInfo.symSize = argInfo.mirTy->GetSize();
82     argInfo.memPairSecondRegSize = 0;
83     argInfo.doMemPairOpt = false;
84     argInfo.createTwoStores = false;
85     argInfo.isTwoRegParm = false;
86     if ((argInfo.symSize > k8ByteSize) && (argInfo.symSize <= k16ByteSize)) {
87         argInfo.isTwoRegParm = true;
88         if (numFpRegs[argIndex] > kOneRegister) {
89             argInfo.symSize = argInfo.stkSize = fpSize[argIndex];
90         } else {
91             if (argInfo.symSize > k12ByteSize) {
92                 argInfo.memPairSecondRegSize = k8ByteSize;
93             } else {
94                 /* Round to 4 the stack space required for storing the struct */
95                 argInfo.memPairSecondRegSize = k4ByteSize;
96             }
97             argInfo.doMemPairOpt = true;
98             argInfo.symSize = argInfo.stkSize = GetPointerSize();
99         }
100     } else if (argInfo.symSize > k16ByteSize) {
101         /* For large struct passing, a pointer to the copy is used. */
102         argInfo.symSize = argInfo.stkSize = GetPointerSize();
103     } else {
104         argInfo.stkSize = (argInfo.symSize < k4ByteSize) ? k4ByteSize : argInfo.symSize;
105         if (argInfo.symSize > k4ByteSize) {
106             argInfo.symSize = k8ByteSize;
107         }
108     }
109 
110     argInfo.regType = (argInfo.reg < V0) ? kRegTyInt : kRegTyFloat;
111     argInfo.sym = x64CGFunc->GetFunction().GetFormal(argIndex);
112     CHECK_NULL_FATAL(argInfo.sym);
113     argInfo.symLoc =
114         static_cast<const X64SymbolAlloc *>(x64CGFunc->GetMemlayout()->GetSymAllocInfo(argInfo.sym->GetStIndex()));
115     CHECK_NULL_FATAL(argInfo.symLoc);
116     return argInfo;
117 }
118 
GenerateMovInsn(X64ArgInfo & argInfo,X64reg reg2)119 void X64MoveRegArgs::GenerateMovInsn(X64ArgInfo &argInfo, X64reg reg2)
120 {
121     /* reg2 is required when the struct size is between 8-16 bytes */
122     X64CGFunc *x64CGFunc = static_cast<X64CGFunc *>(cgFunc);
123     int32 stOffset = x64CGFunc->GetBaseOffset(*argInfo.symLoc);
124     RegOperand *baseOpnd = x64CGFunc->GetBaseReg(*argInfo.symLoc);
125     uint32 opndSize = argInfo.symSize * kBitsPerByte;
126     RegOperand &regOpnd = x64CGFunc->GetOpndBuilder()->CreatePReg(argInfo.reg, opndSize, argInfo.regType);
127     MemOperand *memOpnd = &x64CGFunc->GetOpndBuilder()->CreateMem(*baseOpnd, stOffset, opndSize);
128 
129     MOperator mOp = x64::MOP_begin;
130     if (opndSize == k64BitSize) {
131         mOp = argInfo.regType == kRegTyInt ? x64::MOP_movq_r_m : x64::MOP_movfd_r_m;
132     } else if (opndSize == k32BitSize) {
133         mOp = argInfo.regType == kRegTyInt ? x64::MOP_movl_r_m : x64::MOP_movfs_r_m;
134     } else if (opndSize == k16BitSize) {
135         mOp = argInfo.regType == kRegTyInt ? x64::MOP_movw_r_m : x64::MOP_begin;
136     } else if (opndSize == k8BitSize) {
137         mOp = argInfo.regType == kRegTyInt ? x64::MOP_movb_r_m : x64::MOP_begin;
138     } else {
139         CHECK_FATAL(false, "NIY");
140     }
141     CHECK_FATAL(mOp != x64::MOP_begin, "NIY");
142     Insn &insn = x64CGFunc->GetInsnBuilder()->BuildInsn(mOp, X64CG::kMd[mOp]);
143     insn.AddOpndChain(regOpnd).AddOpndChain(*memOpnd);
144     x64CGFunc->GetCurBB()->AppendInsn(insn);
145     if (reg2 != kRinvalid) {
146         RegOperand &regOpnd2 = x64CGFunc->GetOpndBuilder()->CreatePReg(reg2, opndSize, argInfo.regType);
147         MemOperand *memOpnd2 = &x64CGFunc->GetOpndBuilder()->CreateMem(*baseOpnd, stOffset + 8, opndSize);
148         Insn &insn2 = x64CGFunc->GetInsnBuilder()->BuildInsn(mOp, X64CG::kMd[mOp]);
149         insn2.AddOpndChain(regOpnd2).AddOpndChain(*memOpnd2);
150         x64CGFunc->GetCurBB()->AppendInsn(insn2);
151     }
152 }
153 
MoveRegisterArgs()154 void X64MoveRegArgs::MoveRegisterArgs()
155 {
156     X64CGFunc *x64CGFunc = static_cast<X64CGFunc *>(cgFunc);
157     BB *formerCurBB = x64CGFunc->GetCurBB();
158     x64CGFunc->GetDummyBB()->ClearInsns();
159     x64CGFunc->SetCurBB(*x64CGFunc->GetDummyBB());
160 
161     /* <[0], maplebe::R0>; <[1], maplebe::V0> */
162     std::map<uint32, X64reg> movePara;
163     /* [0], [1] */
164     std::vector<uint32> moveParaIndex;
165     std::map<uint32, X64reg> pairReg;
166     std::vector<uint32> numFpRegs;
167     std::vector<uint32> fpSize;
168     CollectRegisterArgs(movePara, moveParaIndex, pairReg, numFpRegs, fpSize);
169 
170     for (auto indexItem = moveParaIndex.begin(); indexItem != moveParaIndex.end(); ++indexItem) {
171         uint32 index = *indexItem;
172         X64ArgInfo argInfo = GetArgInfo(movePara, index, numFpRegs, fpSize);
173         GenerateMovInsn(argInfo, pairReg[index]);
174     }
175 
176     x64CGFunc->GetFirstBB()->InsertAtBeginning(*x64CGFunc->GetDummyBB());
177     x64CGFunc->SetCurBB(*formerCurBB);
178 }
179 
LoadStackArgsToVReg(MIRSymbol & mirSym)180 void X64MoveRegArgs::LoadStackArgsToVReg(MIRSymbol &mirSym)
181 {
182     DEBUG_ASSERT(mirSym.GetStorageClass() == kScFormal, "NIY, vreg parameters should be kScFormal type.");
183     X64CGFunc *x64CGFunc = static_cast<X64CGFunc *>(cgFunc);
184     PrimType stype = mirSym.GetType()->GetPrimType();
185     uint32 opndSize = GetPrimTypeBitSize(stype);
186     RegType regType = cgFunc->GetRegTyFromPrimTy(stype);
187     auto symLoc = static_cast<const X64SymbolAlloc *>(x64CGFunc->GetMemlayout()->GetSymAllocInfo(mirSym.GetStIndex()));
188     int32 stOffset = x64CGFunc->GetBaseOffset(*symLoc);
189     RegOperand *baseOpnd = x64CGFunc->GetBaseReg(*symLoc);
190     MemOperand &memOpnd = x64CGFunc->GetOpndBuilder()->CreateMem(*baseOpnd, stOffset, opndSize);
191     PregIdx pregIdx = x64CGFunc->GetFunction().GetPregTab()->GetPregIdxFromPregno(mirSym.GetPreg()->GetPregNo());
192     RegOperand &dstRegOpnd = x64CGFunc->GetOpndBuilder()->CreateVReg(
193         x64CGFunc->GetVirtualRegNOFromPseudoRegIdx(pregIdx), opndSize, cgFunc->GetRegTyFromPrimTy(stype));
194 
195     MOperator mOp;
196     if (opndSize == k64BitSize) {
197         mOp = regType == kRegTyInt ? x64::MOP_movq_m_r : x64::MOP_movfd_m_r;
198     } else if (opndSize == k32BitSize) {
199         mOp = regType == kRegTyInt ? x64::MOP_movl_m_r : x64::MOP_movfs_m_r;
200     } else if (opndSize == k16BitSize) {
201         mOp = regType == kRegTyInt ? x64::MOP_movw_m_r : x64::MOP_begin;
202     } else if (opndSize == k8BitSize) {
203         mOp = regType == kRegTyInt ? x64::MOP_movb_m_r : x64::MOP_begin;
204     } else {
205         CHECK_FATAL(false, "NIY");
206     }
207     CHECK_FATAL(mOp != x64::MOP_begin, "should not happen");
208     Insn &insn = x64CGFunc->GetInsnBuilder()->BuildInsn(mOp, X64CG::kMd[mOp]);
209     insn.AddOpndChain(memOpnd).AddOpndChain(dstRegOpnd);
210     if (x64CGFunc->GetCG()->GenerateVerboseCG()) {
211         std::string key = "param: %%";
212         key += std::to_string(mirSym.GetPreg()->GetPregNo());
213         insn.SetComment(key);
214     }
215     x64CGFunc->GetCurBB()->InsertInsnBegin(insn);
216 }
217 
MoveArgsToVReg(const CCLocInfo & ploc,MIRSymbol & mirSym)218 void X64MoveRegArgs::MoveArgsToVReg(const CCLocInfo &ploc, MIRSymbol &mirSym)
219 {
220     DEBUG_ASSERT(mirSym.GetStorageClass() == kScFormal, "NIY, vreg parameters should be kScFormal type.");
221     X64CGFunc *x64CGFunc = static_cast<X64CGFunc *>(cgFunc);
222     RegType regType = (ploc.reg0 < V0) ? kRegTyInt : kRegTyFloat;
223     PrimType stype = mirSym.GetType()->GetPrimType();
224     uint32 byteSize = GetPrimTypeSize(stype);
225     uint32 srcBitSize = ((byteSize < k4ByteSize) ? k4ByteSize : byteSize) * kBitsPerByte;
226     PregIdx pregIdx = x64CGFunc->GetFunction().GetPregTab()->GetPregIdxFromPregno(mirSym.GetPreg()->GetPregNo());
227     RegOperand &dstRegOpnd = x64CGFunc->GetOpndBuilder()->CreateVReg(
228         x64CGFunc->GetVirtualRegNOFromPseudoRegIdx(pregIdx), srcBitSize, regType);
229     RegOperand &srcRegOpnd = x64CGFunc->GetOpndBuilder()->CreateVReg(ploc.reg0, srcBitSize, regType);
230 
231     MOperator mOp;
232     if (srcBitSize == k64BitSize) {
233         mOp = (regType == kRegTyInt) ? x64::MOP_movq_r_r : x64::MOP_movfd_r_r;
234     } else if (srcBitSize == k32BitSize) {
235         mOp = (regType == kRegTyInt) ? x64::MOP_movl_r_r : x64::MOP_movfs_r_r;
236     } else if (srcBitSize == k16BitSize) {
237         mOp = (regType == kRegTyInt) ? x64::MOP_movw_r_r : x64::MOP_begin;
238     } else if (srcBitSize == k8BitSize) {
239         mOp = (regType == kRegTyInt) ? x64::MOP_movb_r_r : x64::MOP_begin;
240     } else {
241         CHECK_FATAL(false, "NIY");
242     }
243     CHECK_FATAL(mOp != x64::MOP_begin, "should not happen");
244     Insn &insn = x64CGFunc->GetInsnBuilder()->BuildInsn(mOp, X64CG::kMd[mOp]);
245     insn.AddOpndChain(srcRegOpnd).AddOpndChain(dstRegOpnd);
246     if (x64CGFunc->GetCG()->GenerateVerboseCG()) {
247         std::string key = "param: %%";
248         key += std::to_string(mirSym.GetPreg()->GetPregNo());
249         insn.SetComment(key);
250     }
251     x64CGFunc->GetCurBB()->InsertInsnBegin(insn);
252 }
253 
MoveVRegisterArgs()254 void X64MoveRegArgs::MoveVRegisterArgs()
255 {
256     X64CGFunc *x64CGFunc = static_cast<X64CGFunc *>(cgFunc);
257     BB *formerCurBB = x64CGFunc->GetCurBB();
258     x64CGFunc->GetDummyBB()->ClearInsns();
259     x64CGFunc->SetCurBB(*x64CGFunc->GetDummyBB());
260     X64CallConvImpl parmlocator(x64CGFunc->GetBecommon());
261     CCLocInfo ploc;
262 
263     uint32 formalCount = static_cast<uint32>(x64CGFunc->GetFunction().GetFormalCount());
264     uint32 start = 0;
265     if (formalCount) {
266         MIRFunction *func = const_cast<MIRFunction *>(x64CGFunc->GetBecommon().GetMIRModule().CurFunction());
267         if (x64CGFunc->GetBecommon().HasFuncReturnType(*func)) {
268             TyIdx idx = x64CGFunc->GetBecommon().GetFuncReturnType(*func);
269             if (x64CGFunc->GetBecommon().GetTypeSize(idx) <= k16BitSize) {
270                 start = 1;
271             }
272         }
273     }
274     for (uint32 i = start; i < formalCount; ++i) {
275         MIRType *ty = x64CGFunc->GetFunction().GetNthParamType(i);
276         parmlocator.LocateNextParm(*ty, ploc, i == 0, &x64CGFunc->GetFunction());
277         MIRSymbol *sym = x64CGFunc->GetFunction().GetFormal(i);
278 
279         /* load locarefvar formals to store in the reflocals. */
280         if (x64CGFunc->GetFunction().GetNthParamAttr(i).GetAttr(ATTR_localrefvar) && ploc.reg0 == kRinvalid) {
281             CHECK_FATAL(false, "NIY");
282         }
283 
284         if (!sym->IsPreg()) {
285             continue;
286         }
287 
288         if (ploc.reg0 == kRinvalid) {
289             /* load stack parameters to the vreg. */
290             LoadStackArgsToVReg(*sym);
291         } else {
292             MoveArgsToVReg(ploc, *sym);
293         }
294     }
295 
296     x64CGFunc->GetFirstBB()->InsertAtBeginning(*x64CGFunc->GetDummyBB());
297     x64CGFunc->SetCurBB(*formerCurBB);
298 }
299 } /* namespace maplebe */
300