/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "x64_cg.h"
#include "x64_isa.h"
#include "x64_MPISel.h"

namespace maplebe {
using namespace maple;

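/* Entry point of the pass: first move incoming arguments of preg formals into their
 * virtual registers, then store register-passed arguments of memory formals into
 * their stack slots. */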
void X64MoveRegArgs::Run()
{
    MoveVRegisterArgs();
    MoveRegisterArgs();
}

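/* Walk the formal parameters and record, for each non-preg formal passed in registers:
 * its first register (argsList), its formal index (indexList), a possible second register
 * for 8-16 byte aggregates (pairReg), and the count/size of pure floating-point
 * registers (numFpRegs/fpSize). */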
void X64MoveRegArgs::CollectRegisterArgs(std::map<uint32, X64reg> &argsList, std::vector<uint32> &indexList,
                                         std::map<uint32, X64reg> &pairReg, std::vector<uint32> &numFpRegs,
                                         std::vector<uint32> &fpSize) const
{
    X64CGFunc *x64CGFunc = static_cast<X64CGFunc *>(cgFunc);
    uint32 numFormal = static_cast<uint32>(x64CGFunc->GetFunction().GetFormalCount());
    numFpRegs.resize(numFormal);
    fpSize.resize(numFormal);
    X64CallConvImpl parmlocator(x64CGFunc->GetBecommon());
    CCLocInfo ploc;
    uint32 start = 0;
    if (numFormal) {
        MIRFunction *func = const_cast<MIRFunction *>(x64CGFunc->GetBecommon().GetMIRModule().CurFunction());
        if (x64CGFunc->GetBecommon().HasFuncReturnType(*func)) {
            TyIdx tyIdx = x64CGFunc->GetBecommon().GetFuncReturnType(*func);
            if (GlobalTables::GetTypeTable().GetTypeFromTyIdx(tyIdx)->GetSize() <= k16ByteSize) {
                start = 1;
            }
        }
    }
    for (uint32 i = start; i < numFormal; ++i) {
        MIRType *ty = x64CGFunc->GetFunction().GetNthParamType(i);
        parmlocator.LocateNextParm(*ty, ploc, i == 0, &x64CGFunc->GetFunction());
        if (ploc.reg0 == kRinvalid) {
            continue;
        }
        X64reg reg0 = static_cast<X64reg>(ploc.reg0);
        MIRSymbol *sym = x64CGFunc->GetFunction().GetFormal(i);
        if (sym->IsPreg()) {
            continue;
        }
        argsList[i] = reg0;
        indexList.emplace_back(i);
        if (ploc.reg1 == kRinvalid) {
            continue;
        }
        if (ploc.numFpPureRegs) {
            uint32 index = i;
            numFpRegs[index] = ploc.numFpPureRegs;
            fpSize[index] = ploc.fpSize;
            continue;
        }
        pairReg[i] = static_cast<X64reg>(ploc.reg1);
    }
}

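/* Build an X64ArgInfo for the formal at argIndex: resolve its register, type, and the
 * sizes used for the register move and the stack slot (handling two-register aggregates
 * and large structs passed via pointer), plus its symbol and stack-layout information. */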
X64ArgInfo X64MoveRegArgs::GetArgInfo(std::map<uint32, X64reg> &argsList, uint32 argIndex,
                                      std::vector<uint32> &numFpRegs, std::vector<uint32> &fpSize) const
{
    X64CGFunc *x64CGFunc = static_cast<X64CGFunc *>(cgFunc);
    X64ArgInfo argInfo;
    argInfo.reg = argsList[argIndex];
    argInfo.mirTy = x64CGFunc->GetFunction().GetNthParamType(argIndex);
    argInfo.symSize = argInfo.mirTy->GetSize();
    argInfo.memPairSecondRegSize = 0;
    argInfo.doMemPairOpt = false;
    argInfo.createTwoStores = false;
    argInfo.isTwoRegParm = false;
    if ((argInfo.symSize > k8ByteSize) && (argInfo.symSize <= k16ByteSize)) {
        argInfo.isTwoRegParm = true;
        if (numFpRegs[argIndex] > kOneRegister) {
            argInfo.symSize = argInfo.stkSize = fpSize[argIndex];
        } else {
            if (argInfo.symSize > k12ByteSize) {
                argInfo.memPairSecondRegSize = k8ByteSize;
            } else {
                /* Round the stack space required for storing the struct up to 4 bytes. */
                argInfo.memPairSecondRegSize = k4ByteSize;
            }
            argInfo.doMemPairOpt = true;
            argInfo.symSize = argInfo.stkSize = GetPointerSize();
        }
    } else if (argInfo.symSize > k16ByteSize) {
        /* For large struct passing, a pointer to the copy is used. */
        argInfo.symSize = argInfo.stkSize = GetPointerSize();
    } else if ((argInfo.mirTy->GetPrimType() == PTY_agg) && (argInfo.symSize < k8ByteSize)) {
        argInfo.symSize = argInfo.stkSize = k8ByteSize;
    } else {
        argInfo.stkSize = (argInfo.symSize < k4ByteSize) ? k4ByteSize : argInfo.symSize;
        if (argInfo.symSize > k4ByteSize) {
            argInfo.symSize = k8ByteSize;
        } else if ((argInfo.mirTy->GetPrimType() == PTY_agg) && (argInfo.symSize <= k4ByteSize)) {
            argInfo.symSize = k4ByteSize;
        }
    }

    if (GetVecLanes(argInfo.mirTy->GetPrimType()) > 0) {
        /* vector type */
        CHECK_FATAL(false, "NIY");
    }

    argInfo.regType = (argInfo.reg < V0) ? kRegTyInt : kRegTyFloat;
    argInfo.sym = x64CGFunc->GetFunction().GetFormal(argIndex);
    CHECK_NULL_FATAL(argInfo.sym);
    argInfo.symLoc =
        static_cast<const X64SymbolAlloc *>(x64CGFunc->GetMemlayout()->GetSymAllocInfo(argInfo.sym->GetStIndex()));
    CHECK_NULL_FATAL(argInfo.symLoc);
    return argInfo;
}

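/* Emit the store that spills a register-passed argument to its stack slot. For
 * two-register aggregates, reg2 holds the second half and is stored 8 bytes
 * above the first half. */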
void X64MoveRegArgs::GenerateMovInsn(X64ArgInfo &argInfo, X64reg reg2)
{
    /* reg2 is needed when the struct size is between 8 and 16 bytes. */
    X64CGFunc *x64CGFunc = static_cast<X64CGFunc *>(cgFunc);
    int32 stOffset = x64CGFunc->GetBaseOffset(*argInfo.symLoc);
    RegOperand *baseOpnd = x64CGFunc->GetBaseReg(*argInfo.symLoc);
    uint32 opndSize = argInfo.symSize * kBitsPerByte;
    RegOperand &regOpnd = x64CGFunc->GetOpndBuilder()->CreatePReg(argInfo.reg, opndSize, argInfo.regType);
    MemOperand *memOpnd = &x64CGFunc->GetOpndBuilder()->CreateMem(*baseOpnd, stOffset, opndSize);

    MOperator mOp = x64::MOP_begin;
    if (opndSize == k64BitSize) {
        mOp = argInfo.regType == kRegTyInt ? x64::MOP_movq_r_m : x64::MOP_movfd_r_m;
    } else if (opndSize == k32BitSize) {
        mOp = argInfo.regType == kRegTyInt ? x64::MOP_movl_r_m : x64::MOP_movfs_r_m;
    } else if (opndSize == k16BitSize) {
        mOp = argInfo.regType == kRegTyInt ? x64::MOP_movw_r_m : x64::MOP_begin;
    } else if (opndSize == k8BitSize) {
        mOp = argInfo.regType == kRegTyInt ? x64::MOP_movb_r_m : x64::MOP_begin;
    } else {
        CHECK_FATAL(false, "NIY");
    }
    CHECK_FATAL(mOp != x64::MOP_begin, "NIY");
    Insn &insn = x64CGFunc->GetInsnBuilder()->BuildInsn(mOp, X64CG::kMd[mOp]);
    insn.AddOpndChain(regOpnd).AddOpndChain(*memOpnd);
    x64CGFunc->GetCurBB()->AppendInsn(insn);
    if (reg2 != kRinvalid) {
        RegOperand &regOpnd2 = x64CGFunc->GetOpndBuilder()->CreatePReg(reg2, opndSize, argInfo.regType);
        MemOperand *memOpnd2 = &x64CGFunc->GetOpndBuilder()->CreateMem(*baseOpnd, stOffset + 8, opndSize);
        Insn &insn2 = x64CGFunc->GetInsnBuilder()->BuildInsn(mOp, X64CG::kMd[mOp]);
        insn2.AddOpndChain(regOpnd2).AddOpndChain(*memOpnd2);
        x64CGFunc->GetCurBB()->AppendInsn(insn2);
    }
}

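/* Store register-passed arguments of non-preg formals into their stack slots.
 * The stores are built in the dummy BB and then prepended to the first BB. */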
void X64MoveRegArgs::MoveRegisterArgs()
{
    X64CGFunc *x64CGFunc = static_cast<X64CGFunc *>(cgFunc);
    BB *formerCurBB = x64CGFunc->GetCurBB();
    x64CGFunc->GetDummyBB()->ClearInsns();
    x64CGFunc->SetCurBB(*x64CGFunc->GetDummyBB());

    /* <[0], maplebe::R0>; <[1], maplebe::V0> */
    std::map<uint32, X64reg> movePara;
    /* [0], [1] */
    std::vector<uint32> moveParaIndex;
    std::map<uint32, X64reg> pairReg;
    std::vector<uint32> numFpRegs;
    std::vector<uint32> fpSize;
    CollectRegisterArgs(movePara, moveParaIndex, pairReg, numFpRegs, fpSize);

    for (auto indexItem = moveParaIndex.begin(); indexItem != moveParaIndex.end(); ++indexItem) {
        uint32 index = *indexItem;
        X64ArgInfo argInfo = GetArgInfo(movePara, index, numFpRegs, fpSize);
        GenerateMovInsn(argInfo, pairReg[index]);
    }

    x64CGFunc->GetFirstBB()->InsertAtBeginning(*x64CGFunc->GetDummyBB());
    x64CGFunc->SetCurBB(*formerCurBB);
}

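/* Load a stack-passed formal that is a pseudo-register into its virtual register
 * using a memory-to-register move of the matching width. */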
void X64MoveRegArgs::LoadStackArgsToVReg(MIRSymbol &mirSym)
{
    DEBUG_ASSERT(mirSym.GetStorageClass() == kScFormal, "NIY, vreg parameters should be kScFormal type.");
    X64CGFunc *x64CGFunc = static_cast<X64CGFunc *>(cgFunc);
    PrimType stype = mirSym.GetType()->GetPrimType();
    uint32 opndSize = GetPrimTypeBitSize(stype);
    RegType regType = cgFunc->GetRegTyFromPrimTy(stype);
    auto symLoc = static_cast<const X64SymbolAlloc *>(x64CGFunc->GetMemlayout()->GetSymAllocInfo(mirSym.GetStIndex()));
    int32 stOffset = x64CGFunc->GetBaseOffset(*symLoc);
    RegOperand *baseOpnd = x64CGFunc->GetBaseReg(*symLoc);
    MemOperand &memOpnd = x64CGFunc->GetOpndBuilder()->CreateMem(*baseOpnd, stOffset, opndSize);
    PregIdx pregIdx = x64CGFunc->GetFunction().GetPregTab()->GetPregIdxFromPregno(mirSym.GetPreg()->GetPregNo());
    RegOperand &dstRegOpnd = x64CGFunc->GetOpndBuilder()->CreateVReg(
        x64CGFunc->GetVirtualRegNOFromPseudoRegIdx(pregIdx), opndSize, cgFunc->GetRegTyFromPrimTy(stype));

    MOperator mOp;
    if (opndSize == k64BitSize) {
        mOp = regType == kRegTyInt ? x64::MOP_movq_m_r : x64::MOP_movfd_m_r;
    } else if (opndSize == k32BitSize) {
        mOp = regType == kRegTyInt ? x64::MOP_movl_m_r : x64::MOP_movfs_m_r;
    } else if (opndSize == k16BitSize) {
        mOp = regType == kRegTyInt ? x64::MOP_movw_m_r : x64::MOP_begin;
    } else if (opndSize == k8BitSize) {
        mOp = regType == kRegTyInt ? x64::MOP_movb_m_r : x64::MOP_begin;
    } else {
        CHECK_FATAL(false, "NIY");
    }
    CHECK_FATAL(mOp != x64::MOP_begin, "should not happen");
    Insn &insn = x64CGFunc->GetInsnBuilder()->BuildInsn(mOp, X64CG::kMd[mOp]);
    insn.AddOpndChain(memOpnd).AddOpndChain(dstRegOpnd);
    if (x64CGFunc->GetCG()->GenerateVerboseCG()) {
        std::string key = "param: %%";
        key += std::to_string(mirSym.GetPreg()->GetPregNo());
        insn.SetComment(key);
    }
    x64CGFunc->GetCurBB()->InsertInsnBegin(insn);
}

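/* Copy a register-passed formal that is a pseudo-register from its parameter
 * register into its virtual register with a register-to-register move. */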
void X64MoveRegArgs::MoveArgsToVReg(const CCLocInfo &ploc, MIRSymbol &mirSym)
{
    DEBUG_ASSERT(mirSym.GetStorageClass() == kScFormal, "NIY, vreg parameters should be kScFormal type.");
    X64CGFunc *x64CGFunc = static_cast<X64CGFunc *>(cgFunc);
    RegType regType = (ploc.reg0 < V0) ? kRegTyInt : kRegTyFloat;
    PrimType stype = mirSym.GetType()->GetPrimType();
    uint32 byteSize = GetPrimTypeSize(stype);
    uint32 srcBitSize = ((byteSize < k4ByteSize) ? k4ByteSize : byteSize) * kBitsPerByte;
    PregIdx pregIdx = x64CGFunc->GetFunction().GetPregTab()->GetPregIdxFromPregno(mirSym.GetPreg()->GetPregNo());
    RegOperand &dstRegOpnd = x64CGFunc->GetOpndBuilder()->CreateVReg(
        x64CGFunc->GetVirtualRegNOFromPseudoRegIdx(pregIdx), srcBitSize, regType);
    RegOperand &srcRegOpnd = x64CGFunc->GetOpndBuilder()->CreateVReg(ploc.reg0, srcBitSize, regType);

    MOperator mOp;
    if (srcBitSize == k64BitSize) {
        mOp = (regType == kRegTyInt) ? x64::MOP_movq_r_r : x64::MOP_movfd_r_r;
    } else if (srcBitSize == k32BitSize) {
        mOp = (regType == kRegTyInt) ? x64::MOP_movl_r_r : x64::MOP_movfs_r_r;
    } else if (srcBitSize == k16BitSize) {
        mOp = (regType == kRegTyInt) ? x64::MOP_movw_r_r : x64::MOP_begin;
    } else if (srcBitSize == k8BitSize) {
        mOp = (regType == kRegTyInt) ? x64::MOP_movb_r_r : x64::MOP_begin;
    } else {
        CHECK_FATAL(false, "NIY");
    }
    CHECK_FATAL(mOp != x64::MOP_begin, "should not happen");
    Insn &insn = x64CGFunc->GetInsnBuilder()->BuildInsn(mOp, X64CG::kMd[mOp]);
    insn.AddOpndChain(srcRegOpnd).AddOpndChain(dstRegOpnd);
    if (x64CGFunc->GetCG()->GenerateVerboseCG()) {
        std::string key = "param: %%";
        key += std::to_string(mirSym.GetPreg()->GetPregNo());
        insn.SetComment(key);
    }
    x64CGFunc->GetCurBB()->InsertInsnBegin(insn);
}

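/* For each preg formal, emit the move that brings its incoming value (from a
 * parameter register or the caller's stack area) into its virtual register.
 * The moves are built in the dummy BB and then prepended to the first BB. */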
void X64MoveRegArgs::MoveVRegisterArgs()
{
    X64CGFunc *x64CGFunc = static_cast<X64CGFunc *>(cgFunc);
    BB *formerCurBB = x64CGFunc->GetCurBB();
    x64CGFunc->GetDummyBB()->ClearInsns();
    x64CGFunc->SetCurBB(*x64CGFunc->GetDummyBB());
    X64CallConvImpl parmlocator(x64CGFunc->GetBecommon());
    CCLocInfo ploc;

    uint32 formalCount = static_cast<uint32>(x64CGFunc->GetFunction().GetFormalCount());
    uint32 start = 0;
    if (formalCount) {
        MIRFunction *func = const_cast<MIRFunction *>(x64CGFunc->GetBecommon().GetMIRModule().CurFunction());
        if (x64CGFunc->GetBecommon().HasFuncReturnType(*func)) {
            TyIdx idx = x64CGFunc->GetBecommon().GetFuncReturnType(*func);
            if (x64CGFunc->GetBecommon().GetTypeSize(idx) <= k16BitSize) {
                start = 1;
            }
        }
    }
    for (uint32 i = start; i < formalCount; ++i) {
        MIRType *ty = x64CGFunc->GetFunction().GetNthParamType(i);
        parmlocator.LocateNextParm(*ty, ploc, i == 0, &x64CGFunc->GetFunction());
        MIRSymbol *sym = x64CGFunc->GetFunction().GetFormal(i);

        /* Load localrefvar formals so they can be stored in the reflocals. */
        if (x64CGFunc->GetFunction().GetNthParamAttr(i).GetAttr(ATTR_localrefvar) && ploc.reg0 == kRinvalid) {
            CHECK_FATAL(false, "NIY");
        }

        if (!sym->IsPreg()) {
            continue;
        }

        if (ploc.reg0 == kRinvalid) {
            /* Load stack parameters into the vreg. */
            LoadStackArgsToVReg(*sym);
        } else {
            MoveArgsToVReg(ploc, *sym);
        }
    }

    x64CGFunc->GetFirstBB()->InsertAtBeginning(*x64CGFunc->GetDummyBB());
    x64CGFunc->SetCurBB(*formerCurBB);
}
} /* namespace maplebe */