/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "insn.h"
#include "isa.h"
#include "cg.h"
namespace maplebe {
bool Insn::IsMachineInstruction() const
{
    return md && md->IsPhysicalInsn() && Globals::GetInstance()->GetTarget()->IsTargetInsn(mOp);
}
/* a phi is not a physical insn */
bool Insn::IsPhi() const
{
    return md ? md->IsPhi() : false;
}
bool Insn::IsLoad() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsLoad();
}
bool Insn::IsStore() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsStore();
}
bool Insn::IsMove() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsMove();
}
bool Insn::IsBranch() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsBranch();
}
bool Insn::IsCondBranch() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsCondBranch();
}
bool Insn::IsUnCondBranch() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsUnCondBranch();
}
bool Insn::IsBasicOp() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsBasicOp();
}
bool Insn::IsConversion() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsConversion();
}
bool Insn::IsUnaryOp() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsUnaryOp();
}
bool Insn::IsShift() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsShift();
}
bool Insn::IsCall() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsCall();
}
bool Insn::IsTailCall() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsTailCall();
}
bool Insn::IsAsmInsn() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsInlineAsm();
}
bool Insn::IsDMBInsn() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsDMB();
}
bool Insn::IsAtomic() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsAtomic();
}
bool Insn::IsVolatile() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsVolatile();
}
bool Insn::IsMemAccessBar() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsMemAccessBar();
}
bool Insn::IsMemAccess() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsMemAccess();
}
bool Insn::CanThrow() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->CanThrow();
}
bool Insn::IsVectorOp() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsVectorOp();
}
bool Insn::HasLoop() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->HasLoop();
}
uint32 Insn::GetLatencyType() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->GetLatencyType();
}
uint32 Insn::GetAtomicNum() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->GetAtomicNum();
}
bool Insn::IsSpecialIntrinsic() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsSpecialIntrinsic();
}
bool Insn::IsLoadPair() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsLoadPair();
}
bool Insn::IsStorePair() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsStorePair();
}
bool Insn::IsLoadStorePair() const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->IsLoadStorePair();
}
bool Insn::IsLoadLabel() const
{
    return md->IsLoad() && GetOperand(kInsnSecondOpnd).GetKind() == Operand::kOpdBBAddress;
}
bool Insn::OpndIsDef(uint32 id) const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->GetOpndDes(id)->IsDef();
}
bool Insn::OpndIsUse(uint32 id) const
{
    DEBUG_ASSERT(md, " set insnDescription for insn ");
    return md->GetOpndDes(id)->IsUse();
}
bool Insn::IsClinit() const
{
    return Globals::GetInstance()->GetTarget()->IsClinitInsn(mOp);
}
bool Insn::IsComment() const
{
    return mOp == abstract::MOP_comment && !md->IsPhysicalInsn();
}

bool Insn::IsImmaterialInsn() const
{
    return IsComment();
}

bool Insn::IsPseudo() const
{
    return md && md->IsPhysicalInsn() && Globals::GetInstance()->GetTarget()->IsPseudoInsn(mOp);
}

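/* get/set the first memory-access operand of this insn; GetMemOpnd returns nullptr when there is none */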
Operand *Insn::GetMemOpnd() const
{
    for (uint32 i = 0; i < opnds.size(); ++i) {
        Operand &opnd = GetOperand(i);
        if (opnd.IsMemoryAccessOperand()) {
            return &opnd;
        }
    }
    return nullptr;
}
void Insn::SetMemOpnd(MemOperand *memOpnd)
{
    for (uint32 i = 0; i < static_cast<uint32>(opnds.size()); ++i) {
        Operand &opnd = GetOperand(i);
        if (opnd.IsMemoryAccessOperand()) {
            SetOperand(i, *memOpnd);
            return;
        }
    }
}

bool Insn::IsRegDefined(regno_t regNO) const
{
    return GetDefRegs().count(regNO);
}

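/* collect the registers defined by this insn, including base registers updated by pre/post-indexed memory operands */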
std::set<uint32> Insn::GetDefRegs() const
{
    std::set<uint32> defRegNOs;
    size_t opndNum = opnds.size();
    for (uint32 i = 0; i < opndNum; ++i) {
        Operand &opnd = GetOperand(i);
        auto *regProp = md->opndMD[i];
        bool isDef = regProp->IsDef();
        if (!isDef && !opnd.IsMemoryAccessOperand()) {
            continue;
        }
        if (opnd.IsList()) {
            for (auto *op : static_cast<ListOperand &>(opnd).GetOperands()) {
                DEBUG_ASSERT(op != nullptr, "invalid operand in list operand");
                defRegNOs.emplace(op->GetRegisterNumber());
            }
        } else if (opnd.IsMemoryAccessOperand()) {
            auto &memOpnd = static_cast<MemOperand &>(opnd);
            RegOperand *base = memOpnd.GetBaseRegister();
            if (base != nullptr) {
                if (memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi &&
                    (memOpnd.IsPostIndexed() || memOpnd.IsPreIndexed())) {
                    DEBUG_ASSERT(!defRegNOs.count(base->GetRegisterNumber()), "duplicate def in one insn");
                    defRegNOs.emplace(base->GetRegisterNumber());
                }
            }
        } else if (opnd.IsConditionCode() || opnd.IsRegister()) {
            defRegNOs.emplace(static_cast<RegOperand &>(opnd).GetRegisterNumber());
        }
    }
    return defRegNOs;
}

#if DEBUG
void Insn::Check() const
{
    if (!md) {
        CHECK_FATAL(false, " need machine description for target insn ");
    }
    /* check if the number of operands matches */
    uint32 insnOperandSize = GetOperandSize();
    if (insnOperandSize != md->GetOpndMDLength()) {
        CHECK_FATAL(false, " the number of operands in instruction does not match machine description ");
    }
    /* check if the type of each operand matches */
    for (uint32 i = 0; i < insnOperandSize; ++i) {
        Operand &opnd = GetOperand(i);
        if (opnd.GetKind() != md->GetOpndDes(i)->GetOperandType()) {
            CHECK_FATAL(false, " operand type does not match machine description ");
        }
    }
}
#endif

Insn *Insn::Clone(MemPool &memPool) const
{
    CHECK_FATAL(false, "NIY");
    return nullptr;
}
Operand *Insn::GetCallTargetOperand() const
{
    DEBUG_ASSERT(IsCall() || IsTailCall(), "should be call");
    return &GetOperand(kInsnFirstOpnd);
}

ListOperand *Insn::GetCallArgumentOperand()
{
    DEBUG_ASSERT(IsCall(), "should be call");
    DEBUG_ASSERT(GetOperand(kInsnSecondOpnd).IsList(), "should be list");
    return &static_cast<ListOperand &>(GetOperand(kInsnSecondOpnd));
}

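/* swap the operands at positions dIndex and sIndex */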
void Insn::CommuteOperands(uint32 dIndex, uint32 sIndex)
{
    Operand *tempCopy = opnds[sIndex];
    opnds[sIndex] = opnds[dIndex];
    opnds[dIndex] = tempCopy;
}

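/* index of the operand that is both defined and used (e.g. a non-intact-indexed memory operand); kInsnMaxOpnd if none */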
uint32 Insn::GetBothDefUseOpnd() const
{
    size_t opndNum = opnds.size();
    uint32 opndIdx = kInsnMaxOpnd;
    if (md->GetAtomicNum() > 1) {
        return opndIdx;
    }
    for (uint32 i = 0; i < opndNum; ++i) {
        auto *opndProp = md->GetOpndDes(i);
        if (opndProp->IsRegUse() && opndProp->IsDef()) {
            DEBUG_ASSERT(opndIdx == kInsnMaxOpnd, "Do not support yet");
            opndIdx = i;
        }
        if (opnds[i]->IsMemoryAccessOperand()) {
            auto *memOpnd = static_cast<MemOperand *>(opnds[i]);
            if (!memOpnd->IsIntactIndexed()) {
                DEBUG_ASSERT(opndIdx == kInsnMaxOpnd, "Do not support yet");
                opndIdx = i;
            }
        }
    }
    return opndIdx;
}

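/* size in bytes of the memory accessed by this insn; load/store pairs access twice the single-operand width */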
uint32 Insn::GetMemoryByteSize() const
{
    DEBUG_ASSERT(IsMemAccess(), "must be memory access insn");
    uint32 res = 0;
    for (size_t i = 0; i < opnds.size(); ++i) {
        if (md->GetOpndDes(i)->GetOperandType() == Operand::kOpdMem) {
            res = md->GetOpndDes(i)->GetSize();
        }
    }
    DEBUG_ASSERT(res, "cannot access empty memory");
    if (IsLoadStorePair()) {
        res = res << 1;
    }
    res = res >> k8BitShift;
    return res;
}

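/* check whether regNO is referenced by any operand: register, operand list, or memory base/index register */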
bool Insn::ScanReg(regno_t regNO) const
{
    uint32 opndNum = GetOperandSize();
    for (uint32 i = 0; i < opndNum; ++i) {
        Operand &opnd = GetOperand(i);
        if (opnd.IsList()) {
            auto &listOpnd = static_cast<ListOperand &>(opnd);
            for (auto listElem : listOpnd.GetOperands()) {
                auto *regOpnd = static_cast<RegOperand *>(listElem);
                DEBUG_ASSERT(regOpnd != nullptr, "parameter operand must be RegOperand");
                if (regNO == regOpnd->GetRegisterNumber()) {
                    return true;
                }
            }
        } else if (opnd.IsMemoryAccessOperand()) {
            auto &memOpnd = static_cast<MemOperand &>(opnd);
            RegOperand *base = memOpnd.GetBaseRegister();
            RegOperand *index = memOpnd.GetIndexRegister();
            if ((base != nullptr && base->GetRegisterNumber() == regNO) ||
                (index != nullptr && index->GetRegisterNumber() == regNO)) {
                return true;
            }
        } else if (opnd.IsRegister()) {
            if (static_cast<RegOperand &>(opnd).GetRegisterNumber() == regNO) {
                return true;
            }
        }
    }
    return false;
}

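/* an access to stack memory cannot throw; otherwise defer to the insn description */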
bool Insn::MayThrow() const
{
    if (md->IsMemAccess() && !IsLoadLabel()) {
        auto *memOpnd = static_cast<MemOperand *>(GetMemOpnd());
        DEBUG_ASSERT(memOpnd != nullptr, "CG invalid memory operand.");
        if (memOpnd->IsStackMem()) {
            return false;
        }
    }
    return md->CanThrow();
}

void Insn::SetMOP(const InsnDesc &idesc)
{
    mOp = idesc.GetOpc();
    md = &idesc;
}

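/* print the insn id, mnemonic, operands and optional vector/deopt info to the maple logger */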
void Insn::Dump() const
{
    DEBUG_ASSERT(md != nullptr, "md should not be nullptr");
    LogInfo::MapleLogger() << "< " << GetId() << " > ";
    LogInfo::MapleLogger() << md->name << "(" << mOp << ")";

    for (uint32 i = 0; i < GetOperandSize(); ++i) {
        Operand &opnd = GetOperand(i);
        LogInfo::MapleLogger() << " (opnd" << i << ": ";
        Globals::GetInstance()->GetTarget()->DumpTargetOperand(opnd, *md->GetOpndDes(i));
        LogInfo::MapleLogger() << ")";
    }

    if (IsVectorOp()) {
        auto *vInsn = static_cast<const VectorInsn *>(this);
        if (vInsn->GetNumOfRegSpec() != 0) {
            LogInfo::MapleLogger() << " (vecSpec: " << vInsn->GetNumOfRegSpec() << ")";
        }
    }
    if (stackMap != nullptr) {
        const auto &deoptVreg2Opnd = stackMap->GetDeoptInfo().GetDeoptBundleInfo();
        if (!deoptVreg2Opnd.empty()) {
            LogInfo::MapleLogger() << " (deopt: ";
            bool isFirstElem = true;
            for (const auto &elem : deoptVreg2Opnd) {
                if (!isFirstElem) {
                    LogInfo::MapleLogger() << ", ";
                } else {
                    isFirstElem = false;
                }
                LogInfo::MapleLogger() << elem.first << ":";
                elem.second->Dump();
            }
            LogInfo::MapleLogger() << ")";
        }
    }
    LogInfo::MapleLogger() << "\n";
}

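/* pop the most recently added register spec; allocate a fresh one when the list is empty */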
VectorRegSpec *VectorInsn::GetAndRemoveRegSpecFromList()
{
    if (regSpecList.empty()) {
        VectorRegSpec *vecSpec = CG::GetCurCGFuncNoConst()->GetMemoryPool()->New<VectorRegSpec>();
        return vecSpec;
    }
    VectorRegSpec *ret = regSpecList.back();
    regSpecList.pop_back();
    return ret;
}
} // namespace maplebe