/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
Register file implementation.
Reserve registers.
*/

#include "registers_description.h"
#include "target/aarch32/target.h"
#include "regfile.h"
25 namespace panda::compiler::aarch32 {
/**
 * Default aarch32 calling convention registers
 * Callee
 *    r4-r11,r14
 *    d8-d15
 * Caller
 *    (r0-r3),r12
 *     d0-d7
 */
Aarch32RegisterDescription(ArenaAllocator * allocator)35 Aarch32RegisterDescription::Aarch32RegisterDescription(ArenaAllocator *allocator)
36     : RegistersDescription(allocator, Arch::AARCH32),
37       aarch32_reg_list_(allocator->Adapter()),
38       used_regs_(allocator->Adapter())
39 {
40     // Initialize Masm
41     for (uint32_t i = 0; i <= MAX_NUM_REGS; ++i) {
42         aarch32_reg_list_.emplace_back(Reg(i, INT32_TYPE));
43         aarch32_reg_list_.emplace_back(Reg(i, FLOAT32_TYPE));
44     }
45 
46     for (auto i = vixl::aarch32::r4.GetCode(); i < vixl::aarch32::r8.GetCode(); ++i) {
47         caller_savedv_.set(i);
48     }
49 }
50 
IsValid() const51 bool Aarch32RegisterDescription::IsValid() const
52 {
53     return !aarch32_reg_list_.empty();
54 }
55 
IsRegUsed(ArenaVector<Reg> vec_reg,Reg reg)56 bool Aarch32RegisterDescription::IsRegUsed(ArenaVector<Reg> vec_reg, Reg reg)
57 {
58     auto equality = [reg](Reg in) { return (reg.GetId() == in.GetId()) && (reg.GetType() == in.GetType()); };
59     return (std::find_if(vec_reg.begin(), vec_reg.end(), equality) != vec_reg.end());
60 }
61 
62 /* static */
IsTmp(Reg reg)63 bool Aarch32RegisterDescription::IsTmp(Reg reg)
64 {
65     if (reg.IsScalar()) {
66         for (auto it : AARCH32_TMP_REG) {
67             if (it == reg.GetId()) {
68                 return true;
69             }
70         }
71         return false;
72     }
73     ASSERT(reg.IsFloat());
74     for (auto it : AARCH32_TMP_VREG) {
75         if (it == reg.GetId()) {
76             return true;
77         }
78     }
79     return false;
80 }
81 
GetCalleeSaved()82 ArenaVector<Reg> Aarch32RegisterDescription::GetCalleeSaved()
83 {
84     ArenaVector<Reg> out(GetAllocator()->Adapter());
85     ASSERT(callee_saved_.size() == callee_savedv_.size());
86     for (size_t i = 0; i < callee_saved_.size(); ++i) {
87         if (callee_saved_.test(i)) {
88             out.emplace_back(Reg(i, INT32_TYPE));
89         }
90         if ((callee_savedv_.test(i))) {
91             out.emplace_back(Reg(i, FLOAT32_TYPE));
92         }
93     }
94     return out;
95 }
96 
SetCalleeSaved(const ArenaVector<Reg> & regs)97 void Aarch32RegisterDescription::SetCalleeSaved([[maybe_unused]] const ArenaVector<Reg> &regs)
98 {
99     callee_saved_ = CALLEE_SAVED;
100     callee_savedv_ = CALLEE_SAVEDV;
101 }
102 
/**
 * Record the registers actually used by the compiled code and prune the
 * callee-/caller-saved masks so only used, non-temporary registers remain
 * to be preserved. Also tracks a register id usable for stack-alignment
 * spills in each set (defaulting to r10).
 */
void Aarch32RegisterDescription::SetUsedRegs(const ArenaVector<Reg> &regs)
{
    used_regs_ = regs;

    ASSERT(callee_saved_.size() == caller_saved_.size());
    ASSERT(callee_savedv_.size() == caller_savedv_.size());

    allignment_reg_callee_ = vixl::aarch32::r10.GetCode();
    // TODO (pishin) need to resolve conflict
    allignment_reg_caller_ = vixl::aarch32::r10.GetCode();
    for (size_t i = 0; i < callee_saved_.size(); ++i) {
        // IsRegUsed use used_regs_ variable
        // NOTE(review): usage is queried with the 64-bit types (INT64/FLOAT64)
        // while masks/temps use 32-bit ids — presumably the 64-bit entry marks
        // the whole register (pair); confirm against callers of SetUsedRegs.
        bool scalar_used = IsRegUsed(used_regs_, Reg(i, INT64_TYPE));
        bool is_tmp = IsTmp(Reg(i, INT32_TYPE));
        // Unused or temporary registers are dropped from the callee-saved
        // mask; the freed id becomes the alignment-spill candidate.
        if ((!scalar_used && ((callee_saved_.test(i)))) || is_tmp) {
            callee_saved_.reset(i);
            allignment_reg_callee_ = i;
        }
        if (!scalar_used && ((caller_saved_.test(i)))) {
            allignment_reg_caller_ = i;
        }
        bool is_vtmp = IsTmp(Reg(i, FLOAT32_TYPE));

        bool vector_used = IsRegUsed(used_regs_, Reg(i, FLOAT64_TYPE));
        if ((!vector_used && ((callee_savedv_.test(i)))) || is_vtmp) {
            callee_savedv_.reset(i);
        }
        if (!vector_used && ((caller_savedv_.test(i)))) {
            caller_savedv_.reset(i);
        }
        // Ids beyond the double-word register range have no pair half below.
        if (i > (AVAILABLE_DOUBLE_WORD_REGISTERS << 1U)) {
            continue;
        }
        // NOTE(review): clearing i + 1 appears to release the second half of
        // a register pair when the pair is unused — confirm.
        if (!scalar_used && ((callee_saved_.test(i + 1)))) {
            callee_saved_.reset(i + 1);
        }
    }

    // pc is never explicitly saved/restored.
    callee_saved_.reset(vixl::aarch32::pc.GetCode());
    caller_saved_.reset(vixl::aarch32::pc.GetCode());
}
144 
145 }  // namespace panda::compiler::aarch32
146