1 //===- subzero/src/IceIntrinsics.cpp - Functions related to intrinsics ----===//
2 //
3 // The Subzero Code Generator
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 ///
10 /// \file
11 /// \brief Implements the Intrinsics utilities for matching and then dispatching
12 /// by name.
13 ///
14 //===----------------------------------------------------------------------===//
15
16 #include "IceIntrinsics.h"
17
18 #include "IceCfg.h"
19 #include "IceCfgNode.h"
20 #include "IceInst.h"
21 #include "IceLiveness.h"
22 #include "IceOperand.h"
23 #include "IceStringPool.h"
24
25 #include <utility>
26
27 namespace Ice {
28
// Compile-time guard: IntrinsicInfo is expected to pack into exactly 4 bytes.
// The table below embeds one per entry, so accidental growth (e.g. a widened
// field in the header) would bloat the whole table; fail loudly instead.
static_assert(sizeof(Intrinsics::IntrinsicInfo) == 4,
              "Unexpected sizeof(IntrinsicInfo)");
31
namespace {

// Expands to the brace-initializer for an IntrinsicInfo: the intrinsic ID plus
// its side-effect, returns-twice, and memory-write attribute flags.
#define INTRIN(ID, SE, RT, MW)                                                 \
  { Intrinsics::ID, Intrinsics::SE, Intrinsics::RT, Intrinsics::MW }

// Build list of intrinsics with their attributes and expected prototypes. List
// is sorted alphabetically.
//
// Each entry pairs a FullIntrinsicInfo with the mangled LLVM intrinsic name
// used for lookup. In every signature array, element 0 is the return type and
// the remaining elements are the argument types; the trailing integer literal
// is the total element count (return type + arguments, i.e. NumTypes).
const struct IceIntrinsicsEntry_ {
  Intrinsics::FullIntrinsicInfo Info;
  const char *IntrinsicName;
} IceIntrinsicsTable[] = {

// Overloaded intrinsics (atomics, bswap, ctlz, ...) get one table entry per
// overload, generated by the *Init helper macros below; each macro is
// #undef'd immediately after its last use.
// Cmpxchg signature: ret, ptr, expected, desired, success order, failure
// order.
#define AtomicCmpxchgInit(Overload, NameSuffix)                                \
  {                                                                            \
    {                                                                          \
      INTRIN(AtomicCmpxchg, SideEffects_T, ReturnsTwice_F, MemoryWrite_T),     \
      {Overload, IceType_i32, Overload, Overload, IceType_i32, IceType_i32},   \
      6                                                                        \
    }                                                                          \
    , "llvm.nacl.atomic.cmpxchg." NameSuffix                                   \
  }
    AtomicCmpxchgInit(IceType_i8, "i8"),
    AtomicCmpxchgInit(IceType_i16, "i16"),
    AtomicCmpxchgInit(IceType_i32, "i32"),
    AtomicCmpxchgInit(IceType_i64, "i64"),
#undef AtomicCmpxchgInit

    {{INTRIN(AtomicFence, SideEffects_T, ReturnsTwice_F, MemoryWrite_T),
      {IceType_void, IceType_i32},
      2},
     "llvm.nacl.atomic.fence"},
    {{INTRIN(AtomicFenceAll, SideEffects_T, ReturnsTwice_F, MemoryWrite_T),
      {IceType_void},
      1},
     "llvm.nacl.atomic.fence.all"},
    {{INTRIN(AtomicIsLockFree, SideEffects_F, ReturnsTwice_F, MemoryWrite_F),
      {IceType_i1, IceType_i32, IceType_i32},
      3},
     "llvm.nacl.atomic.is.lock.free"},

// Load signature: ret, ptr, memory order.
// NOTE(review): AtomicLoad is marked MemoryWrite_T even though it is a load —
// presumably to be conservative about code motion around it; confirm before
// changing.
#define AtomicLoadInit(Overload, NameSuffix)                                   \
  {                                                                            \
    {                                                                          \
      INTRIN(AtomicLoad, SideEffects_T, ReturnsTwice_F, MemoryWrite_T),        \
      {Overload, IceType_i32, IceType_i32}, 3                                  \
    }                                                                          \
    , "llvm.nacl.atomic.load." NameSuffix                                      \
  }
    AtomicLoadInit(IceType_i8, "i8"),
    AtomicLoadInit(IceType_i16, "i16"),
    AtomicLoadInit(IceType_i32, "i32"),
    AtomicLoadInit(IceType_i64, "i64"),
#undef AtomicLoadInit

// RMW signature: ret, operation, ptr, operand, memory order.
#define AtomicRMWInit(Overload, NameSuffix)                                    \
  {                                                                            \
    {                                                                          \
      INTRIN(AtomicRMW, SideEffects_T, ReturnsTwice_F, MemoryWrite_T)          \
      , {Overload, IceType_i32, IceType_i32, Overload, IceType_i32}, 5         \
    }                                                                          \
    , "llvm.nacl.atomic.rmw." NameSuffix                                       \
  }
    AtomicRMWInit(IceType_i8, "i8"),
    AtomicRMWInit(IceType_i16, "i16"),
    AtomicRMWInit(IceType_i32, "i32"),
    AtomicRMWInit(IceType_i64, "i64"),
#undef AtomicRMWInit

// Store signature: void, value, ptr, memory order.
#define AtomicStoreInit(Overload, NameSuffix)                                  \
  {                                                                            \
    {                                                                          \
      INTRIN(AtomicStore, SideEffects_T, ReturnsTwice_F, MemoryWrite_T)        \
      , {IceType_void, Overload, IceType_i32, IceType_i32}, 4                  \
    }                                                                          \
    , "llvm.nacl.atomic.store." NameSuffix                                     \
  }
    AtomicStoreInit(IceType_i8, "i8"),
    AtomicStoreInit(IceType_i16, "i16"),
    AtomicStoreInit(IceType_i32, "i32"),
    AtomicStoreInit(IceType_i64, "i64"),
#undef AtomicStoreInit

#define BswapInit(Overload, NameSuffix)                                        \
  {                                                                            \
    {                                                                          \
      INTRIN(Bswap, SideEffects_F, ReturnsTwice_F, MemoryWrite_F)              \
      , {Overload, Overload}, 2                                                \
    }                                                                          \
    , "llvm.bswap." NameSuffix                                                 \
  }
    BswapInit(IceType_i16, "i16"),
    BswapInit(IceType_i32, "i32"),
    BswapInit(IceType_i64, "i64"),
#undef BswapInit

// Ctlz/Cttz signature: ret, value, is_zero_undef flag (i1).
#define CtlzInit(Overload, NameSuffix)                                         \
  {                                                                            \
    {                                                                          \
      INTRIN(Ctlz, SideEffects_F, ReturnsTwice_F, MemoryWrite_F)               \
      , {Overload, Overload, IceType_i1}, 3                                    \
    }                                                                          \
    , "llvm.ctlz." NameSuffix                                                  \
  }
    CtlzInit(IceType_i32, "i32"),
    CtlzInit(IceType_i64, "i64"),
#undef CtlzInit

#define CtpopInit(Overload, NameSuffix)                                        \
  {                                                                            \
    {                                                                          \
      INTRIN(Ctpop, SideEffects_F, ReturnsTwice_F, MemoryWrite_F)              \
      , {Overload, Overload}, 2                                                \
    }                                                                          \
    , "llvm.ctpop." NameSuffix                                                 \
  }
    CtpopInit(IceType_i32, "i32"),
    CtpopInit(IceType_i64, "i64"),
#undef CtpopInit

#define CttzInit(Overload, NameSuffix)                                         \
  {                                                                            \
    {                                                                          \
      INTRIN(Cttz, SideEffects_F, ReturnsTwice_F, MemoryWrite_F)               \
      , {Overload, Overload, IceType_i1}, 3                                    \
    }                                                                          \
    , "llvm.cttz." NameSuffix                                                  \
  }
    CttzInit(IceType_i32, "i32"),
    CttzInit(IceType_i64, "i64"),
#undef CttzInit

#define FabsInit(Overload, NameSuffix)                                         \
  {                                                                            \
    {                                                                          \
      INTRIN(Fabs, SideEffects_F, ReturnsTwice_F, MemoryWrite_F),              \
      {Overload, Overload}, 2                                                  \
    }                                                                          \
    , "llvm.fabs." NameSuffix                                                  \
  }
    FabsInit(IceType_f32, "f32"),
    FabsInit(IceType_f64, "f64"),
    FabsInit(IceType_v4f32, "v4f32"),
#undef FabsInit

    {{INTRIN(Longjmp, SideEffects_T, ReturnsTwice_F, MemoryWrite_F),
      {IceType_void, IceType_i32, IceType_i32},
      3},
     "llvm.nacl.longjmp"},
    // memcpy/memmove signature: void, dest, src, len, align, isvolatile.
    {{INTRIN(Memcpy, SideEffects_T, ReturnsTwice_F, MemoryWrite_T),
      {IceType_void, IceType_i32, IceType_i32, IceType_i32, IceType_i32,
       IceType_i1},
      6},
     "llvm.memcpy.p0i8.p0i8.i32"},
    {{INTRIN(Memmove, SideEffects_T, ReturnsTwice_F, MemoryWrite_T),
      {IceType_void, IceType_i32, IceType_i32, IceType_i32, IceType_i32,
       IceType_i1},
      6},
     "llvm.memmove.p0i8.p0i8.i32"},
    // memset signature: void, dest, fill byte (i8), len, align, isvolatile.
    {{INTRIN(Memset, SideEffects_T, ReturnsTwice_F, MemoryWrite_T),
      {IceType_void, IceType_i32, IceType_i8, IceType_i32, IceType_i32,
       IceType_i1},
      6},
     "llvm.memset.p0i8.i32"},
    {{INTRIN(NaClReadTP, SideEffects_F, ReturnsTwice_F, MemoryWrite_F),
      {IceType_i32},
      1},
     "llvm.nacl.read.tp"},
    // Setjmp is the only entry with ReturnsTwice_T.
    {{INTRIN(Setjmp, SideEffects_T, ReturnsTwice_T, MemoryWrite_T),
      {IceType_i32, IceType_i32},
      2},
     "llvm.nacl.setjmp"},

#define SqrtInit(Overload, NameSuffix)                                         \
  {                                                                            \
    {                                                                          \
      INTRIN(Sqrt, SideEffects_F, ReturnsTwice_F, MemoryWrite_F),              \
      {Overload, Overload}, 2                                                  \
    }                                                                          \
    , "llvm.sqrt." NameSuffix                                                  \
  }
    SqrtInit(IceType_f32, "f32"),
    SqrtInit(IceType_f64, "f64"),
#undef SqrtInit

    {{INTRIN(Stacksave, SideEffects_T, ReturnsTwice_F, MemoryWrite_F),
      {IceType_i32},
      1},
     "llvm.stacksave"},
    {{INTRIN(Stackrestore, SideEffects_T, ReturnsTwice_F, MemoryWrite_F),
      {IceType_void, IceType_i32},
      2},
     "llvm.stackrestore"},
    {{INTRIN(Trap, SideEffects_T, ReturnsTwice_F, MemoryWrite_F),
      {IceType_void},
      1},
     "llvm.trap"}};
const size_t IceIntrinsicsTableSize = llvm::array_lengthof(IceIntrinsicsTable);

#undef INTRIN

} // end of anonymous namespace
233
Intrinsics(GlobalContext * Ctx)234 Intrinsics::Intrinsics(GlobalContext *Ctx) {
235 for (size_t I = 0; I < IceIntrinsicsTableSize; ++I) {
236 const struct IceIntrinsicsEntry_ &Entry = IceIntrinsicsTable[I];
237 assert(Entry.Info.NumTypes <= kMaxIntrinsicParameters);
238 Map.insert(
239 std::make_pair(Ctx->getGlobalString(Entry.IntrinsicName), Entry.Info));
240 }
241 }
242
find(GlobalString Name,bool & Error) const243 const Intrinsics::FullIntrinsicInfo *Intrinsics::find(GlobalString Name,
244 bool &Error) const {
245 static constexpr char LLVMPrefix[] = "llvm.";
246 constexpr size_t LLVMPrefixLen = llvm::array_lengthof(LLVMPrefix) - 1;
247 Error = false;
248 if (Name.toString().substr(0, LLVMPrefixLen) != LLVMPrefix)
249 return nullptr;
250 auto Iter = Map.find(Name);
251 if (Iter == Map.end()) {
252 Error = true;
253 return nullptr;
254 }
255 return &Iter->second;
256 }
257
258 namespace {
259
260 // Returns whether PNaCl allows the given memory ordering in general.
isMemoryOrderValidPNaCl(uint64_t Order)261 bool isMemoryOrderValidPNaCl(uint64_t Order) {
262 switch (Order) {
263 case Intrinsics::MemoryOrderAcquire:
264 case Intrinsics::MemoryOrderRelease:
265 case Intrinsics::MemoryOrderAcquireRelease:
266 case Intrinsics::MemoryOrderSequentiallyConsistent:
267 return true;
268 default:
269 return false;
270 }
271 }
272
273 } // end of anonymous namespace
274
// Validates the memory-ordering operand(s) of an atomic intrinsic. Order is
// the primary (success) ordering; OrderOther is the cmpxchg failure ordering
// and is ignored for every other intrinsic ID.
bool Intrinsics::isMemoryOrderValid(IntrinsicID ID, uint64_t Order,
                                    uint64_t OrderOther) {
  // Reject orderings not allowed in PNaCl.
  if (!isMemoryOrderValidPNaCl(Order))
    return false;
  if (ID == AtomicCmpxchg && !isMemoryOrderValidPNaCl(OrderOther))
    return false;
  // Reject orderings not allowed by C++11.
  switch (ID) {
  default:
    // Only atomic intrinsics should reach this function.
    llvm_unreachable("isMemoryOrderValid: Unknown IntrinsicID");
    return false;
  case AtomicFence:
  case AtomicFenceAll:
  case AtomicRMW:
    // Any PNaCl-valid ordering is fine for these.
    return true;
  case AtomicCmpxchg:
    // Reject orderings that are disallowed by C++11 as invalid combinations
    // for cmpxchg.
    switch (OrderOther) {
    case MemoryOrderRelaxed:
    case MemoryOrderConsume:
    case MemoryOrderAcquire:
    case MemoryOrderSequentiallyConsistent:
      // The failure ordering may not be stronger than the success ordering.
      // NOTE(review): this comparison assumes the MemoryOrder enumerators are
      // numbered in increasing order of strength — confirm in the header.
      if (OrderOther > Order)
        return false;
      // A release success ordering pairs only with a relaxed failure ordering
      // (the failure ordering may not include release semantics).
      if (Order == MemoryOrderRelease && OrderOther != MemoryOrderRelaxed)
        return false;
      return true;
    default:
      // Release / AcquireRelease are never valid failure orderings.
      return false;
    }
  case AtomicLoad:
    // A load may not have release semantics.
    switch (Order) {
    case MemoryOrderRelease:
    case MemoryOrderAcquireRelease:
      return false;
    default:
      return true;
    }
  case AtomicStore:
    // A store may not have acquire/consume semantics.
    switch (Order) {
    case MemoryOrderConsume:
    case MemoryOrderAcquire:
    case MemoryOrderAcquireRelease:
      return false;
    default:
      return true;
    }
  }
}
326
// Checks that Call's result variable and argument types match this
// intrinsic's signature. Returns IsValidCall on success; otherwise returns
// the kind of mismatch, setting ArgIndex to the offending argument position
// when the result is WrongCallArgType.
Intrinsics::ValidateCallValue
Intrinsics::FullIntrinsicInfo::validateCall(const InstCall *Call,
                                            SizeT &ArgIndex) const {
  // Signature always holds at least the return type.
  assert(NumTypes >= 1);
  Variable *Result = Call->getDest();
  if (Result == nullptr) {
    // A call that ignores its result is acceptable only for void intrinsics.
    if (getReturnType() != IceType_void)
      return Intrinsics::BadReturnType;
  } else if (getReturnType() != Result->getType()) {
    return Intrinsics::BadReturnType;
  }
  if (Call->getNumArgs() != getNumArgs()) {
    return Intrinsics::WrongNumOfArgs;
  }
  // Signature[0] is the return type, so call argument (i - 1) is checked
  // against Signature[i].
  for (size_t i = 1; i < NumTypes; ++i) {
    if (Call->getArg(i - 1)->getType() != Signature[i]) {
      ArgIndex = i - 1;
      return Intrinsics::WrongCallArgType;
    }
  }
  return Intrinsics::IsValidCall;
}
349
getArgType(SizeT Index) const350 Type Intrinsics::FullIntrinsicInfo::getArgType(SizeT Index) const {
351 assert(NumTypes > 1);
352 assert(Index + 1 < NumTypes);
353 return Signature[Index + 1];
354 }
355
356 } // end of namespace Ice
357