// Copyright 2017 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

#ifndef ABSL_BASE_INTERNAL_ENDIAN_H_
#define ABSL_BASE_INTERNAL_ENDIAN_H_

// The following guarantees declaration of the byte swap functions
#ifdef _MSC_VER
#include <stdlib.h>  // NOLINT(build/include)
#elif defined(__FreeBSD__)
#include <sys/endian.h>
#elif defined(__GLIBC__)
#include <byteswap.h>  // IWYU pragma: export
#endif

#include <cstdint>
#include "absl/base/casts.h"
#include "absl/base/config.h"
#include "absl/base/internal/unaligned_access.h"
#include "absl/base/port.h"

namespace absl {
ABSL_NAMESPACE_BEGIN

// Use compiler byte-swapping intrinsics if they are available.  32-bit
// and 64-bit versions are available in Clang and GCC as of GCC 4.3.0.
// The 16-bit version is available in Clang and GCC only as of GCC 4.8.0.
// For simplicity, we enable them all only for GCC 4.8.0 or later.
#if defined(__clang__) || \
    (defined(__GNUC__) && \
     ((__GNUC__ == 4 && __GNUC_MINOR__ >= 8) || __GNUC__ >= 5))
inline uint64_t gbswap_64(uint64_t host_int) {
  return __builtin_bswap64(host_int);
}
inline uint32_t gbswap_32(uint32_t host_int) {
  return __builtin_bswap32(host_int);
}
inline uint16_t gbswap_16(uint16_t host_int) {
  return __builtin_bswap16(host_int);
}

#elif defined(_MSC_VER)
inline uint64_t gbswap_64(uint64_t host_int) {
  return _byteswap_uint64(host_int);
}
inline uint32_t gbswap_32(uint32_t host_int) {
  return _byteswap_ulong(host_int);
}
inline uint16_t gbswap_16(uint16_t host_int) {
  return _byteswap_ushort(host_int);
}

#else
inline uint64_t gbswap_64(uint64_t host_int) {
#if defined(__GNUC__) && defined(__x86_64__) && !defined(__APPLE__)
  // Adapted from /usr/include/byteswap.h.  Not available on Mac.
  if (__builtin_constant_p(host_int)) {
    return __bswap_constant_64(host_int);
  } else {
    uint64_t result;
    __asm__("bswap %0" : "=r"(result) : "0"(host_int));
    return result;
  }
#elif defined(__GLIBC__)
  return bswap_64(host_int);
#else
  return (((host_int & uint64_t{0xFF}) << 56) |
          ((host_int & uint64_t{0xFF00}) << 40) |
          ((host_int & uint64_t{0xFF0000}) << 24) |
          ((host_int & uint64_t{0xFF000000}) << 8) |
          ((host_int & uint64_t{0xFF00000000}) >> 8) |
          ((host_int & uint64_t{0xFF0000000000}) >> 24) |
          ((host_int & uint64_t{0xFF000000000000}) >> 40) |
          ((host_int & uint64_t{0xFF00000000000000}) >> 56));
#endif  // bswap_64
}

inline uint32_t gbswap_32(uint32_t host_int) {
#if defined(__GLIBC__)
  return bswap_32(host_int);
#else
  return (((host_int & uint32_t{0xFF}) << 24) |
          ((host_int & uint32_t{0xFF00}) << 8) |
          ((host_int & uint32_t{0xFF0000}) >> 8) |
          ((host_int & uint32_t{0xFF000000}) >> 24));
#endif
}

inline uint16_t gbswap_16(uint16_t host_int) {
#if defined(__GLIBC__)
  return bswap_16(host_int);
#else
  return (((host_int & uint16_t{0xFF}) << 8) |
          ((host_int & uint16_t{0xFF00}) >> 8));
#endif
}

#endif  // intrinsics available
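
// Example (illustrative): each gbswap_* function reverses the byte order of
// its argument, regardless of which implementation above was selected.
//
//   gbswap_16(0x1234) == 0x3412
//   gbswap_32(0x01020304) == 0x04030201
//   gbswap_64(0x0102030405060708) == 0x0807060504030201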

#ifdef ABSL_IS_LITTLE_ENDIAN

// Definitions for ntohl etc. that don't require us to include
// netinet/in.h. We wrap gbswap_32 and gbswap_16 in functions rather
// than just #defining them because in debug mode, gcc doesn't
// correctly handle the (rather involved) definitions of bswap_32.
// gcc guarantees that inline functions are as fast as macros, so
// this isn't a performance hit.
inline uint16_t ghtons(uint16_t x) { return gbswap_16(x); }
inline uint32_t ghtonl(uint32_t x) { return gbswap_32(x); }
inline uint64_t ghtonll(uint64_t x) { return gbswap_64(x); }

#elif defined ABSL_IS_BIG_ENDIAN

// These definitions are simpler on big-endian machines.
// These are functions instead of macros to avoid self-assignment warnings
// on calls such as "i = ghtonl(i);".  This also provides type checking.
inline uint16_t ghtons(uint16_t x) { return x; }
inline uint32_t ghtonl(uint32_t x) { return x; }
inline uint64_t ghtonll(uint64_t x) { return x; }

#else
#error \
    "Unsupported byte order: Either ABSL_IS_BIG_ENDIAN or " \
       "ABSL_IS_LITTLE_ENDIAN must be defined"
#endif  // byte order

inline uint16_t gntohs(uint16_t x) { return ghtons(x); }
inline uint32_t gntohl(uint32_t x) { return ghtonl(x); }
inline uint64_t gntohll(uint64_t x) { return ghtonll(x); }
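
// Example (illustrative): ghtonl/gntohl mirror htonl/ntohl without requiring
// netinet/in.h. On a little-endian host they byte-swap; on a big-endian host
// they are the identity. The round trip restores the value either way:
//
//   uint32_t net = ghtonl(0x01020304);  // 0x04030201 on a little-endian host
//   uint32_t host = gntohl(net);        // host == 0x01020304 on any host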

// Utilities to convert numbers between the current host's native byte
// order and little-endian byte order.
//
// Load/Store methods are alignment safe.
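//
// Example (illustrative): FromHost32 returns the little-endian representation
// of a host-order value and ToHost32 inverts it. On a little-endian host both
// are the identity; on a big-endian host both byte-swap.
//
//   uint32_t le = little_endian::FromHost32(0x01020304);
//   uint32_t host = little_endian::ToHost32(le);  // host == 0x01020304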
namespace little_endian {
// Conversion functions.
#ifdef ABSL_IS_LITTLE_ENDIAN

inline uint16_t FromHost16(uint16_t x) { return x; }
inline uint16_t ToHost16(uint16_t x) { return x; }

inline uint32_t FromHost32(uint32_t x) { return x; }
inline uint32_t ToHost32(uint32_t x) { return x; }

inline uint64_t FromHost64(uint64_t x) { return x; }
inline uint64_t ToHost64(uint64_t x) { return x; }

inline constexpr bool IsLittleEndian() { return true; }

#elif defined ABSL_IS_BIG_ENDIAN

inline uint16_t FromHost16(uint16_t x) { return gbswap_16(x); }
inline uint16_t ToHost16(uint16_t x) { return gbswap_16(x); }

inline uint32_t FromHost32(uint32_t x) { return gbswap_32(x); }
inline uint32_t ToHost32(uint32_t x) { return gbswap_32(x); }

inline uint64_t FromHost64(uint64_t x) { return gbswap_64(x); }
inline uint64_t ToHost64(uint64_t x) { return gbswap_64(x); }

inline constexpr bool IsLittleEndian() { return false; }

#endif /* ENDIAN */

inline uint8_t FromHost(uint8_t x) { return x; }
inline uint16_t FromHost(uint16_t x) { return FromHost16(x); }
inline uint32_t FromHost(uint32_t x) { return FromHost32(x); }
inline uint64_t FromHost(uint64_t x) { return FromHost64(x); }
inline uint8_t ToHost(uint8_t x) { return x; }
inline uint16_t ToHost(uint16_t x) { return ToHost16(x); }
inline uint32_t ToHost(uint32_t x) { return ToHost32(x); }
inline uint64_t ToHost(uint64_t x) { return ToHost64(x); }

inline int8_t FromHost(int8_t x) { return x; }
inline int16_t FromHost(int16_t x) {
  return bit_cast<int16_t>(FromHost16(bit_cast<uint16_t>(x)));
}
inline int32_t FromHost(int32_t x) {
  return bit_cast<int32_t>(FromHost32(bit_cast<uint32_t>(x)));
}
inline int64_t FromHost(int64_t x) {
  return bit_cast<int64_t>(FromHost64(bit_cast<uint64_t>(x)));
}
inline int8_t ToHost(int8_t x) { return x; }
inline int16_t ToHost(int16_t x) {
  return bit_cast<int16_t>(ToHost16(bit_cast<uint16_t>(x)));
}
inline int32_t ToHost(int32_t x) {
  return bit_cast<int32_t>(ToHost32(bit_cast<uint32_t>(x)));
}
inline int64_t ToHost(int64_t x) {
  return bit_cast<int64_t>(ToHost64(bit_cast<uint64_t>(x)));
}

// Functions to do unaligned loads and stores in little-endian order.
inline uint16_t Load16(const void *p) {
  return ToHost16(ABSL_INTERNAL_UNALIGNED_LOAD16(p));
}

inline void Store16(void *p, uint16_t v) {
  ABSL_INTERNAL_UNALIGNED_STORE16(p, FromHost16(v));
}

inline uint32_t Load32(const void *p) {
  return ToHost32(ABSL_INTERNAL_UNALIGNED_LOAD32(p));
}

inline void Store32(void *p, uint32_t v) {
  ABSL_INTERNAL_UNALIGNED_STORE32(p, FromHost32(v));
}

inline uint64_t Load64(const void *p) {
  return ToHost64(ABSL_INTERNAL_UNALIGNED_LOAD64(p));
}

inline void Store64(void *p, uint64_t v) {
  ABSL_INTERNAL_UNALIGNED_STORE64(p, FromHost64(v));
}
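
// Example (illustrative): Load/Store round-trip through a byte buffer; the
// pointer does not need to be aligned.
//
//   char buf[8];
//   Store64(buf, uint64_t{0x0102030405060708});
//   // buf now holds {0x08, 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01}
//   uint64_t v = Load64(buf);  // v == 0x0102030405060708 on any host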

}  // namespace little_endian

// Utilities to convert numbers between the current host's native byte
// order and big-endian byte order (same as network byte order).
//
// Load/Store methods are alignment safe.
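//
// Example (illustrative): FromHost16 plays the role of htons; on a
// little-endian host it byte-swaps, on a big-endian host it is the identity.
//
//   uint16_t port = 0x1F90;                       // 8080
//   uint16_t net = big_endian::FromHost16(port);
//   uint16_t back = big_endian::ToHost16(net);    // back == 0x1F90 on any host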
namespace big_endian {
#ifdef ABSL_IS_LITTLE_ENDIAN

inline uint16_t FromHost16(uint16_t x) { return gbswap_16(x); }
inline uint16_t ToHost16(uint16_t x) { return gbswap_16(x); }

inline uint32_t FromHost32(uint32_t x) { return gbswap_32(x); }
inline uint32_t ToHost32(uint32_t x) { return gbswap_32(x); }

inline uint64_t FromHost64(uint64_t x) { return gbswap_64(x); }
inline uint64_t ToHost64(uint64_t x) { return gbswap_64(x); }

inline constexpr bool IsLittleEndian() { return true; }

#elif defined ABSL_IS_BIG_ENDIAN

inline uint16_t FromHost16(uint16_t x) { return x; }
inline uint16_t ToHost16(uint16_t x) { return x; }

inline uint32_t FromHost32(uint32_t x) { return x; }
inline uint32_t ToHost32(uint32_t x) { return x; }

inline uint64_t FromHost64(uint64_t x) { return x; }
inline uint64_t ToHost64(uint64_t x) { return x; }

inline constexpr bool IsLittleEndian() { return false; }

#endif /* ENDIAN */

inline uint8_t FromHost(uint8_t x) { return x; }
inline uint16_t FromHost(uint16_t x) { return FromHost16(x); }
inline uint32_t FromHost(uint32_t x) { return FromHost32(x); }
inline uint64_t FromHost(uint64_t x) { return FromHost64(x); }
inline uint8_t ToHost(uint8_t x) { return x; }
inline uint16_t ToHost(uint16_t x) { return ToHost16(x); }
inline uint32_t ToHost(uint32_t x) { return ToHost32(x); }
inline uint64_t ToHost(uint64_t x) { return ToHost64(x); }

inline int8_t FromHost(int8_t x) { return x; }
inline int16_t FromHost(int16_t x) {
  return bit_cast<int16_t>(FromHost16(bit_cast<uint16_t>(x)));
}
inline int32_t FromHost(int32_t x) {
  return bit_cast<int32_t>(FromHost32(bit_cast<uint32_t>(x)));
}
inline int64_t FromHost(int64_t x) {
  return bit_cast<int64_t>(FromHost64(bit_cast<uint64_t>(x)));
}
inline int8_t ToHost(int8_t x) { return x; }
inline int16_t ToHost(int16_t x) {
  return bit_cast<int16_t>(ToHost16(bit_cast<uint16_t>(x)));
}
inline int32_t ToHost(int32_t x) {
  return bit_cast<int32_t>(ToHost32(bit_cast<uint32_t>(x)));
}
inline int64_t ToHost(int64_t x) {
  return bit_cast<int64_t>(ToHost64(bit_cast<uint64_t>(x)));
}

// Functions to do unaligned loads and stores in big-endian order.
inline uint16_t Load16(const void *p) {
  return ToHost16(ABSL_INTERNAL_UNALIGNED_LOAD16(p));
}

inline void Store16(void *p, uint16_t v) {
  ABSL_INTERNAL_UNALIGNED_STORE16(p, FromHost16(v));
}

inline uint32_t Load32(const void *p) {
  return ToHost32(ABSL_INTERNAL_UNALIGNED_LOAD32(p));
}

inline void Store32(void *p, uint32_t v) {
  ABSL_INTERNAL_UNALIGNED_STORE32(p, FromHost32(v));
}

inline uint64_t Load64(const void *p) {
  return ToHost64(ABSL_INTERNAL_UNALIGNED_LOAD64(p));
}

inline void Store64(void *p, uint64_t v) {
  ABSL_INTERNAL_UNALIGNED_STORE64(p, FromHost64(v));
}
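
// Example (illustrative): reading a big-endian (network-order) field directly
// from a wire-format buffer; the pointer need not be aligned.
//
//   const unsigned char frame[4] = {0x12, 0x34, 0x56, 0x78};
//   uint32_t value = Load32(frame);  // value == 0x12345678 on any host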

}  // namespace big_endian

ABSL_NAMESPACE_END
}  // namespace absl

#endif  // ABSL_BASE_INTERNAL_ENDIAN_H_