1 // Protocol Buffers - Google's data interchange format
2 // Copyright 2008 Google Inc. All rights reserved.
3 //
4 // Use of this source code is governed by a BSD-style
5 // license that can be found in the LICENSE file or at
6 // https://developers.google.com/open-source/licenses/bsd
7
8 // A common header that is included across all protobuf headers. We do our best
9 // to avoid #defining any macros here; instead we generally put macros in
10 // port_def.inc and port_undef.inc so they are not visible from outside of
11 // protobuf.
12
13 #ifndef GOOGLE_PROTOBUF_PORT_H__
14 #define GOOGLE_PROTOBUF_PORT_H__
15
16 #include <atomic>
17 #include <cassert>
18 #include <cstddef>
19 #include <cstdint>
20 #include <new>
21 #include <string>
22 #include <type_traits>
23 #include <typeinfo>
24
25
26 #include "absl/base/config.h"
27 #include "absl/base/prefetch.h"
28 #include "absl/meta/type_traits.h"
29 #include "absl/strings/string_view.h"
30 #include "absl/types/optional.h"
31
32 // must be last
33 #include "google/protobuf/port_def.inc"
34
35
36 namespace google {
37 namespace protobuf {
38
39 class MessageLite;
40
41 namespace internal {
42
43 struct MessageTraitsImpl;
44
// Emits an artificial "use" of `var` with zero observable effect, so the
// compiler/linker must keep a reference to the pointed-to symbol alive.
template <typename T>
inline PROTOBUF_ALWAYS_INLINE void StrongPointer(T* var) {
#if defined(__GNUC__)
  // Empty asm with a register input: no instructions beyond materializing
  // `var`, but the compiler cannot drop the reference.
  asm("" : : "r"(var));
#else
  // Portable fallback: a volatile copy whose address is taken cannot be
  // optimized away.
  auto volatile unused = var;
  (void)&unused;  // Use address to avoid an extra load of "unused".
#endif
}
54
#if defined(__x86_64__) && defined(__linux__) && !defined(__APPLE__) && \
    !defined(__ANDROID__) && defined(__clang__) && __clang_major__ >= 19
// Optimized implementation for clang where we can generate a relocation without
// adding runtime instructions.
template <typename T, T ptr>
inline PROTOBUF_ALWAYS_INLINE void StrongPointer() {
  // This injects a relocation in the code path without having to run code, but
  // we can only do it with a newer clang.
  asm(".reloc ., BFD_RELOC_NONE, %p0" ::"Ws"(ptr));
}

// Emits a strong reference to type `T` via its traits; the pointer to
// reference comes from `TraitsImpl::value<T>::StrongPointer()` (see
// MessageTraitsImpl, declared above and defined elsewhere).
template <typename T, typename TraitsImpl = MessageTraitsImpl>
inline PROTOBUF_ALWAYS_INLINE void StrongReferenceToType() {
  static constexpr auto ptr =
      decltype(TraitsImpl::template value<T>)::StrongPointer();
  // This is identical to the implementation of StrongPointer() above, but it
  // has to be explicitly inlined here or else Clang 19 will raise an error in
  // some configurations.
  asm(".reloc ., BFD_RELOC_NONE, %p0" ::"Ws"(ptr));
}
#else   // .reloc
// Portable fallback. It usually generates a single LEA instruction or
// equivalent. Delegates to the runtime StrongPointer(T*) above.
template <typename T, T ptr>
inline PROTOBUF_ALWAYS_INLINE void StrongPointer() {
  StrongPointer(ptr);
}

template <typename T, typename TraitsImpl = MessageTraitsImpl>
inline PROTOBUF_ALWAYS_INLINE void StrongReferenceToType() {
  return StrongPointer(
      decltype(TraitsImpl::template value<T>)::StrongPointer());
}
#endif  // .reloc
89
90
// See comments on `AllocateAtLeast` for information on size returning new.
struct SizedPtr {
  void* p;   // Start of the allocation.
  size_t n;  // Usable size of the allocation in bytes.
};
96
// Debug hook allowing setting up test scenarios for AllocateAtLeast usage.
using AllocateAtLeastHookFn = SizedPtr (*)(size_t, void*);

// `AllocAtLeastHook` API
constexpr bool HaveAllocateAtLeastHook();
void SetAllocateAtLeastHook(AllocateAtLeastHookFn fn, void* context = nullptr);

#if !defined(NDEBUG) && defined(ABSL_HAVE_THREAD_LOCAL) && \
    defined(__cpp_inline_variables)

// Hook data for current thread. These vars must not be accessed directly, use
// the 'HaveAllocateAtLeastHook()` and `SetAllocateAtLeastHook()` API instead.
inline thread_local AllocateAtLeastHookFn allocate_at_least_hook = nullptr;
inline thread_local void* allocate_at_least_hook_context = nullptr;

// Hooks are available: debug build with thread_local and inline-variable
// support. The hook is per-thread state.
constexpr bool HaveAllocateAtLeastHook() { return true; }
inline void SetAllocateAtLeastHook(AllocateAtLeastHookFn fn, void* context) {
  allocate_at_least_hook = fn;
  allocate_at_least_hook_context = context;
}

#else  // !NDEBUG && ABSL_HAVE_THREAD_LOCAL && __cpp_inline_variables

// Hooks are compiled out: setting a hook is a silent no-op.
constexpr bool HaveAllocateAtLeastHook() { return false; }
inline void SetAllocateAtLeastHook(AllocateAtLeastHookFn fn, void* context) {}

#endif  // !NDEBUG && ABSL_HAVE_THREAD_LOCAL && __cpp_inline_variables
124
// Allocates at least `size` bytes. This function follows the c++ language
// proposal from D0901R10 (http://wg21.link/D0901R10) and will be implemented
// in terms of the new operator new semantics when available. The allocated
// memory should be released by a call to `SizedDelete` or `::operator delete`.
inline SizedPtr AllocateAtLeast(size_t size) {
#if !defined(NDEBUG) && defined(ABSL_HAVE_THREAD_LOCAL) && \
    defined(__cpp_inline_variables)
  // Debug-only: route the allocation through the per-thread test hook, if one
  // is installed.
  if (allocate_at_least_hook != nullptr) {
    return allocate_at_least_hook(size, allocate_at_least_hook_context);
  }
#endif  // !NDEBUG && ABSL_HAVE_THREAD_LOCAL && __cpp_inline_variables
  // Default: a plain allocation; the reported usable size is exactly `size`.
  return {::operator new(size), size};
}
138
// Deallocates `p` (obtained from `AllocateAtLeast`/`::operator new`), passing
// the size to the sized form of operator delete when the toolchain supports
// sized deallocation.
inline void SizedDelete(void* p, size_t size) {
#if defined(__cpp_sized_deallocation)
  ::operator delete(p, size);
#else
  // Avoid -Wunused-parameter
  (void)size;
  ::operator delete(p);
#endif
}
// Array counterpart of `SizedDelete`: uses sized `operator delete[]` when
// available, otherwise the unsized form.
inline void SizedArrayDelete(void* p, size_t size) {
#if defined(__cpp_sized_deallocation)
  ::operator delete[](p, size);
#else
  // Avoid -Wunused-parameter
  (void)size;
  ::operator delete[](p);
#endif
}
157
// Tag type used to invoke the constinit constructor overload of classes
// such as ArenaStringPtr and MapFieldBase. Such constructors are internal
// implementation details of the library.
struct ConstantInitialized {
  // Explicit so the tag cannot be created implicitly from `{}` at call sites.
  explicit ConstantInitialized() = default;
};
164
// Tag type used to invoke the arena constructor overload of classes such
// as ExtensionSet and MapFieldLite in aggregate initialization. These
// classes typically don't have move/copy constructors, which rules out
// explicit initialization in pre-C++17.
struct ArenaInitialized {
  // Explicit so the tag cannot be created implicitly from `{}` at call sites.
  explicit ArenaInitialized() = default;
};
172
// Debug-checks that `from` may legally be downcast to `To*`:
// - compile time: `To` must derive from `From`, and must not be a message
//   type (messages have dedicated cast helpers).
// - run time (RTTI builds only): `from` must actually point to a `To`.
// Release/non-RTTI builds perform no runtime check.
template <typename To, typename From>
void AssertDownCast(From* from) {
  static_assert(std::is_base_of<From, To>::value, "illegal DownCast");

#if defined(__cpp_concepts)
  // Check that this function is not used to downcast message types.
  // For those we should use {Down,Dynamic}CastTo{Message,Generated}.
  static_assert(!requires {
    std::derived_from<std::remove_pointer_t<To>,
                      typename std::remove_pointer_t<To>::MessageLite>;
  });
#endif

#if PROTOBUF_RTTI
  // RTTI: debug mode only!
  assert(from == nullptr || dynamic_cast<To*>(from) != nullptr);
#endif
}
191
192 template <typename To, typename From>
DownCast(From * f)193 inline To DownCast(From* f) {
194 AssertDownCast<std::remove_pointer_t<To>>(f);
195 return static_cast<To>(f);
196 }
197
198 template <typename ToRef, typename From>
DownCast(From & f)199 inline ToRef DownCast(From& f) {
200 AssertDownCast<std::remove_reference_t<ToRef>>(&f);
201 return static_cast<ToRef>(f);
202 }
203
// Looks up the name of `T` via RTTI, if RTTI is available.
// Returns absl::nullopt in non-RTTI builds. Note: the returned name is the
// implementation-defined (possibly mangled) `typeid(T).name()`.
template <typename T>
inline absl::optional<absl::string_view> RttiTypeName() {
#if PROTOBUF_RTTI
  return typeid(T).name();
#else
  return absl::nullopt;
#endif
}
213
// Helpers for identifying our supported types.

// True exactly for int32_t, uint32_t, int64_t, uint64_t and bool; other
// integral types (int, short, char, ...) are deliberately excluded.
template <typename T>
struct is_supported_integral_type
    : absl::disjunction<std::is_same<T, int32_t>, std::is_same<T, uint32_t>,
                        std::is_same<T, int64_t>, std::is_same<T, uint64_t>,
                        std::is_same<T, bool>> {};

// True for float and double only.
template <typename T>
struct is_supported_floating_point_type
    : absl::disjunction<std::is_same<T, float>, std::is_same<T, double>> {};

// True for std::string only.
template <typename T>
struct is_supported_string_type
    : absl::disjunction<std::is_same<T, std::string>> {};

// Union of the integral, floating-point and string predicates above.
template <typename T>
struct is_supported_scalar_type
    : absl::disjunction<is_supported_integral_type<T>,
                        is_supported_floating_point_type<T>,
                        is_supported_string_type<T>> {};

// True for types derived from MessageLite. The sizeof() member forces T to
// be a complete type at the point of instantiation.
template <typename T>
struct is_supported_message_type
    : absl::disjunction<std::is_base_of<MessageLite, T>> {
  static constexpr auto force_complete_type = sizeof(T);
};
240
// To prevent sharing cache lines between threads
#ifdef __cpp_aligned_new
// Aligned new is available, so over-aligned allocations of 64 bytes work.
enum { kCacheAlignment = 64 };
#else
enum { kCacheAlignment = alignof(max_align_t) };  // do the best we can
#endif

// The maximum byte alignment we support.
enum { kMaxMessageAlignment = 8 };
250
// Returns true if debug hardening for clearing oneof message on arenas is
// enabled. Enabled in every non-NDEBUG (debug) build.
inline constexpr bool DebugHardenClearOneofMessageOnArena() {
#ifdef NDEBUG
  return false;
#else
  return true;
#endif
}

// Returns true when extra debug validation should run: any build that is
// either non-optimized (!NDEBUG) or instrumented with ASan/MSan/TSan.
constexpr bool PerformDebugChecks() {
#if defined(NDEBUG) && !defined(PROTOBUF_ASAN) && !defined(PROTOBUF_MSAN) && \
    !defined(PROTOBUF_TSAN)
  return false;
#else
  return true;
#endif
}
269
// Force copy the default string to a string field so that non-optimized builds
// have harder-to-rely-on address stability.
//
// The hardening knobs below are all disabled in this configuration; they exist
// as constexpr switches so call sites can be instrumented without #ifdefs.
constexpr bool DebugHardenForceCopyDefaultString() {
  return false;
}

// When true, callers force a copy on Release()-style paths.
constexpr bool DebugHardenForceCopyInRelease() {
  return false;
}

// When true, callers force a copy on swap paths.
constexpr bool DebugHardenForceCopyInSwap() {
  return false;
}

// When true, callers force a copy on move paths.
constexpr bool DebugHardenForceCopyInMove() {
  return false;
}

// When true, callers force a heap allocation on construction.
constexpr bool DebugHardenForceAllocationOnConstruction() {
  return false;
}

// When true, callers fuzz MessageSpaceUsedLong() behavior.
constexpr bool DebugHardenFuzzMessageSpaceUsedLong() {
  return false;
}
295
// Returns true if pointers are 8B aligned, leaving least significant 3 bits
// available.
inline constexpr bool PtrIsAtLeast8BAligned() {
  constexpr size_t kPointerAlignment = alignof(void*);
  return kPointerAlignment >= 8;
}
299
// Lazy parsing tags the low bits of pointers, so it is only supported on
// platforms where pointers are at least 8-byte aligned.
inline constexpr bool IsLazyParsingSupported() {
  // We need 3 bits for pointer tagging in lazy parsing.
  return PtrIsAtLeast8BAligned();
}
304
// Prefetch 5 64-byte cache lines starting from 7 cache lines ahead.
// The constants are somewhat arbitrary and fairly aggressive, but they were
// chosen to give the best benchmark results. E.g. this configuration is ~20%
// faster, a single cache-line prefetch is ~12% faster, and increasing or
// decreasing the distance makes results 2-4% worse. An important note:
// prefetching does not require a valid address, so it is OK to prefetch past
// the end of the message/valid memory; however, we do it inside an inline
// asm block, since computing the invalid pointer is potential UB. Only insert
// this prefetch once per function.
// Prefetches the five cache lines at byte offsets 448..704 (i.e. lines 7-11)
// from `ptr`. See the rationale in the comment above.
inline PROTOBUF_ALWAYS_INLINE void Prefetch5LinesFrom7Lines(const void* ptr) {
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 448);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 512);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 576);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 640);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 704);
}
321
// Prefetch 5 64-byte cache lines starting from 1 cache-line ahead
// (byte offsets 64..320 from `ptr`).
inline PROTOBUF_ALWAYS_INLINE void Prefetch5LinesFrom1Line(const void* ptr) {
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 64);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 128);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 192);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 256);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 320);
}
330
// Unreachable(): marks a code path that must never execute.
// - Optimized builds with __builtin_unreachable: pure optimizer hint; reaching
//   it is undefined behavior.
// - Otherwise: reports a failed assumption via protobuf_assumption_failed
//   (declared in port_def.inc scope), with file/line when the builtins for
//   caller location are available.
#if defined(NDEBUG) && ABSL_HAVE_BUILTIN(__builtin_unreachable)
[[noreturn]] ABSL_ATTRIBUTE_COLD PROTOBUF_ALWAYS_INLINE inline void
Unreachable() {
  __builtin_unreachable();
}
#elif ABSL_HAVE_BUILTIN(__builtin_FILE) && ABSL_HAVE_BUILTIN(__builtin_LINE)
[[noreturn]] ABSL_ATTRIBUTE_COLD inline void Unreachable(
    const char* file = __builtin_FILE(), int line = __builtin_LINE()) {
  protobuf_assumption_failed("Unreachable", file, line);
}
#else
[[noreturn]] ABSL_ATTRIBUTE_COLD inline void Unreachable() {
  protobuf_assumption_failed("Unreachable", "", 0);
}
#endif
346
#ifdef PROTOBUF_TSAN
// TODO: it would be preferable to use __tsan_external_read/
// __tsan_external_write, but they can cause dlopen issues.
//
// Performs a TSan-visible byte read of `impl->_tsan_detect_race` so that a
// race with a concurrent TSanWrite() on the same object is reported.
template <typename T>
inline PROTOBUF_ALWAYS_INLINE void TSanRead(const T* impl) {
  char protobuf_tsan_dummy =
      *reinterpret_cast<const char*>(&impl->_tsan_detect_race);
  // Empty asm keeps the read from being optimized away.
  asm volatile("" : "+r"(protobuf_tsan_dummy));
}

// We currently use a dedicated member for TSan checking so the value of this
// member is not important. We can unconditionally write to it without affecting
// correctness of the rest of the class.
template <typename T>
inline PROTOBUF_ALWAYS_INLINE void TSanWrite(T* impl) {
  *reinterpret_cast<char*>(&impl->_tsan_detect_race) = 0;
}
#else
// TSan disabled: both checks compile to nothing.
inline PROTOBUF_ALWAYS_INLINE void TSanRead(const void*) {}
inline PROTOBUF_ALWAYS_INLINE void TSanWrite(const void*) {}
#endif
368
// This trampoline allows calling from codegen without needing a #include to
// absl. It simplifies IWYU and deps.
inline void PrefetchToLocalCache(const void* ptr) {
  absl::PrefetchToLocalCache(ptr);
}
374
// Portable std::launder: tells the compiler that `p` points to a live object
// even if one was constructed over an old object's storage. Falls back to the
// builtin, then to a plain pass-through where neither is available.
template <typename T>
constexpr T* Launder(T* p) {
#if defined(__cpp_lib_launder) && __cpp_lib_launder >= 201606L
  return std::launder(p);
#elif ABSL_HAVE_BUILTIN(__builtin_launder)
  return __builtin_launder(p);
#else
  return p;
#endif
}
385
// EnableCustomNew()/EnableCustomNewFor<T>(): whether the custom operator-new
// path is enabled globally / for a specific type T.
#if defined(PROTOBUF_CUSTOM_VTABLE)
// Custom vtables: enabled unconditionally for all types.
constexpr bool EnableCustomNew() { return true; }
template <typename T>
constexpr bool EnableCustomNewFor() {
  return true;
}
#elif ABSL_HAVE_BUILTIN(__is_bitwise_cloneable)
// No custom vtables, but the compiler can tell us which types are bitwise
// cloneable; enable per-type based on that.
constexpr bool EnableCustomNew() { return true; }
template <typename T>
constexpr bool EnableCustomNewFor() {
  return __is_bitwise_cloneable(T);
}
#else
// No way to check cloneability: disabled entirely.
constexpr bool EnableCustomNew() { return false; }
template <typename T>
constexpr bool EnableCustomNewFor() {
  return false;
}
#endif
405
// True in the open-source build of protobuf (vs. the internal Google build).
constexpr bool IsOss() { return true; }
407
408 // Counter library for debugging internal protobuf logic.
409 // It allows instrumenting code that has different options (eg fast vs slow
410 // path) to get visibility into how much we are hitting each path.
411 // When compiled with -DPROTOBUF_INTERNAL_ENABLE_DEBUG_COUNTERS, the counters
412 // register an atexit handler to dump the table. Otherwise, they are a noop and
// have no runtime cost.
414 //
415 // Usage:
416 //
417 // if (do_fast) {
418 // PROTOBUF_DEBUG_COUNTER("Foo.Fast").Inc();
419 // ...
420 // } else {
421 // PROTOBUF_DEBUG_COUNTER("Foo.Slow").Inc();
422 // ...
423 // }
// Counter implementation used when debug counters are enabled. Registers
// itself by name on construction; see the usage comment above.
class PROTOBUF_EXPORT RealDebugCounter {
 public:
  explicit RealDebugCounter(absl::string_view name) { Register(name); }
  // Lossy increment: a separate relaxed load + store rather than an atomic
  // RMW, so concurrent increments may be dropped. That is acceptable for
  // debug statistics and keeps the hot path cheap.
  void Inc() { counter_.store(value() + 1, std::memory_order_relaxed); }
  size_t value() const { return counter_.load(std::memory_order_relaxed); }

 private:
  // Registers this counter under `name` for the atexit dump (defined in the
  // .cc file).
  void Register(absl::string_view name);
  std::atomic<size_t> counter_{};
};
435
// When the feature is not enabled, the type is a noop.
class NoopDebugCounter {
 public:
  explicit constexpr NoopDebugCounter() = default;
  // Compiles away entirely.
  constexpr void Inc() {}
};
442
443 } // namespace internal
444 } // namespace protobuf
445 } // namespace google
446
447 #include "google/protobuf/port_undef.inc"
448
449 #endif // GOOGLE_PROTOBUF_PORT_H__
450