// Copyright 2021 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef ABSL_STRINGS_INTERNAL_CORD_INTERNAL_H_
#define ABSL_STRINGS_INTERNAL_CORD_INTERNAL_H_

#include <atomic>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <type_traits>

#include "absl/base/config.h"
#include "absl/base/internal/endian.h"
#include "absl/base/internal/invoke.h"
#include "absl/base/optimization.h"
#include "absl/container/internal/compressed_tuple.h"
#include "absl/meta/type_traits.h"
#include "absl/strings/string_view.h"

namespace absl {
ABSL_NAMESPACE_BEGIN
namespace cord_internal {

class CordzInfo;

// Default enable states for the cord btree, ring buffer, and shallow
// subcord features.
enum CordFeatureDefaults {
  kCordEnableBtreeDefault = true,
  kCordEnableRingBufferDefault = false,
  kCordShallowSubcordsDefault = false
};

extern std::atomic<bool> cord_btree_enabled;
extern std::atomic<bool> cord_ring_buffer_enabled;
extern std::atomic<bool> shallow_subcords_enabled;

// `cord_btree_exhaustive_validation` can be set to force exhaustive validation
// in debug assertions, and in code that calls `IsValid()` explicitly. By
// default, assertions should be relatively cheap; exhaustive `AssertValid()`
// calls can easily lead to O(n^2) complexity, as recursive / full tree
// validation is O(n).
extern std::atomic<bool> cord_btree_exhaustive_validation;

inline void enable_cord_btree(bool enable) {
  cord_btree_enabled.store(enable, std::memory_order_relaxed);
}

inline void enable_cord_ring_buffer(bool enable) {
  cord_ring_buffer_enabled.store(enable, std::memory_order_relaxed);
}

inline void enable_shallow_subcords(bool enable) {
  shallow_subcords_enabled.store(enable, std::memory_order_relaxed);
}

enum Constants {
  // The inlined size to use with absl::InlinedVector.
  //
  // Note: The InlinedVectors in this file (and in cord.h) do not need to use
  // the same value for their inlined size. The fact that they do is historical.
  // It may be desirable for each to use a different inlined size optimized for
  // that InlinedVector's usage.
  //
  // TODO(jgm): Benchmark to see if there's a more optimal value than 47 for
  // the inlined vector size (47 exists for backward compatibility).
  kInlinedVectorSize = 47,

  // Prefer copying blocks of at most this size, otherwise reference count.
  kMaxBytesToCopy = 511
};
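
// Example (illustrative, not from the original source): a typical use of
// kMaxBytesToCopy when deciding between copying and sharing a block:
//
//   if (rep->length <= kMaxBytesToCopy) {
//     // Copy the bytes into a new flat node.
//   } else {
//     CordRep::Ref(rep);  // Share the block via reference counting instead.
//   }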

// Compact class for tracking the reference count and state flags for CordRep
// instances.  Data is stored in an atomic int32_t for compactness and speed.
class RefcountAndFlags {
 public:
  constexpr RefcountAndFlags() : count_{kRefIncrement} {}
  struct Immortal {};
  explicit constexpr RefcountAndFlags(Immortal) : count_(kImmortalFlag) {}
  struct WithCrc {};
  explicit constexpr RefcountAndFlags(WithCrc)
      : count_(kCrcFlag | kRefIncrement) {}

  // Increments the reference count. Imposes no memory ordering.
  inline void Increment() {
    count_.fetch_add(kRefIncrement, std::memory_order_relaxed);
  }

  // Asserts that the current refcount is greater than 0. If the refcount is
  // greater than 1, decrements the reference count.
  //
  // Returns false if there are no references outstanding; true otherwise.
  // Inserts barriers to ensure that state written before this method returns
  // false will be visible to a thread that just observed this method returning
  // false.  Always returns false when the immortal bit is set.
  inline bool Decrement() {
    int32_t refcount = count_.load(std::memory_order_acquire) & kRefcountMask;
    assert(refcount > 0 || refcount & kImmortalFlag);
    return refcount != kRefIncrement &&
           (count_.fetch_sub(kRefIncrement, std::memory_order_acq_rel) &
            kRefcountMask) != kRefIncrement;
  }

  // Same as Decrement(), but expects the refcount to be greater than 1.
  inline bool DecrementExpectHighRefcount() {
    int32_t refcount =
        count_.fetch_sub(kRefIncrement, std::memory_order_acq_rel) &
        kRefcountMask;
    assert(refcount > 0 || refcount & kImmortalFlag);
    return refcount != kRefIncrement;
  }

  // Returns the current reference count using acquire semantics.
  inline int32_t Get() const {
    return count_.load(std::memory_order_acquire) >> kNumFlags;
  }

  // Returns true if the referenced object carries a CRC value.
  bool HasCrc() const {
    return (count_.load(std::memory_order_relaxed) & kCrcFlag) != 0;
  }

  // Returns true iff the atomic integer is 1 and this node does not store
  // a CRC.  When both these conditions are met, the current thread owns
  // the reference and no other thread shares it, so its contents may be
  // safely mutated.
  //
  // If the referenced item is shared, carries a CRC, or is immortal,
  // it should not be modified in-place, and this function returns false.
  //
  // This call performs the memory barrier needed for the owning thread
  // to act on the object, so that if it returns true, it may safely
  // assume exclusive access to the object.
  inline bool IsMutable() {
    return (count_.load(std::memory_order_acquire)) == kRefIncrement;
  }

  // Returns whether the atomic integer is 1.  Similar to IsMutable(),
  // but does not check for a stored CRC.  (An unshared node with a CRC is not
  // mutable, because changing its data would invalidate the CRC.)
  //
  // When this returns true, there are no other references, and data sinks
  // may safely adopt the children of the CordRep.
  inline bool IsOne() {
    return (count_.load(std::memory_order_acquire) & kRefcountMask) ==
           kRefIncrement;
  }

  bool IsImmortal() const {
    return (count_.load(std::memory_order_relaxed) & kImmortalFlag) != 0;
  }

 private:
  // We reserve the bottom bits for flags.
  // kImmortalFlag indicates that this entity should never be collected; it is
  // used for the StringConstant constructor to avoid collecting immutable
  // constant cords.
  // kCrcFlag indicates that the referenced object carries a CRC value.
  enum {
    kNumFlags = 2,

    kImmortalFlag = 0x1,
    kCrcFlag = 0x2,
    kRefIncrement = (1 << kNumFlags),

    // Bitmask to use when checking refcount by equality.  This masks out
    // all flags except kImmortalFlag, which is part of the refcount for
    // purposes of equality.  (A refcount of 0 or 1 does not count as 0 or 1
    // if the immortal bit is set.)
    kRefcountMask = ~kCrcFlag,
  };

  std::atomic<int32_t> count_;
};
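
// Example (illustrative): with kNumFlags == 2, the low two bits of `count_`
// hold the flags and the remaining bits hold the count, so a refcount of 1
// with a CRC is stored as kCrcFlag | kRefIncrement == 0b110, while a plain
// refcount of 2 is stored as 2 * kRefIncrement == 0b1000.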

// The overhead of a vtable is too much for Cord, so we roll our own subclasses
// using only a single byte to differentiate classes from each other - the "tag"
// byte.  Define the subclasses first so we can provide downcasting helper
// functions in the base class.

struct CordRepConcat;
struct CordRepExternal;
struct CordRepFlat;
struct CordRepSubstring;
class CordRepRing;
class CordRepBtree;

// Various representations that we allow.
enum CordRepKind {
  CONCAT = 0,
  SUBSTRING = 1,
  BTREE = 2,
  RING = 3,
  EXTERNAL = 4,

  // We have different tags for different sized flat arrays,
  // starting with FLAT, and limited to MAX_FLAT_TAG. The 225 value is based on
  // the current 'size to tag' encoding of 8 / 32 bytes. If a new tag is needed
  // in the future, then 'FLAT' and 'MAX_FLAT_TAG' should be adjusted as well
  // as the Tag <---> Size logic so that FLAT still represents the minimum flat
  // allocation size. (32 bytes as of now).
  FLAT = 5,
  MAX_FLAT_TAG = 225
};

// There are various locations where we want to check if some rep is a 'plain'
// data edge, i.e. an external or flat rep. By having FLAT == EXTERNAL + 1, we
// can perform this check in a single branch as 'tag >= EXTERNAL'.
// Likewise, we have some locations where we check for 'ring or external/flat',
// so we likewise align RING to EXTERNAL.
// Note that we can leave this optimization to the compiler. The compiler will
// DTRT when it sees a condition like `tag == EXTERNAL || tag >= FLAT`.
static_assert(RING == BTREE + 1, "BTREE and RING not consecutive");
static_assert(EXTERNAL == RING + 1, "RING and EXTERNAL not consecutive");
static_assert(FLAT == EXTERNAL + 1, "EXTERNAL and FLAT not consecutive");
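
// Example (illustrative): given the ordering asserted above, these checks
// each compile down to a single comparison:
//
//   bool is_data_edge = rep->tag >= EXTERNAL;  // EXTERNAL or FLAT
//   bool is_ring_or_data = rep->tag >= RING;   // RING, EXTERNAL or FLAT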

struct CordRep {
  CordRep() = default;
  constexpr CordRep(RefcountAndFlags::Immortal immortal, size_t l)
      : length(l), refcount(immortal), tag(EXTERNAL), storage{} {}

  // The following three fields have to total less than 32 bytes, since
  // that is the smallest supported flat node size.
  size_t length;
  RefcountAndFlags refcount;
  // If tag < FLAT, it represents CordRepKind and indicates the type of node.
  // Otherwise, the node type is CordRepFlat and the tag is the encoded size.
  uint8_t tag;

  // `storage` serves two main purposes:
  // - the starting point for CordRepFlat::Data() [flexible-array-member]
  // - 3 bytes of additional storage for use by derived classes.
  // The latter is used by CordRepConcat and CordRepBtree. CordRepConcat stores
  // a 'depth' value in storage[0], and the (future) CordRepBtree class stores
  // `height`, `begin` and `end` in the 3 entries. Otherwise we would need to
  // allocate room for these in the derived class, as not all compilers reuse
  // padding space from the base class (clang and gcc do, MSVC does not, etc).
  uint8_t storage[3];

  // Returns true if this instance's tag matches the requested type.
  constexpr bool IsRing() const { return tag == RING; }
  constexpr bool IsConcat() const { return tag == CONCAT; }
  constexpr bool IsSubstring() const { return tag == SUBSTRING; }
  constexpr bool IsExternal() const { return tag == EXTERNAL; }
  constexpr bool IsFlat() const { return tag >= FLAT; }
  constexpr bool IsBtree() const { return tag == BTREE; }

  inline CordRepRing* ring();
  inline const CordRepRing* ring() const;
  inline CordRepConcat* concat();
  inline const CordRepConcat* concat() const;
  inline CordRepSubstring* substring();
  inline const CordRepSubstring* substring() const;
  inline CordRepExternal* external();
  inline const CordRepExternal* external() const;
  inline CordRepFlat* flat();
  inline const CordRepFlat* flat() const;
  inline CordRepBtree* btree();
  inline const CordRepBtree* btree() const;

  // --------------------------------------------------------------------
  // Memory management

  // Destroys the provided `rep`.
  static void Destroy(CordRep* rep);

  // Increments the reference count of `rep`.
  // Requires `rep` to be a non-null pointer value.
  static inline CordRep* Ref(CordRep* rep);

  // Decrements the reference count of `rep`. Destroys rep if count reaches
  // zero. Requires `rep` to be a non-null pointer value.
  static inline void Unref(CordRep* rep);
};

struct CordRepConcat : public CordRep {
  CordRep* left;
  CordRep* right;

  uint8_t depth() const { return storage[0]; }
  void set_depth(uint8_t depth) { storage[0] = depth; }
};

struct CordRepSubstring : public CordRep {
  size_t start;  // Starting offset of substring in child
  CordRep* child;
};

// Type for function pointer that will invoke the releaser function and also
// delete the `CordRepExternalImpl` corresponding to the passed in
// `CordRepExternal`.
using ExternalReleaserInvoker = void (*)(CordRepExternal*);

// External CordReps are allocated together with a type erased releaser. The
// releaser is stored in the memory directly following the CordRepExternal.
struct CordRepExternal : public CordRep {
  CordRepExternal() = default;
  explicit constexpr CordRepExternal(absl::string_view str)
      : CordRep(RefcountAndFlags::Immortal{}, str.size()),
        base(str.data()),
        releaser_invoker(nullptr) {}

  const char* base;
  // Pointer to function that knows how to call and destroy the releaser.
  ExternalReleaserInvoker releaser_invoker;

  // Deletes (releases) the external rep.
  // Requires rep != nullptr and rep->IsExternal().
  static void Delete(CordRep* rep);
};

struct Rank1 {};
struct Rank0 : Rank1 {};

template <typename Releaser, typename = ::absl::base_internal::invoke_result_t<
                                 Releaser, absl::string_view>>
void InvokeReleaser(Rank0, Releaser&& releaser, absl::string_view data) {
  ::absl::base_internal::invoke(std::forward<Releaser>(releaser), data);
}

template <typename Releaser,
          typename = ::absl::base_internal::invoke_result_t<Releaser>>
void InvokeReleaser(Rank1, Releaser&& releaser, absl::string_view) {
  ::absl::base_internal::invoke(std::forward<Releaser>(releaser));
}
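
// Example (illustrative): both releaser shapes are accepted. A releaser
// invocable with a string_view selects the Rank0 overload; a releaser taking
// no arguments fails SFINAE on the Rank0 overload, so overload resolution
// falls back to the Rank1 overload via the Rank0 -> Rank1 base conversion:
//
//   InvokeReleaser(Rank0{}, [](absl::string_view) { /* ... */ }, data);
//   InvokeReleaser(Rank0{}, [] { /* ... */ }, data);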

// We use CompressedTuple so that we can benefit from EBCO.
template <typename Releaser>
struct CordRepExternalImpl
    : public CordRepExternal,
      public ::absl::container_internal::CompressedTuple<Releaser> {
  // The extra int arg is so that we can avoid interfering with copy/move
  // constructors while still benefitting from perfect forwarding.
  template <typename T>
  CordRepExternalImpl(T&& releaser, int)
      : CordRepExternalImpl::CompressedTuple(std::forward<T>(releaser)) {
    this->releaser_invoker = &Release;
  }

  ~CordRepExternalImpl() {
    InvokeReleaser(Rank0{}, std::move(this->template get<0>()),
                   absl::string_view(base, length));
  }

  static void Release(CordRepExternal* rep) {
    delete static_cast<CordRepExternalImpl*>(rep);
  }
};

inline void CordRepExternal::Delete(CordRep* rep) {
  assert(rep != nullptr && rep->IsExternal());
  auto* rep_external = static_cast<CordRepExternal*>(rep);
  assert(rep_external->releaser_invoker != nullptr);
  rep_external->releaser_invoker(rep_external);
}

template <typename Str>
struct ConstInitExternalStorage {
  ABSL_CONST_INIT static CordRepExternal value;
};

template <typename Str>
CordRepExternal ConstInitExternalStorage<Str>::value(Str::value);
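
// Example (illustrative, with a hypothetical `HelloStr` type): any type
// exposing a static `value` convertible to absl::string_view gets a single
// constant-initialized, immortal rep:
//
//   struct HelloStr {
//     static constexpr absl::string_view value = "hello";
//   };
//   CordRepExternal* rep = &ConstInitExternalStorage<HelloStr>::value;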

enum {
  kMaxInline = 15,
};

constexpr char GetOrNull(absl::string_view data, size_t pos) {
  return pos < data.size() ? data[pos] : '\0';
}

// We store cordz_info as a 64-bit pointer value in big-endian format. This
// guarantees that the least significant byte of cordz_info matches the last
// byte of the inline data representation in as_chars_, which holds the inlined
// size or the 'is_tree' bit.
using cordz_info_t = int64_t;

// Assert that the `cordz_info` pointer value perfectly overlaps the last half
// of `as_chars_` and can hold a pointer value.
static_assert(sizeof(cordz_info_t) * 2 == kMaxInline + 1, "");
static_assert(sizeof(cordz_info_t) >= sizeof(intptr_t), "");

// BigEndianByte() creates a big-endian representation of 'value', i.e.: a big
// endian value where the last byte in the host's representation holds 'value',
// with all other bytes being 0.
static constexpr cordz_info_t BigEndianByte(unsigned char value) {
#if defined(ABSL_IS_BIG_ENDIAN)
  return value;
#else
  return static_cast<cordz_info_t>(value) << ((sizeof(cordz_info_t) - 1) * 8);
#endif
}
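
// Example (illustrative): on a little-endian host, BigEndianByte(1) evaluates
// to cordz_info_t{1} << 56, i.e. 0x0100000000000000, whose last byte in
// memory is 1. On a big-endian host it evaluates to plain 1, which has the
// same in-memory layout.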

class InlineData {
 public:
  // DefaultInitType forces the use of the default initialization constructor.
  enum DefaultInitType { kDefaultInit };

  // kNullCordzInfo holds the big-endian representation of intptr_t(1).
  // This is the 'null' / initial value of 'cordz_info'. The null value is
  // specifically big-endian 1 because, with 64-bit pointers, the last byte of
  // cordz_info overlaps with the last byte holding the tag.
  static constexpr cordz_info_t kNullCordzInfo = BigEndianByte(1);

  constexpr InlineData() : as_chars_{0} {}
  explicit InlineData(DefaultInitType) {}
  explicit constexpr InlineData(CordRep* rep) : as_tree_(rep) {}
  explicit constexpr InlineData(absl::string_view chars)
      : as_chars_{
            GetOrNull(chars, 0),  GetOrNull(chars, 1),
            GetOrNull(chars, 2),  GetOrNull(chars, 3),
            GetOrNull(chars, 4),  GetOrNull(chars, 5),
            GetOrNull(chars, 6),  GetOrNull(chars, 7),
            GetOrNull(chars, 8),  GetOrNull(chars, 9),
            GetOrNull(chars, 10), GetOrNull(chars, 11),
            GetOrNull(chars, 12), GetOrNull(chars, 13),
            GetOrNull(chars, 14), static_cast<char>((chars.size() << 1))} {}

  // Returns true if the current instance is empty.
  // The 'empty value' is an inlined data value of zero length.
  bool is_empty() const { return tag() == 0; }

  // Returns true if the current instance holds a tree value.
  bool is_tree() const { return (tag() & 1) != 0; }

  // Returns true if the current instance holds a cordz_info value.
  // Requires the current instance to hold a tree value.
  bool is_profiled() const {
    assert(is_tree());
    return as_tree_.cordz_info != kNullCordzInfo;
  }

  // Returns true if either of the provided instances holds a cordz_info
  // value. This method is more efficient than the equivalent
  // `data1.is_profiled() || data2.is_profiled()`. Requires both arguments to
  // hold a tree.
  static bool is_either_profiled(const InlineData& data1,
                                 const InlineData& data2) {
    assert(data1.is_tree() && data2.is_tree());
    return (data1.as_tree_.cordz_info | data2.as_tree_.cordz_info) !=
           kNullCordzInfo;
  }

  // Returns the cordz_info sampling instance for this instance, or nullptr
  // if the current instance is not sampled and does not have CordzInfo data.
  // Requires the current instance to hold a tree value.
  CordzInfo* cordz_info() const {
    assert(is_tree());
    intptr_t info =
        static_cast<intptr_t>(absl::big_endian::ToHost64(as_tree_.cordz_info));
    assert(info & 1);
    return reinterpret_cast<CordzInfo*>(info - 1);
  }

  // Sets the current cordz_info sampling instance for this instance. Passing
  // nullptr marks the instance as not sampled.
  // Requires the current instance to hold a tree value.
  void set_cordz_info(CordzInfo* cordz_info) {
    assert(is_tree());
    intptr_t info = reinterpret_cast<intptr_t>(cordz_info) | 1;
    as_tree_.cordz_info = absl::big_endian::FromHost64(info);
  }
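
  // Example (illustrative): set_cordz_info() tags the pointer by setting its
  // low bit (CordzInfo instances are assumed to be at least 2-byte aligned),
  // and cordz_info() strips the tag again, so the pair round-trips:
  //
  //   data.set_cordz_info(info);
  //   assert(data.cordz_info() == info);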

  // Resets the current cordz_info to null / empty.
  void clear_cordz_info() {
    assert(is_tree());
    as_tree_.cordz_info = kNullCordzInfo;
  }

  // Returns a read-only pointer to the character data inside this instance.
  // Requires the current instance to hold inline data.
  const char* as_chars() const {
    assert(!is_tree());
    return as_chars_;
  }

  // Returns a mutable pointer to the character data inside this instance.
  // Should be used for 'write only' operations setting an inlined value.
  // Applications can set the value of inlined data either before or after
  // setting the inlined size, i.e., both of the below are valid:
  //
  //   // Set inlined data and inline size
  //   memcpy(data_.as_chars(), data, size);
  //   data_.set_inline_size(size);
  //
  //   // Set inlined size and inline data
  //   data_.set_inline_size(size);
  //   memcpy(data_.as_chars(), data, size);
  //
  // It's an error to read from the returned pointer without a preceding write
  // if the current instance does not hold inline data, i.e.: is_tree() == true.
  char* as_chars() { return as_chars_; }

  // Returns the tree value of this instance.
  // Requires the current instance to hold a tree value.
  CordRep* as_tree() const {
    assert(is_tree());
    return as_tree_.rep;
  }

  // Initializes this instance to hold the tree value `rep`,
  // initializing the cordz_info to null, i.e.: 'not profiled'.
  void make_tree(CordRep* rep) {
    as_tree_.rep = rep;
    as_tree_.cordz_info = kNullCordzInfo;
  }

  // Sets the tree value of this instance to `rep`.
  // Requires the current instance to already hold a tree value.
  // Does not affect the value of cordz_info.
  void set_tree(CordRep* rep) {
    assert(is_tree());
    as_tree_.rep = rep;
  }

  // Returns the size of the inlined character data inside this instance.
  // Requires the current instance to hold inline data.
  size_t inline_size() const {
    assert(!is_tree());
    return tag() >> 1;
  }

  // Sets the size of the inlined character data inside this instance.
  // Requires `size` to be <= kMaxInline.
  // See the documentation on 'as_chars()' for more information and examples.
  void set_inline_size(size_t size) {
    ABSL_ASSERT(size <= kMaxInline);
    tag() = static_cast<char>(size << 1);
  }
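
  // Example (illustrative): the tag byte stores the inlined size shifted
  // left by one, keeping the low 'is_tree' bit clear. An inlined size of 5
  // is stored as tag 0b1010, and inline_size() recovers it as
  // 0b1010 >> 1 == 5.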

 private:
  // See cordz_info_t for forced alignment and size of `cordz_info` details.
  struct AsTree {
    explicit constexpr AsTree(absl::cord_internal::CordRep* tree)
        : rep(tree), cordz_info(kNullCordzInfo) {}
    // This union uses up extra space so that whether rep is 32 or 64 bits,
    // cordz_info will still start at the eighth byte, and the last
    // byte of cordz_info will still be the last byte of InlineData.
    union {
      absl::cord_internal::CordRep* rep;
      cordz_info_t unused_aligner;
    };
    cordz_info_t cordz_info;
  };

  char& tag() { return reinterpret_cast<char*>(this)[kMaxInline]; }
  char tag() const { return reinterpret_cast<const char*>(this)[kMaxInline]; }

  // If the data has length <= kMaxInline, we store it in `as_chars_`, and
  // store the size in the last char of `as_chars_`, shifted left by 1.
  // Else we store a pointer to the tree in `as_tree_.rep`, with the low bit
  // of the tag byte set to mark the instance as holding a tree.
  union {
    char as_chars_[kMaxInline + 1];
    AsTree as_tree_;
  };
};

static_assert(sizeof(InlineData) == kMaxInline + 1, "");

inline CordRepConcat* CordRep::concat() {
  assert(IsConcat());
  return static_cast<CordRepConcat*>(this);
}

inline const CordRepConcat* CordRep::concat() const {
  assert(IsConcat());
  return static_cast<const CordRepConcat*>(this);
}

inline CordRepSubstring* CordRep::substring() {
  assert(IsSubstring());
  return static_cast<CordRepSubstring*>(this);
}

inline const CordRepSubstring* CordRep::substring() const {
  assert(IsSubstring());
  return static_cast<const CordRepSubstring*>(this);
}

inline CordRepExternal* CordRep::external() {
  assert(IsExternal());
  return static_cast<CordRepExternal*>(this);
}

inline const CordRepExternal* CordRep::external() const {
  assert(IsExternal());
  return static_cast<const CordRepExternal*>(this);
}

inline CordRep* CordRep::Ref(CordRep* rep) {
  assert(rep != nullptr);
  rep->refcount.Increment();
  return rep;
}

inline void CordRep::Unref(CordRep* rep) {
  assert(rep != nullptr);
  // Expect the refcount to be greater than 1: DecrementExpectHighRefcount()
  // performs an unconditional atomic decrement, skipping the extra load and
  // branch that Decrement() uses to avoid the atomic RMW when the refcount
  // is 1.
  if (ABSL_PREDICT_FALSE(!rep->refcount.DecrementExpectHighRefcount())) {
    Destroy(rep);
  }
}

}  // namespace cord_internal

ABSL_NAMESPACE_END
}  // namespace absl
#endif  // ABSL_STRINGS_INTERNAL_CORD_INTERNAL_H_