• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2020 The Abseil Authors.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 //      https://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14 
15 #include "absl/strings/cord.h"
16 
17 #include <algorithm>
18 #include <atomic>
19 #include <cstddef>
20 #include <cstdio>
21 #include <cstdlib>
22 #include <iomanip>
23 #include <ios>
24 #include <iostream>
25 #include <limits>
26 #include <ostream>
27 #include <sstream>
28 #include <type_traits>
29 #include <unordered_set>
30 #include <vector>
31 
32 #include "absl/base/casts.h"
33 #include "absl/base/internal/raw_logging.h"
34 #include "absl/base/macros.h"
35 #include "absl/base/port.h"
36 #include "absl/container/fixed_array.h"
37 #include "absl/container/inlined_vector.h"
38 #include "absl/crc/internal/crc_cord_state.h"
39 #include "absl/strings/cord_buffer.h"
40 #include "absl/strings/escaping.h"
41 #include "absl/strings/internal/cord_data_edge.h"
42 #include "absl/strings/internal/cord_internal.h"
43 #include "absl/strings/internal/cord_rep_btree.h"
44 #include "absl/strings/internal/cord_rep_crc.h"
45 #include "absl/strings/internal/cord_rep_flat.h"
46 #include "absl/strings/internal/cordz_statistics.h"
47 #include "absl/strings/internal/cordz_update_scope.h"
48 #include "absl/strings/internal/cordz_update_tracker.h"
49 #include "absl/strings/internal/resize_uninitialized.h"
50 #include "absl/strings/str_cat.h"
51 #include "absl/strings/str_join.h"
52 #include "absl/strings/string_view.h"
53 
54 namespace absl {
55 ABSL_NAMESPACE_BEGIN
56 
57 using ::absl::cord_internal::CordRep;
58 using ::absl::cord_internal::CordRepBtree;
59 using ::absl::cord_internal::CordRepCrc;
60 using ::absl::cord_internal::CordRepExternal;
61 using ::absl::cord_internal::CordRepFlat;
62 using ::absl::cord_internal::CordRepSubstring;
63 using ::absl::cord_internal::CordzUpdateTracker;
64 using ::absl::cord_internal::InlineData;
65 using ::absl::cord_internal::kMaxFlatLength;
66 using ::absl::cord_internal::kMinFlatLength;
67 
68 using ::absl::cord_internal::kInlinedVectorSize;
69 using ::absl::cord_internal::kMaxBytesToCopy;
70 
71 static void DumpNode(CordRep* rep, bool include_data, std::ostream* os,
72                      int indent = 0);
73 static bool VerifyNode(CordRep* root, CordRep* start_node,
74                        bool full_validation);
75 
VerifyTree(CordRep * node)76 static inline CordRep* VerifyTree(CordRep* node) {
77   // Verification is expensive, so only do it in debug mode.
78   // Even in debug mode we normally do only light validation.
79   // If you are debugging Cord itself, you should define the
80   // macro EXTRA_CORD_VALIDATION, e.g. by adding
81   // --copt=-DEXTRA_CORD_VALIDATION to the blaze line.
82 #ifdef EXTRA_CORD_VALIDATION
83   assert(node == nullptr || VerifyNode(node, node, /*full_validation=*/true));
84 #else   // EXTRA_CORD_VALIDATION
85   assert(node == nullptr || VerifyNode(node, node, /*full_validation=*/false));
86 #endif  // EXTRA_CORD_VALIDATION
87   static_cast<void>(&VerifyNode);
88 
89   return node;
90 }
91 
// Creates a new flat node with capacity for at least `length + alloc_hint`
// bytes and copies the first `length` bytes of `data` into it.
// The returned node has a refcount of 1.
static CordRepFlat* CreateFlat(const char* data, size_t length,
                               size_t alloc_hint) {
  CordRepFlat* flat = CordRepFlat::New(length + alloc_hint);
  flat->length = length;
  memcpy(flat->Data(), data, length);
  return flat;
}
99 
// Creates a new flat or Btree out of the specified array.
// The returned node has a refcount of 1.
static CordRep* NewBtree(const char* data, size_t length, size_t alloc_hint) {
  if (length <= kMaxFlatLength) {
    // Fits in a single flat node; no btree needed.
    return CreateFlat(data, length, alloc_hint);
  }
  // Fill one maximal flat, then append the remainder through the btree
  // builder, which splits it into further leaves as needed.
  CordRepFlat* flat = CreateFlat(data, kMaxFlatLength, 0);
  data += kMaxFlatLength;
  length -= kMaxFlatLength;
  auto* root = CordRepBtree::Create(flat);
  return CordRepBtree::Append(root, {data, length}, alloc_hint);
}
112 
113 // Create a new tree out of the specified array.
114 // The returned node has a refcount of 1.
NewTree(const char * data,size_t length,size_t alloc_hint)115 static CordRep* NewTree(const char* data, size_t length, size_t alloc_hint) {
116   if (length == 0) return nullptr;
117   return NewBtree(data, length, alloc_hint);
118 }
119 
namespace cord_internal {

// Completes initialization of an EXTERNAL rep that wraps `data`: records the
// length, tag and base pointer. `data` must be non-empty.
void InitializeCordRepExternal(absl::string_view data, CordRepExternal* rep) {
  assert(!data.empty());
  rep->length = data.size();
  rep->tag = EXTERNAL;
  rep->base = data.data();
  VerifyTree(rep);
}

}  // namespace cord_internal
131 
// Creates a CordRep from the provided string. If the string is large enough,
// and not wasteful, we move the string into an external cord rep, preserving
// the already allocated string contents.
// Requires the provided string length to be larger than `kMaxInline`.
static CordRep* CordRepFromString(std::string&& src) {
  assert(src.length() > cord_internal::kMaxInline);
  if (
      // String is short: copy data to avoid external block overhead.
      src.size() <= kMaxBytesToCopy ||
      // String is wasteful: copy data to avoid pinning too much unused memory.
      src.size() < src.capacity() / 2
  ) {
    return NewTree(src.data(), src.size(), 0);
  }

  // No-op releaser: the string is destroyed along with the external rep,
  // so the data stays alive as long as the rep does.
  struct StringReleaser {
    void operator()(absl::string_view /* data */) {}
    std::string data;
  };
  const absl::string_view original_data = src;
  auto* rep =
      static_cast<::absl::cord_internal::CordRepExternalImpl<StringReleaser>*>(
          absl::cord_internal::NewExternalRep(original_data,
                                              StringReleaser{std::move(src)}));
  // Moving src may have invalidated its data pointer, so adjust it.
  rep->base = rep->template get<0>().data.data();
  return rep;
}
160 
161 // --------------------------------------------------------------------
162 // Cord::InlineRep functions
163 
164 #ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL
165 constexpr unsigned char Cord::InlineRep::kMaxInline;
166 #endif
167 
// Copies `n` bytes from `data` into the inline buffer, replacing any
// existing inline contents. `n` must not exceed kMaxInline.
inline void Cord::InlineRep::set_data(const char* data, size_t n) {
  static_assert(kMaxInline == 15, "set_data is hard-coded for a length of 15");
  data_.set_inline_data(data, n);
}
172 
// Resets this rep to an inline value of size `n` with unspecified contents
// and returns a pointer to the inline buffer for the caller to fill.
inline char* Cord::InlineRep::set_data(size_t n) {
  assert(n <= kMaxInline);
  ResetToEmpty();
  set_inline_size(n);
  return data_.as_chars();
}
179 
// Shrinks the inline value by `n` bytes, zero-filling the vacated suffix.
// Requires an inline (non-tree) rep whose size is at least `n`.
inline void Cord::InlineRep::reduce_size(size_t n) {
  size_t tag = inline_size();
  assert(tag <= kMaxInline);
  assert(tag >= n);
  tag -= n;
  // Keep removed bytes zeroed so unused inline storage stays in a known state.
  memset(data_.as_chars() + tag, 0, n);
  set_inline_size(tag);
}
188 
// Removes the first `n` bytes of the inline value by shifting the remaining
// bytes to the front. Requires an inline (non-tree) rep with size >= n.
inline void Cord::InlineRep::remove_prefix(size_t n) {
  cord_internal::SmallMemmove(data_.as_chars(), data_.as_chars() + n,
                              inline_size() - n);
  reduce_size(n);
}
194 
195 // Returns `rep` converted into a CordRepBtree.
196 // Directly returns `rep` if `rep` is already a CordRepBtree.
ForceBtree(CordRep * rep)197 static CordRepBtree* ForceBtree(CordRep* rep) {
198   return rep->IsBtree()
199              ? rep->btree()
200              : CordRepBtree::Create(cord_internal::RemoveCrcNode(rep));
201 }
202 
// Appends `tree` to a cord that currently holds inline data, converting the
// cord to a tree. Any existing inline bytes become the leading flat node.
void Cord::InlineRep::AppendTreeToInlined(CordRep* tree,
                                          MethodIdentifier method) {
  assert(!is_tree());
  if (!data_.is_empty()) {
    // Move the inline bytes into a flat and make it the first btree edge.
    CordRepFlat* flat = MakeFlatWithExtraCapacity(0);
    tree = CordRepBtree::Append(CordRepBtree::Create(flat), tree);
  }
  EmplaceTree(tree, method);
}
212 
// Appends `tree` to a cord that already holds a tree, keeping cordz sampling
// information up to date via the update scope.
void Cord::InlineRep::AppendTreeToTree(CordRep* tree, MethodIdentifier method) {
  assert(is_tree());
  const CordzUpdateScope scope(data_.cordz_info(), method);
  tree = CordRepBtree::Append(ForceBtree(data_.as_tree()), tree);
  SetTree(tree, scope);
}
219 
AppendTree(CordRep * tree,MethodIdentifier method)220 void Cord::InlineRep::AppendTree(CordRep* tree, MethodIdentifier method) {
221   assert(tree != nullptr);
222   assert(tree->length != 0);
223   assert(!tree->IsCrc());
224   if (data_.is_tree()) {
225     AppendTreeToTree(tree, method);
226   } else {
227     AppendTreeToInlined(tree, method);
228   }
229 }
230 
// Prepends `tree` to a cord that currently holds inline data, converting the
// cord to a tree. Any existing inline bytes become the trailing flat node.
void Cord::InlineRep::PrependTreeToInlined(CordRep* tree,
                                           MethodIdentifier method) {
  assert(!is_tree());
  if (!data_.is_empty()) {
    // Move the inline bytes into a flat and prepend `tree` before it.
    CordRepFlat* flat = MakeFlatWithExtraCapacity(0);
    tree = CordRepBtree::Prepend(CordRepBtree::Create(flat), tree);
  }
  EmplaceTree(tree, method);
}
240 
// Prepends `tree` to a cord that already holds a tree, keeping cordz
// sampling information up to date via the update scope.
void Cord::InlineRep::PrependTreeToTree(CordRep* tree,
                                        MethodIdentifier method) {
  assert(is_tree());
  const CordzUpdateScope scope(data_.cordz_info(), method);
  tree = CordRepBtree::Prepend(ForceBtree(data_.as_tree()), tree);
  SetTree(tree, scope);
}
248 
PrependTree(CordRep * tree,MethodIdentifier method)249 void Cord::InlineRep::PrependTree(CordRep* tree, MethodIdentifier method) {
250   assert(tree != nullptr);
251   assert(tree->length != 0);
252   assert(!tree->IsCrc());
253   if (data_.is_tree()) {
254     PrependTreeToTree(tree, method);
255   } else {
256     PrependTreeToInlined(tree, method);
257   }
258 }
259 
// Searches for a non-full flat node at the rightmost leaf of the tree. If a
// suitable leaf is found, the function will update the length field for all
// nodes to account for the size increase. The append region address will be
// written to region and the actual size increase will be written to size.
// Returns false (with *region = nullptr, *size = 0) if no in-place append
// space is available; in that case the caller must allocate a new node.
static inline bool PrepareAppendRegion(CordRep* root, char** region,
                                       size_t* size, size_t max_length) {
  // Btree case: the btree itself knows how to expose an append buffer, but
  // only when we hold the sole reference (mutating shared nodes is unsafe).
  if (root->IsBtree() && root->refcount.IsOne()) {
    Span<char> span = root->btree()->GetAppendBuffer(max_length);
    if (!span.empty()) {
      *region = span.data();
      *size = span.size();
      return true;
    }
  }

  // Otherwise only a uniquely-owned flat root can be grown in place.
  CordRep* dst = root;
  if (!dst->IsFlat() || !dst->refcount.IsOne()) {
    *region = nullptr;
    *size = 0;
    return false;
  }

  const size_t in_use = dst->length;
  const size_t capacity = dst->flat()->Capacity();
  if (in_use == capacity) {
    *region = nullptr;
    *size = 0;
    return false;
  }

  // Claim as much of the remaining capacity as the caller needs.
  const size_t size_increase = std::min(capacity - in_use, max_length);
  dst->length += size_increase;

  *region = dst->flat()->Data() + in_use;
  *size = size_increase;
  return true;
}
297 
// Slow-path copy assignment, used when either side holds a tree. Takes a new
// reference on src's tree (if any), releases this rep's old tree (if any),
// and keeps cordz tracking consistent throughout.
void Cord::InlineRep::AssignSlow(const Cord::InlineRep& src) {
  assert(&src != this);
  assert(is_tree() || src.is_tree());
  auto constexpr method = CordzUpdateTracker::kAssignCord;
  if (ABSL_PREDICT_TRUE(!is_tree())) {
    // This rep is inline, so src must be a tree: just adopt a new reference.
    EmplaceTree(CordRep::Ref(src.as_tree()), src.data_, method);
    return;
  }

  // This rep is a tree; hold the old tree until the new value is installed.
  CordRep* tree = as_tree();
  if (CordRep* src_tree = src.tree()) {
    // Leave any existing `cordz_info` in place, and let MaybeTrackCord()
    // decide if this cord should be (or remains to be) sampled or not.
    data_.set_tree(CordRep::Ref(src_tree));
    CordzInfo::MaybeTrackCord(data_, src.data_, method);
  } else {
    CordzInfo::MaybeUntrackCord(data_.cordz_info());
    data_ = src.data_;
  }
  CordRep::Unref(tree);
}
319 
UnrefTree()320 void Cord::InlineRep::UnrefTree() {
321   if (is_tree()) {
322     CordzInfo::MaybeUntrackCord(data_.cordz_info());
323     CordRep::Unref(tree());
324   }
325 }
326 
327 // --------------------------------------------------------------------
328 // Constructors and destructors
329 
// Constructs a Cord from `src`, storing the bytes inline when they fit and
// otherwise building a tree. `method` attributes the allocation for cordz.
Cord::Cord(absl::string_view src, MethodIdentifier method)
    : contents_(InlineData::kDefaultInit) {
  const size_t n = src.size();
  if (n <= InlineRep::kMaxInline) {
    contents_.set_data(src.data(), n);
  } else {
    CordRep* rep = NewTree(src.data(), n, 0);
    contents_.EmplaceTree(rep, method);
  }
}
340 
// Constructs a Cord from a string-like type, copying small values inline and
// otherwise moving the string's buffer into the cord via CordRepFromString.
template <typename T, Cord::EnableIfString<T>>
Cord::Cord(T&& src) : contents_(InlineData::kDefaultInit) {
  if (src.size() <= InlineRep::kMaxInline) {
    contents_.set_data(src.data(), src.size());
  } else {
    CordRep* rep = CordRepFromString(std::forward<T>(src));
    contents_.EmplaceTree(rep, CordzUpdateTracker::kConstructorString);
  }
}

// Explicit instantiation for std::string&&.
template Cord::Cord(std::string&& src);
352 
// The destruction code is separate so that the compiler can determine
// that it does not need to call the destructor on a moved-from Cord.
// Only called when contents_ holds a tree: untracks cordz, then releases it.
void Cord::DestroyCordSlow() {
  assert(contents_.is_tree());
  CordzInfo::MaybeUntrackCord(contents_.cordz_info());
  CordRep::Unref(VerifyTree(contents_.as_tree()));
}
360 
361 // --------------------------------------------------------------------
362 // Mutators
363 
Clear()364 void Cord::Clear() {
365   if (CordRep* tree = contents_.clear()) {
366     CordRep::Unref(tree);
367   }
368 }
369 
// Assigns a large string (> kMaxBytesToCopy) to this cord by converting it
// into a tree rep, replacing and releasing any previous tree.
Cord& Cord::AssignLargeString(std::string&& src) {
  auto constexpr method = CordzUpdateTracker::kAssignString;
  assert(src.size() > kMaxBytesToCopy);
  CordRep* rep = CordRepFromString(std::move(src));
  if (CordRep* tree = contents_.tree()) {
    // Replace the existing tree under a cordz update scope, then release it.
    CordzUpdateScope scope(contents_.cordz_info(), method);
    contents_.SetTree(rep, scope);
    CordRep::Unref(tree);
  } else {
    contents_.EmplaceTree(rep, method);
  }
  return *this;
}
383 
// Assigns `src` to this cord: inline when small, reusing a uniquely-owned
// flat node in place when possible, otherwise building a fresh tree.
Cord& Cord::operator=(absl::string_view src) {
  auto constexpr method = CordzUpdateTracker::kAssignString;
  const char* data = src.data();
  size_t length = src.size();
  CordRep* tree = contents_.tree();
  if (length <= InlineRep::kMaxInline) {
    // Embed into this->contents_, which is somewhat subtle:
    // - MaybeUntrackCord must be called before Unref(tree).
    // - MaybeUntrackCord must be called before set_data() clobbers cordz_info.
    // - set_data() must be called before Unref(tree) as it may reference tree.
    if (tree != nullptr) CordzInfo::MaybeUntrackCord(contents_.cordz_info());
    contents_.set_data(data, length);
    if (tree != nullptr) CordRep::Unref(tree);
    return *this;
  }
  if (tree != nullptr) {
    CordzUpdateScope scope(contents_.cordz_info(), method);
    if (tree->IsFlat() && tree->flat()->Capacity() >= length &&
        tree->refcount.IsOne()) {
      // Copy in place if the existing FLAT node is reusable.
      // memmove (not memcpy): `data` may alias the flat's own buffer.
      memmove(tree->flat()->Data(), data, length);
      tree->length = length;
      VerifyTree(tree);
      return *this;
    }
    contents_.SetTree(NewTree(data, length, 0), scope);
    CordRep::Unref(tree);
  } else {
    contents_.EmplaceTree(NewTree(data, length, 0), method);
  }
  return *this;
}
416 
// TODO(sanjay): Move to Cord::InlineRep section of file.  For now,
// we keep it here to make diffs easier.
// Appends `src` to this rep: fills any in-place append region or inline
// space first, then pushes the remainder through the btree builder.
void Cord::InlineRep::AppendArray(absl::string_view src,
                                  MethodIdentifier method) {
  MaybeRemoveEmptyCrcNode();
  if (src.empty()) return;  // memcpy(_, nullptr, 0) is undefined.

  size_t appended = 0;
  CordRep* rep = tree();
  const CordRep* const root = rep;
  CordzUpdateScope scope(root ? cordz_info() : nullptr, method);
  if (root != nullptr) {
    // Existing tree: try to copy into spare capacity at the rightmost leaf.
    rep = cord_internal::RemoveCrcNode(rep);
    char* region;
    if (PrepareAppendRegion(rep, &region, &appended, src.size())) {
      memcpy(region, src.data(), appended);
    }
  } else {
    // Try to fit in the inline buffer if possible.
    size_t inline_length = inline_size();
    if (src.size() <= kMaxInline - inline_length) {
      // Append new data to embedded array
      set_inline_size(inline_length + src.size());
      memcpy(data_.as_chars() + inline_length, src.data(), src.size());
      return;
    }

    // Allocate flat to be a perfect fit on first append exceeding inlined size.
    // Subsequent growth will use amortized growth until we reach maximum flat
    // size.
    rep = CordRepFlat::New(inline_length + src.size());
    appended = std::min(src.size(), rep->flat()->Capacity() - inline_length);
    memcpy(rep->flat()->Data(), data_.as_chars(), inline_length);
    memcpy(rep->flat()->Data() + inline_length, src.data(), appended);
    rep->length = inline_length + appended;
  }

  src.remove_prefix(appended);
  if (src.empty()) {
    CommitTree(root, rep, scope, method);
    return;
  }

  // TODO(b/192061034): keep legacy 10% growth rate: consider other rates.
  rep = ForceBtree(rep);
  const size_t min_growth = std::max<size_t>(rep->length / 10, src.size());
  rep = CordRepBtree::Append(rep->btree(), src, min_growth - src.size());

  CommitTree(root, rep, scope, method);
}
467 
// Lvalue overload: returns this cord's tree with an added reference,
// leaving the cord unchanged. May return nullptr for inline cords.
inline CordRep* Cord::TakeRep() const& {
  return CordRep::Ref(contents_.tree());
}
471 
// Rvalue overload: steals this cord's tree (no ref count change), leaving
// the cord empty. May return nullptr for inline cords.
inline CordRep* Cord::TakeRep() && {
  CordRep* rep = contents_.tree();
  contents_.clear();
  return rep;
}
477 
// Shared implementation for Append(const Cord&) and Append(Cord&&): `C` is
// either `const Cord&` (copy) or `Cord` (move, allowing TakeRep to steal).
template <typename C>
inline void Cord::AppendImpl(C&& src) {
  auto constexpr method = CordzUpdateTracker::kAppendCord;

  contents_.MaybeRemoveEmptyCrcNode();
  if (src.empty()) return;

  if (empty()) {
    // Since destination is empty, we can avoid allocating a node,
    if (src.contents_.is_tree()) {
      // by taking the tree directly
      CordRep* rep =
          cord_internal::RemoveCrcNode(std::forward<C>(src).TakeRep());
      contents_.EmplaceTree(rep, method);
    } else {
      // or copying over inline data
      contents_.data_ = src.contents_.data_;
    }
    return;
  }

  // For short cords, it is faster to copy data if there is room in dst.
  const size_t src_size = src.contents_.size();
  if (src_size <= kMaxBytesToCopy) {
    CordRep* src_tree = src.contents_.tree();
    if (src_tree == nullptr) {
      // src has embedded data.
      contents_.AppendArray({src.contents_.data(), src_size}, method);
      return;
    }
    if (src_tree->IsFlat()) {
      // src tree just has one flat node.
      contents_.AppendArray({src_tree->flat()->Data(), src_size}, method);
      return;
    }
    if (&src == this) {
      // ChunkIterator below assumes that src is not modified during traversal.
      Append(Cord(src));
      return;
    }
    // TODO(mec): Should we only do this if "dst" has space?
    for (absl::string_view chunk : src.Chunks()) {
      Append(chunk);
    }
    return;
  }

  // Guaranteed to be a tree (kMaxBytesToCopy > kInlinedSize)
  CordRep* rep = cord_internal::RemoveCrcNode(std::forward<C>(src).TakeRep());
  contents_.AppendTree(rep, CordzUpdateTracker::kAppendCord);
}
529 
// Attempts to extract an appendable flat from `rep` with at least
// `min_capacity` spare bytes. On success, `extracted` is the flat and `tree`
// is the remaining cord (possibly nullptr); on failure `extracted` is null
// and `tree` holds the unchanged input (or vice versa for a lone flat).
static CordRep::ExtractResult ExtractAppendBuffer(CordRep* rep,
                                                  size_t min_capacity) {
  switch (rep->tag) {
    case cord_internal::BTREE:
      return CordRepBtree::ExtractAppendBuffer(rep->btree(), min_capacity);
    default:
      // A uniquely-owned flat with enough spare capacity is itself the buffer.
      if (rep->IsFlat() && rep->refcount.IsOne() &&
          rep->flat()->Capacity() - rep->length >= min_capacity) {
        return {nullptr, rep};
      }
      return {rep, nullptr};
  }
}
543 
// Creates a CordBuffer seeded with the bytes of inline `data`, which is
// cleared. `capacity` is the requested total capacity; `block_size` selects
// the custom-limit sizing strategy when non-zero.
static CordBuffer CreateAppendBuffer(InlineData& data, size_t block_size,
                                     size_t capacity) {
  // Watch out for overflow, people can ask for size_t::max().
  const size_t size = data.inline_size();
  const size_t max_capacity = std::numeric_limits<size_t>::max() - size;
  capacity = (std::min)(max_capacity, capacity) + size;
  CordBuffer buffer =
      block_size ? CordBuffer::CreateWithCustomLimit(block_size, capacity)
                 : CordBuffer::CreateWithDefaultLimit(capacity);
  cord_internal::SmallMemmove(buffer.data(), data.as_chars(), size);
  buffer.SetLength(size);
  // The inline bytes now live in the buffer; reset the source.
  data = {};
  return buffer;
}
558 
// Slow path for GetAppendBuffer/GetCustomAppendBuffer: tries to extract an
// existing appendable flat from the tree, else allocates a fresh buffer
// (seeded with inline data when the cord is not a tree).
CordBuffer Cord::GetAppendBufferSlowPath(size_t block_size, size_t capacity,
                                         size_t min_capacity) {
  auto constexpr method = CordzUpdateTracker::kGetAppendBuffer;
  CordRep* tree = contents_.tree();
  if (tree != nullptr) {
    CordzUpdateScope scope(contents_.cordz_info(), method);
    CordRep::ExtractResult result = ExtractAppendBuffer(tree, min_capacity);
    if (result.extracted != nullptr) {
      // Reuse the extracted flat; the remainder (if any) stays in the cord.
      contents_.SetTreeOrEmpty(result.tree, scope);
      return CordBuffer(result.extracted->flat());
    }
    return block_size ? CordBuffer::CreateWithCustomLimit(block_size, capacity)
                      : CordBuffer::CreateWithDefaultLimit(capacity);
  }
  return CreateAppendBuffer(contents_.data_, block_size, capacity);
}
575 
// Appends a copy of `src` to this cord.
void Cord::Append(const Cord& src) {
  AppendImpl(src);
}
579 
// Appends `src` to this cord, stealing its tree when possible.
void Cord::Append(Cord&& src) {
  AppendImpl(std::move(src));
}
583 
// Appends a string-like value: copied for small inputs, otherwise moved into
// a tree rep via CordRepFromString.
template <typename T, Cord::EnableIfString<T>>
void Cord::Append(T&& src) {
  if (src.size() <= kMaxBytesToCopy) {
    Append(absl::string_view(src));
  } else {
    CordRep* rep = CordRepFromString(std::forward<T>(src));
    contents_.AppendTree(rep, CordzUpdateTracker::kAppendString);
  }
}

// Explicit instantiation for std::string&&.
template void Cord::Append(std::string&& src);
595 
// Prepends a copy of `src` to this cord. Tree-backed sources share the tree
// (adding a reference); inline sources are copied as raw bytes.
void Cord::Prepend(const Cord& src) {
  contents_.MaybeRemoveEmptyCrcNode();
  if (src.empty()) return;

  CordRep* src_tree = src.contents_.tree();
  if (src_tree != nullptr) {
    CordRep::Ref(src_tree);
    contents_.PrependTree(cord_internal::RemoveCrcNode(src_tree),
                          CordzUpdateTracker::kPrependCord);
    return;
  }

  // `src` cord is inlined.
  absl::string_view src_contents(src.contents_.data(), src.contents_.size());
  return Prepend(src_contents);
}
612 
// Prepends the bytes of `src`, staying inline when the combined size fits,
// otherwise prepending a newly built tree.
void Cord::PrependArray(absl::string_view src, MethodIdentifier method) {
  contents_.MaybeRemoveEmptyCrcNode();
  if (src.empty()) return;  // memcpy(_, nullptr, 0) is undefined.

  if (!contents_.is_tree()) {
    size_t cur_size = contents_.inline_size();
    if (cur_size + src.size() <= InlineRep::kMaxInline) {
      // Use embedded storage.
      // Build the result in a scratch InlineData: new bytes first, then the
      // existing bytes, then install it atomically via assignment.
      InlineData data;
      data.set_inline_size(cur_size + src.size());
      memcpy(data.as_chars(), src.data(), src.size());
      memcpy(data.as_chars() + src.size(), contents_.data(), cur_size);
      contents_.data_ = data;
      return;
    }
  }
  CordRep* rep = NewTree(src.data(), src.size(), 0);
  contents_.PrependTree(rep, method);
}
632 
// Appends a small non-empty `src` (at most one flat's worth) without growth
// hints: inline when it fits, else as a single exactly-sized flat node.
void Cord::AppendPrecise(absl::string_view src, MethodIdentifier method) {
  assert(!src.empty());
  assert(src.size() <= cord_internal::kMaxFlatLength);
  if (contents_.remaining_inline_capacity() >= src.size()) {
    const size_t inline_length = contents_.inline_size();
    contents_.set_inline_size(inline_length + src.size());
    memcpy(contents_.data_.as_chars() + inline_length, src.data(), src.size());
  } else {
    contents_.AppendTree(CordRepFlat::Create(src), method);
  }
}
644 
// Prepends a small non-empty `src` (at most one flat's worth) without growth
// hints: inline when it fits, else as a single exactly-sized flat node.
void Cord::PrependPrecise(absl::string_view src, MethodIdentifier method) {
  assert(!src.empty());
  assert(src.size() <= cord_internal::kMaxFlatLength);
  if (contents_.remaining_inline_capacity() >= src.size()) {
    const size_t cur_size = contents_.inline_size();
    // Assemble new-bytes-then-old-bytes in a scratch InlineData, then install.
    InlineData data;
    data.set_inline_size(cur_size + src.size());
    memcpy(data.as_chars(), src.data(), src.size());
    memcpy(data.as_chars() + src.size(), contents_.data(), cur_size);
    contents_.data_ = data;
  } else {
    contents_.PrependTree(CordRepFlat::Create(src), method);
  }
}
659 
// Prepends a string-like value: copied for small inputs, otherwise moved
// into a tree rep via CordRepFromString.
template <typename T, Cord::EnableIfString<T>>
inline void Cord::Prepend(T&& src) {
  if (src.size() <= kMaxBytesToCopy) {
    Prepend(absl::string_view(src));
  } else {
    CordRep* rep = CordRepFromString(std::forward<T>(src));
    contents_.PrependTree(rep, CordzUpdateTracker::kPrependString);
  }
}

// Explicit instantiation for std::string&&.
template void Cord::Prepend(std::string&& src);
671 
// Removes the first `n` bytes of this cord. CHECK-fails if n > size().
// Uses the cheapest available strategy for the current representation.
void Cord::RemovePrefix(size_t n) {
  ABSL_INTERNAL_CHECK(n <= size(),
                      absl::StrCat("Requested prefix size ", n,
                                   " exceeds Cord's size ", size()));
  contents_.MaybeRemoveEmptyCrcNode();
  CordRep* tree = contents_.tree();
  if (tree == nullptr) {
    contents_.remove_prefix(n);
  } else {
    auto constexpr method = CordzUpdateTracker::kRemovePrefix;
    CordzUpdateScope scope(contents_.cordz_info(), method);
    tree = cord_internal::RemoveCrcNode(tree);
    if (n >= tree->length) {
      // Removing everything: drop the tree.
      CordRep::Unref(tree);
      tree = nullptr;
    } else if (tree->IsBtree()) {
      CordRep* old = tree;
      tree = tree->btree()->SubTree(n, tree->length - n);
      CordRep::Unref(old);
    } else if (tree->IsSubstring() && tree->refcount.IsOne()) {
      // Uniquely-owned substring: adjust its window in place.
      tree->substring()->start += n;
      tree->length -= n;
    } else {
      CordRep* rep = CordRepSubstring::Substring(tree, n, tree->length - n);
      CordRep::Unref(tree);
      tree = rep;
    }
    contents_.SetTreeOrEmpty(tree, scope);
  }
}
702 
// Removes the last `n` bytes of this cord. CHECK-fails if n > size().
// Uses the cheapest available strategy for the current representation.
void Cord::RemoveSuffix(size_t n) {
  ABSL_INTERNAL_CHECK(n <= size(),
                      absl::StrCat("Requested suffix size ", n,
                                   " exceeds Cord's size ", size()));
  contents_.MaybeRemoveEmptyCrcNode();
  CordRep* tree = contents_.tree();
  if (tree == nullptr) {
    contents_.reduce_size(n);
  } else {
    auto constexpr method = CordzUpdateTracker::kRemoveSuffix;
    CordzUpdateScope scope(contents_.cordz_info(), method);
    tree = cord_internal::RemoveCrcNode(tree);
    if (n >= tree->length) {
      // Removing everything: drop the tree.
      CordRep::Unref(tree);
      tree = nullptr;
    } else if (tree->IsBtree()) {
      tree = CordRepBtree::RemoveSuffix(tree->btree(), n);
    } else if (!tree->IsExternal() && tree->refcount.IsOne()) {
      // Uniquely-owned flat or substring: just truncate the length.
      assert(tree->IsFlat() || tree->IsSubstring());
      tree->length -= n;
    } else {
      CordRep* rep = CordRepSubstring::Substring(tree, 0, tree->length - n);
      CordRep::Unref(tree);
      tree = rep;
    }
    contents_.SetTreeOrEmpty(tree, scope);
  }
}
731 
// Returns a new cord holding bytes [pos, pos + new_size) of this cord.
// Out-of-range `pos`/`new_size` are clamped to the cord's bounds.
Cord Cord::Subcord(size_t pos, size_t new_size) const {
  Cord sub_cord;
  size_t length = size();
  if (pos > length) pos = length;
  if (new_size > length - pos) new_size = length - pos;
  if (new_size == 0) return sub_cord;

  CordRep* tree = contents_.tree();
  if (tree == nullptr) {
    // Source is inline, so the result is inline too.
    sub_cord.contents_.set_data(contents_.data() + pos, new_size);
    return sub_cord;
  }

  if (new_size <= InlineRep::kMaxInline) {
    // Small result: copy the bytes chunk by chunk into inline storage
    // instead of sharing tree nodes.
    sub_cord.contents_.set_inline_size(new_size);
    char* dest = sub_cord.contents_.data_.as_chars();
    Cord::ChunkIterator it = chunk_begin();
    it.AdvanceBytes(pos);
    size_t remaining_size = new_size;
    while (remaining_size > it->size()) {
      cord_internal::SmallMemmove(dest, it->data(), it->size());
      remaining_size -= it->size();
      dest += it->size();
      ++it;
    }
    cord_internal::SmallMemmove(dest, it->data(), remaining_size);
    return sub_cord;
  }

  // Large result: share the underlying tree via a subtree/substring node.
  tree = cord_internal::SkipCrcNode(tree);
  if (tree->IsBtree()) {
    tree = tree->btree()->SubTree(pos, new_size);
  } else {
    tree = CordRepSubstring::Substring(tree, pos, new_size);
  }
  sub_cord.contents_.EmplaceTree(tree, contents_.data_,
                                 CordzUpdateTracker::kSubCord);
  return sub_cord;
}
771 
772 // --------------------------------------------------------------------
773 // Comparators
774 
775 namespace {
776 
// Maps a raw memcmp result onto the canonical {-1, 0, 1} set.
int ClampResult(int memcmp_res) {
  if (memcmp_res < 0) return -1;
  if (memcmp_res > 0) return 1;
  return 0;
}
780 
// Compares the common prefix of *lhs and *rhs (up to the shorter length),
// consuming the compared bytes from both views and from *size_to_compare.
// Returns the raw memcmp result; 0 means the compared prefixes were equal.
int CompareChunks(absl::string_view* lhs, absl::string_view* rhs,
                  size_t* size_to_compare) {
  size_t compared_size = std::min(lhs->size(), rhs->size());
  assert(*size_to_compare >= compared_size);
  *size_to_compare -= compared_size;

  int memcmp_res = ::memcmp(lhs->data(), rhs->data(), compared_size);
  if (memcmp_res != 0) return memcmp_res;

  // Prefixes matched: advance both views past the compared bytes.
  lhs->remove_prefix(compared_size);
  rhs->remove_prefix(compared_size);

  return 0;
}
795 
// This overload set computes comparison results from memcmp result. This
// interface is used inside GenericCompare below. Different implementations
// are specialized for int and bool. For int we clamp result to {-1, 0, 1}
// set. For bool we are just interested in "value == 0".
template <typename ResultType>
ResultType ComputeCompareResult(int memcmp_res) {
  return ClampResult(memcmp_res);
}
template <>
bool ComputeCompareResult<bool>(int memcmp_res) {
  return memcmp_res == 0;
}
808 
809 }  // namespace
810 
// Helper routine. Locates the first flat or external chunk of the Cord without
// initializing the iterator, and returns a string_view referencing the data.
inline absl::string_view Cord::InlineRep::FindFlatStartPiece() const {
  // Inline (non-tree) cords store their bytes directly in `data_`.
  if (!is_tree()) {
    return absl::string_view(data_.as_chars(), data_.inline_size());
  }

  // A CRC node, if present, wraps the actual data tree; skip over it.
  CordRep* node = cord_internal::SkipCrcNode(tree());
  if (node->IsFlat()) {
    return absl::string_view(node->flat()->Data(), node->length);
  }

  if (node->IsExternal()) {
    return absl::string_view(node->external()->base, node->length);
  }

  // For a btree, walk down the front (leftmost) edge until height 0, whose
  // first entry references the first data chunk.
  if (node->IsBtree()) {
    CordRepBtree* tree = node->btree();
    int height = tree->height();
    while (--height >= 0) {
      tree = tree->Edge(CordRepBtree::kFront)->btree();
    }
    return tree->Data(tree->begin());
  }

  // Get the child node if we encounter a SUBSTRING.
  size_t offset = 0;
  size_t length = node->length;
  assert(length != 0);

  if (node->IsSubstring()) {
    offset = node->substring()->start;
    node = node->substring()->child;
  }

  if (node->IsFlat()) {
    return absl::string_view(node->flat()->Data() + offset, length);
  }

  assert(node->IsExternal() && "Expect FLAT or EXTERNAL node here");

  return absl::string_view(node->external()->base + offset, length);
}
854 
// Attaches `state` as the expected CRC state of this cord by wrapping the
// current root in a CordRepCrc node. Inline data is first promoted to a
// flat rep, since only tree reps can carry a CRC wrapper.
void Cord::SetCrcCordState(crc_internal::CrcCordState state) {
  auto constexpr method = CordzUpdateTracker::kSetExpectedChecksum;
  if (empty()) {
    // Drop any pre-existing empty CRC node, then install a CRC node with a
    // null child to represent "empty cord with a checksum".
    contents_.MaybeRemoveEmptyCrcNode();
    CordRep* rep = CordRepCrc::New(nullptr, std::move(state));
    contents_.EmplaceTree(rep, method);
  } else if (!contents_.is_tree()) {
    // Inline data cannot carry a CRC node; convert it to a flat rep first.
    CordRep* rep = contents_.MakeFlatWithExtraCapacity(0);
    rep = CordRepCrc::New(rep, std::move(state));
    contents_.EmplaceTree(rep, method);
  } else {
    // Already a tree: wrap the existing root under a Cordz update scope.
    const CordzUpdateScope scope(contents_.data_.cordz_info(), method);
    CordRep* rep = CordRepCrc::New(contents_.data_.as_tree(), std::move(state));
    contents_.SetTree(rep, scope);
  }
}
871 
// Records `crc` as the expected crc32c checksum for the cord's full contents
// by building a single-chunk CrcCordState covering size() bytes.
void Cord::SetExpectedChecksum(uint32_t crc) {
  // Construct a CrcCordState with a single chunk.
  crc_internal::CrcCordState state;
  state.mutable_rep()->prefix_crc.push_back(
      crc_internal::CrcCordState::PrefixCrc(size(), absl::crc32c_t{crc}));
  SetCrcCordState(std::move(state));
}
879 
MaybeGetCrcCordState() const880 const crc_internal::CrcCordState* Cord::MaybeGetCrcCordState() const {
881   if (!contents_.is_tree() || !contents_.tree()->IsCrc()) {
882     return nullptr;
883   }
884   return &contents_.tree()->crc()->crc_cord_state;
885 }
886 
ExpectedChecksum() const887 absl::optional<uint32_t> Cord::ExpectedChecksum() const {
888   if (!contents_.is_tree() || !contents_.tree()->IsCrc()) {
889     return absl::nullopt;
890   }
891   return static_cast<uint32_t>(
892       contents_.tree()->crc()->crc_cord_state.Checksum());
893 }
894 
// Slow-path three-way comparison against `rhs`, entered after the caller has
// already found the first `compared_size` bytes equal. `size_to_compare` is
// the total byte budget remaining, including that initial `compared_size`.
inline int Cord::CompareSlowPath(absl::string_view rhs, size_t compared_size,
                                 size_t size_to_compare) const {
  // Advances `*it` to the next non-empty chunk once `*chunk` is exhausted.
  // Returns false when the iterator has no bytes remaining.
  auto advance = [](Cord::ChunkIterator* it, absl::string_view* chunk) {
    if (!chunk->empty()) return true;
    ++*it;
    if (it->bytes_remaining_ == 0) return false;
    *chunk = **it;
    return true;
  };

  Cord::ChunkIterator lhs_it = chunk_begin();

  // compared_size is inside first chunk.
  absl::string_view lhs_chunk =
      (lhs_it.bytes_remaining_ != 0) ? *lhs_it : absl::string_view();
  assert(compared_size <= lhs_chunk.size());
  assert(compared_size <= rhs.size());
  lhs_chunk.remove_prefix(compared_size);
  rhs.remove_prefix(compared_size);
  size_to_compare -= compared_size;  // skip already compared size.

  while (advance(&lhs_it, &lhs_chunk) && !rhs.empty()) {
    int comparison_result = CompareChunks(&lhs_chunk, &rhs, &size_to_compare);
    if (comparison_result != 0) return comparison_result;
    if (size_to_compare == 0) return 0;
  }

  // One side ran out of data first: the side with bytes left compares greater.
  return static_cast<int>(rhs.empty()) - static_cast<int>(lhs_chunk.empty());
}
924 
// Slow-path three-way comparison against another Cord. Mirrors the
// string_view overload above, but iterates chunks on both sides.
inline int Cord::CompareSlowPath(const Cord& rhs, size_t compared_size,
                                 size_t size_to_compare) const {
  // Advances `*it` to the next non-empty chunk once `*chunk` is exhausted.
  // Returns false when the iterator has no bytes remaining.
  auto advance = [](Cord::ChunkIterator* it, absl::string_view* chunk) {
    if (!chunk->empty()) return true;
    ++*it;
    if (it->bytes_remaining_ == 0) return false;
    *chunk = **it;
    return true;
  };

  Cord::ChunkIterator lhs_it = chunk_begin();
  Cord::ChunkIterator rhs_it = rhs.chunk_begin();

  // compared_size is inside both first chunks.
  absl::string_view lhs_chunk =
      (lhs_it.bytes_remaining_ != 0) ? *lhs_it : absl::string_view();
  absl::string_view rhs_chunk =
      (rhs_it.bytes_remaining_ != 0) ? *rhs_it : absl::string_view();
  assert(compared_size <= lhs_chunk.size());
  assert(compared_size <= rhs_chunk.size());
  lhs_chunk.remove_prefix(compared_size);
  rhs_chunk.remove_prefix(compared_size);
  size_to_compare -= compared_size;  // skip already compared size.

  while (advance(&lhs_it, &lhs_chunk) && advance(&rhs_it, &rhs_chunk)) {
    int memcmp_res = CompareChunks(&lhs_chunk, &rhs_chunk, &size_to_compare);
    if (memcmp_res != 0) return memcmp_res;
    if (size_to_compare == 0) return 0;
  }

  // One side ran out of data first: the side with bytes left compares greater.
  return static_cast<int>(rhs_chunk.empty()) -
         static_cast<int>(lhs_chunk.empty());
}
958 
GetFirstChunk(const Cord & c)959 inline absl::string_view Cord::GetFirstChunk(const Cord& c) {
960   if (c.empty()) return {};
961   return c.contents_.FindFlatStartPiece();
962 }
// A string_view is always a single contiguous chunk; return it unchanged.
inline absl::string_view Cord::GetFirstChunk(absl::string_view sv) {
  return sv;
}
966 
// Compares up to 'size_to_compare' bytes of 'lhs' with 'rhs'. It is assumed
// that 'size_to_compare' is greater than or equal to the size of the smallest
// of the first chunks.
template <typename ResultType, typename RHS>
ResultType GenericCompare(const Cord& lhs, const RHS& rhs,
                          size_t size_to_compare) {
  absl::string_view lhs_chunk = Cord::GetFirstChunk(lhs);
  absl::string_view rhs_chunk = Cord::GetFirstChunk(rhs);

  // Fast path: decide on the first chunks alone whenever the comparison is
  // fully covered by them or a byte difference is found immediately.
  size_t compared_size = std::min(lhs_chunk.size(), rhs_chunk.size());
  assert(size_to_compare >= compared_size);
  int memcmp_res = ::memcmp(lhs_chunk.data(), rhs_chunk.data(), compared_size);
  if (compared_size == size_to_compare || memcmp_res != 0) {
    return ComputeCompareResult<ResultType>(memcmp_res);
  }

  // Otherwise fall back to chunk-by-chunk iteration.
  return ComputeCompareResult<ResultType>(
      lhs.CompareSlowPath(rhs, compared_size, size_to_compare));
}
985 
// Equality check against a string_view: delegates to GenericCompare with a
// bool result so only "equal or not" is computed, without full ordering.
bool Cord::EqualsImpl(absl::string_view rhs, size_t size_to_compare) const {
  return GenericCompare<bool>(*this, rhs, size_to_compare);
}
989 
// Equality check against another Cord: delegates to GenericCompare with a
// bool result so only "equal or not" is computed, without full ordering.
bool Cord::EqualsImpl(const Cord& rhs, size_t size_to_compare) const {
  return GenericCompare<bool>(*this, rhs, size_to_compare);
}
993 
994 template <typename RHS>
SharedCompareImpl(const Cord & lhs,const RHS & rhs)995 inline int SharedCompareImpl(const Cord& lhs, const RHS& rhs) {
996   size_t lhs_size = lhs.size();
997   size_t rhs_size = rhs.size();
998   if (lhs_size == rhs_size) {
999     return GenericCompare<int>(lhs, rhs, lhs_size);
1000   }
1001   if (lhs_size < rhs_size) {
1002     auto data_comp_res = GenericCompare<int>(lhs, rhs, lhs_size);
1003     return data_comp_res == 0 ? -1 : data_comp_res;
1004   }
1005 
1006   auto data_comp_res = GenericCompare<int>(lhs, rhs, rhs_size);
1007   return data_comp_res == 0 ? +1 : data_comp_res;
1008 }
1009 
// Three-way comparison with a string_view; delegates to SharedCompareImpl.
int Cord::Compare(absl::string_view rhs) const {
  return SharedCompareImpl(*this, rhs);
}
1013 
// Three-way comparison with another Cord; delegates to SharedCompareImpl.
int Cord::CompareImpl(const Cord& rhs) const {
  return SharedCompareImpl(*this, rhs);
}
1017 
EndsWith(absl::string_view rhs) const1018 bool Cord::EndsWith(absl::string_view rhs) const {
1019   size_t my_size = size();
1020   size_t rhs_size = rhs.size();
1021 
1022   if (my_size < rhs_size) return false;
1023 
1024   Cord tmp(*this);
1025   tmp.RemovePrefix(my_size - rhs_size);
1026   return tmp.EqualsImpl(rhs, rhs_size);
1027 }
1028 
EndsWith(const Cord & rhs) const1029 bool Cord::EndsWith(const Cord& rhs) const {
1030   size_t my_size = size();
1031   size_t rhs_size = rhs.size();
1032 
1033   if (my_size < rhs_size) return false;
1034 
1035   Cord tmp(*this);
1036   tmp.RemovePrefix(my_size - rhs_size);
1037   return tmp.EqualsImpl(rhs, rhs_size);
1038 }
1039 
1040 // --------------------------------------------------------------------
1041 // Misc.
1042 
// Explicit conversion to std::string; copies the entire cord contents.
Cord::operator std::string() const {
  std::string s;
  absl::CopyCordToString(*this, &s);
  return s;
}
1048 
CopyCordToString(const Cord & src,std::string * dst)1049 void CopyCordToString(const Cord& src, std::string* dst) {
1050   if (!src.contents_.is_tree()) {
1051     src.contents_.CopyTo(dst);
1052   } else {
1053     absl::strings_internal::STLStringResizeUninitialized(dst, src.size());
1054     src.CopyToArraySlowPath(&(*dst)[0]);
1055   }
1056 }
1057 
CopyToArraySlowPath(char * dst) const1058 void Cord::CopyToArraySlowPath(char* dst) const {
1059   assert(contents_.is_tree());
1060   absl::string_view fragment;
1061   if (GetFlatAux(contents_.tree(), &fragment)) {
1062     memcpy(dst, fragment.data(), fragment.size());
1063     return;
1064   }
1065   for (absl::string_view chunk : Chunks()) {
1066     memcpy(dst, chunk.data(), chunk.size());
1067     dst += chunk.size();
1068   }
1069 }
1070 
// Reads `n` bytes starting at the current iterator position into a new Cord
// and advances this iterator past them. `n` must not exceed bytes_remaining_.
Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
  ABSL_HARDENING_ASSERT(bytes_remaining_ >= n &&
                        "Attempted to iterate past `end()`");
  Cord subcord;
  auto constexpr method = CordzUpdateTracker::kCordReader;

  if (n <= InlineRep::kMaxInline) {
    // Range to read fits in inline data. Flatten it.
    char* data = subcord.contents_.set_data(n);
    // Consume whole chunks while the remaining request spans past the
    // current chunk.
    while (n > current_chunk_.size()) {
      memcpy(data, current_chunk_.data(), current_chunk_.size());
      data += current_chunk_.size();
      n -= current_chunk_.size();
      ++*this;
    }
    memcpy(data, current_chunk_.data(), n);
    if (n < current_chunk_.size()) {
      RemoveChunkPrefix(n);
    } else if (n > 0) {
      ++*this;
    }
    return subcord;
  }

  if (btree_reader_) {
    size_t chunk_size = current_chunk_.size();
    if (n <= chunk_size && n <= kMaxBytesToCopy) {
      // Small read within the current chunk: copy the bytes directly.
      subcord = Cord(current_chunk_.substr(0, n), method);
      if (n < chunk_size) {
        current_chunk_.remove_prefix(n);
      } else {
        current_chunk_ = btree_reader_.Next();
      }
    } else {
      // Larger read: let the btree reader assemble a shared subtree.
      CordRep* rep;
      current_chunk_ = btree_reader_.Read(n, chunk_size, rep);
      subcord.contents_.EmplaceTree(rep, method);
    }
    bytes_remaining_ -= n;
    return subcord;
  }

  // Short circuit if reading the entire data edge.
  assert(current_leaf_ != nullptr);
  if (n == current_leaf_->length) {
    bytes_remaining_ = 0;
    current_chunk_ = {};
    CordRep* tree = CordRep::Ref(current_leaf_);
    subcord.contents_.EmplaceTree(VerifyTree(tree), method);
    return subcord;
  }

  // From this point on, we need a partial substring node.
  // Get pointer to the underlying flat or external data payload and
  // compute data pointer and offset into current flat or external.
  CordRep* payload = current_leaf_->IsSubstring()
                         ? current_leaf_->substring()->child
                         : current_leaf_;
  const char* data = payload->IsExternal() ? payload->external()->base
                                           : payload->flat()->Data();
  const size_t offset = static_cast<size_t>(current_chunk_.data() - data);

  auto* tree = CordRepSubstring::Substring(payload, offset, n);
  subcord.contents_.EmplaceTree(VerifyTree(tree), method);
  bytes_remaining_ -= n;
  current_chunk_.remove_prefix(n);
  return subcord;
}
1139 
// Returns the byte at index `i`. Requires i < size(). Walks down the rep
// tree, translating the index through substring offsets as needed.
char Cord::operator[](size_t i) const {
  ABSL_HARDENING_ASSERT(i < size());
  size_t offset = i;
  const CordRep* rep = contents_.tree();
  if (rep == nullptr) {
    // Inline representation: index directly into the inline bytes.
    return contents_.data()[i];
  }
  rep = cord_internal::SkipCrcNode(rep);
  while (true) {
    assert(rep != nullptr);
    assert(offset < rep->length);
    if (rep->IsFlat()) {
      // Get the "i"th character directly from the flat array.
      return rep->flat()->Data()[offset];
    } else if (rep->IsBtree()) {
      return rep->btree()->GetCharacter(offset);
    } else if (rep->IsExternal()) {
      // Get the "i"th character from the external array.
      return rep->external()->base[offset];
    } else {
      // This must be a substring node, so bypass it to get to the child,
      // shifting the offset by the substring's start position.
      assert(rep->IsSubstring());
      offset += rep->substring()->start;
      rep = rep->substring()->child;
    }
  }
}
1167 
// Flattens a tree-backed cord into one contiguous buffer and re-roots the
// cord at the new rep. Returns a view of the flattened data.
absl::string_view Cord::FlattenSlowPath() {
  assert(contents_.is_tree());
  size_t total_size = size();
  CordRep* new_rep;
  char* new_buffer;

  // Try to put the contents into a new flat rep. If they won't fit in the
  // biggest possible flat node, use an external rep instead.
  if (total_size <= kMaxFlatLength) {
    new_rep = CordRepFlat::New(total_size);
    new_rep->length = total_size;
    new_buffer = new_rep->flat()->Data();
    CopyToArraySlowPath(new_buffer);
  } else {
    // External rep: heap buffer released by the releaser lambda when the
    // external rep is destroyed.
    new_buffer = std::allocator<char>().allocate(total_size);
    CopyToArraySlowPath(new_buffer);
    new_rep = absl::cord_internal::NewExternalRep(
        absl::string_view(new_buffer, total_size), [](absl::string_view s) {
          std::allocator<char>().deallocate(const_cast<char*>(s.data()),
                                            s.size());
        });
  }
  // Swap the new rep in under a Cordz update scope, releasing the old tree.
  CordzUpdateScope scope(contents_.cordz_info(), CordzUpdateTracker::kFlatten);
  CordRep::Unref(contents_.as_tree());
  contents_.SetTree(new_rep, scope);
  return absl::string_view(new_buffer, total_size);
}
1195 
// If `rep`'s data forms a single contiguous region, stores a view of it in
// `*fragment` and returns true. Returns false if the data is fragmented.
/* static */ bool Cord::GetFlatAux(CordRep* rep, absl::string_view* fragment) {
  assert(rep != nullptr);
  if (rep->length == 0) {
    *fragment = absl::string_view();
    return true;
  }
  rep = cord_internal::SkipCrcNode(rep);
  if (rep->IsFlat()) {
    *fragment = absl::string_view(rep->flat()->Data(), rep->length);
    return true;
  } else if (rep->IsExternal()) {
    *fragment = absl::string_view(rep->external()->base, rep->length);
    return true;
  } else if (rep->IsBtree()) {
    return rep->btree()->IsFlat(fragment);
  } else if (rep->IsSubstring()) {
    // A substring of a flat/external/btree child may still be contiguous.
    CordRep* child = rep->substring()->child;
    if (child->IsFlat()) {
      *fragment = absl::string_view(
          child->flat()->Data() + rep->substring()->start, rep->length);
      return true;
    } else if (child->IsExternal()) {
      *fragment = absl::string_view(
          child->external()->base + rep->substring()->start, rep->length);
      return true;
    } else if (child->IsBtree()) {
      return child->btree()->IsFlat(rep->substring()->start, rep->length,
                                    fragment);
    }
  }
  return false;
}
1228 
ForEachChunkAux(absl::cord_internal::CordRep * rep,absl::FunctionRef<void (absl::string_view)> callback)1229 /* static */ void Cord::ForEachChunkAux(
1230     absl::cord_internal::CordRep* rep,
1231     absl::FunctionRef<void(absl::string_view)> callback) {
1232   assert(rep != nullptr);
1233   if (rep->length == 0) return;
1234   rep = cord_internal::SkipCrcNode(rep);
1235 
1236   if (rep->IsBtree()) {
1237     ChunkIterator it(rep), end;
1238     while (it != end) {
1239       callback(*it);
1240       ++it;
1241     }
1242     return;
1243   }
1244 
1245   // This is a leaf node, so invoke our callback.
1246   absl::cord_internal::CordRep* current_node = cord_internal::SkipCrcNode(rep);
1247   absl::string_view chunk;
1248   bool success = GetFlatAux(current_node, &chunk);
1249   assert(success);
1250   if (success) {
1251     callback(chunk);
1252   }
1253 }
1254 
DumpNode(CordRep * rep,bool include_data,std::ostream * os,int indent)1255 static void DumpNode(CordRep* rep, bool include_data, std::ostream* os,
1256                      int indent) {
1257   const int kIndentStep = 1;
1258   absl::InlinedVector<CordRep*, kInlinedVectorSize> stack;
1259   absl::InlinedVector<int, kInlinedVectorSize> indents;
1260   for (;;) {
1261     *os << std::setw(3) << rep->refcount.Get();
1262     *os << " " << std::setw(7) << rep->length;
1263     *os << " [";
1264     if (include_data) *os << static_cast<void*>(rep);
1265     *os << "]";
1266     *os << " " << std::setw(indent) << "";
1267     bool leaf = false;
1268     if (rep == nullptr) {
1269       *os << "NULL\n";
1270       leaf = true;
1271     } else if (rep->IsCrc()) {
1272       *os << "CRC crc=" << rep->crc()->crc_cord_state.Checksum() << "\n";
1273       indent += kIndentStep;
1274       rep = rep->crc()->child;
1275     } else if (rep->IsSubstring()) {
1276       *os << "SUBSTRING @ " << rep->substring()->start << "\n";
1277       indent += kIndentStep;
1278       rep = rep->substring()->child;
1279     } else {  // Leaf or ring
1280       leaf = true;
1281       if (rep->IsExternal()) {
1282         *os << "EXTERNAL [";
1283         if (include_data)
1284           *os << absl::CEscape(std::string(rep->external()->base, rep->length));
1285         *os << "]\n";
1286       } else if (rep->IsFlat()) {
1287         *os << "FLAT cap=" << rep->flat()->Capacity() << " [";
1288         if (include_data)
1289           *os << absl::CEscape(std::string(rep->flat()->Data(), rep->length));
1290         *os << "]\n";
1291       } else {
1292         CordRepBtree::Dump(rep, /*label=*/ "", include_data, *os);
1293       }
1294     }
1295     if (leaf) {
1296       if (stack.empty()) break;
1297       rep = stack.back();
1298       stack.pop_back();
1299       indent = indents.back();
1300       indents.pop_back();
1301     }
1302   }
1303   ABSL_INTERNAL_CHECK(indents.empty(), "");
1304 }
1305 
// Builds a diagnostic message identifying `node` and dumping the full tree
// rooted at `root` (data included) for CHECK-failure output.
static std::string ReportError(CordRep* root, CordRep* node) {
  std::ostringstream buf;
  buf << "Error at node " << node << " in:";
  DumpNode(root, true, &buf);
  return buf.str();
}
1312 
// Iteratively validates structural invariants of the nodes reachable from
// `start_node` (non-zero lengths, flat capacity, substring bounds, CRC child
// length consistency). CHECK-fails with a tree dump on any violation;
// returns true otherwise.
static bool VerifyNode(CordRep* root, CordRep* start_node,
                       bool /* full_validation */) {
  absl::InlinedVector<CordRep*, 2> worklist;
  worklist.push_back(start_node);
  do {
    CordRep* node = worklist.back();
    worklist.pop_back();

    ABSL_INTERNAL_CHECK(node != nullptr, ReportError(root, node));
    if (node != root) {
      // Only the root may be zero-length or carry a CRC wrapper.
      ABSL_INTERNAL_CHECK(node->length != 0, ReportError(root, node));
      ABSL_INTERNAL_CHECK(!node->IsCrc(), ReportError(root, node));
    }

    if (node->IsFlat()) {
      ABSL_INTERNAL_CHECK(node->length <= node->flat()->Capacity(),
                          ReportError(root, node));
    } else if (node->IsExternal()) {
      ABSL_INTERNAL_CHECK(node->external()->base != nullptr,
                          ReportError(root, node));
    } else if (node->IsSubstring()) {
      // The substring window must lie entirely within the child rep.
      ABSL_INTERNAL_CHECK(
          node->substring()->start < node->substring()->child->length,
          ReportError(root, node));
      ABSL_INTERNAL_CHECK(node->substring()->start + node->length <=
                              node->substring()->child->length,
                          ReportError(root, node));
    } else if (node->IsCrc()) {
      // A CRC node either has no child (empty cord) or a child whose length
      // matches its own; only CRC children are pushed for further checks.
      ABSL_INTERNAL_CHECK(
          node->crc()->child != nullptr || node->crc()->length == 0,
          ReportError(root, node));
      if (node->crc()->child != nullptr) {
        ABSL_INTERNAL_CHECK(node->crc()->length == node->crc()->child->length,
                            ReportError(root, node));
        worklist.push_back(node->crc()->child);
      }
    }
  } while (!worklist.empty());
  return true;
}
1353 
// Streams the cord's contents chunk by chunk without flattening.
std::ostream& operator<<(std::ostream& out, const Cord& cord) {
  for (absl::string_view chunk : cord.Chunks()) {
    out.write(chunk.data(), static_cast<std::streamsize>(chunk.size()));
  }
  return out;
}
1360 
namespace strings_internal {
// Test-only accessors exposing cord_internal constants and helpers.
size_t CordTestAccess::FlatOverhead() { return cord_internal::kFlatOverhead; }
size_t CordTestAccess::MaxFlatLength() { return cord_internal::kMaxFlatLength; }
// Returns the flat data length encoded by `tag`.
size_t CordTestAccess::FlatTagToLength(uint8_t tag) {
  return cord_internal::TagToLength(tag);
}
// Returns the tag encoding a flat of length `s`; CHECK-fails when `s`
// exceeds kMaxFlatLength.
uint8_t CordTestAccess::LengthToTag(size_t s) {
  ABSL_INTERNAL_CHECK(s <= kMaxFlatLength, absl::StrCat("Invalid length ", s));
  return cord_internal::AllocatedSizeToTag(s + cord_internal::kFlatOverhead);
}
size_t CordTestAccess::SizeofCordRepExternal() {
  return sizeof(CordRepExternal);
}
size_t CordTestAccess::SizeofCordRepSubstring() {
  return sizeof(CordRepSubstring);
}
}  // namespace strings_internal
1378 ABSL_NAMESPACE_END
1379 }  // namespace absl
1380