1// Copyright 2020 The Abseil Authors.
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// https://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15#include "absl/strings/cord.h"
16
17#include <algorithm>
18#include <atomic>
19#include <cstddef>
20#include <cstdio>
21#include <cstdlib>
22#include <iomanip>
23#include <iostream>
24#include <limits>
25#include <ostream>
26#include <sstream>
27#include <type_traits>
28#include <unordered_set>
29#include <vector>
30
31#include "absl/base/casts.h"
32#include "absl/base/internal/raw_logging.h"
33#include "absl/base/macros.h"
34#include "absl/base/port.h"
35#include "absl/container/fixed_array.h"
36#include "absl/container/inlined_vector.h"
37#include "absl/strings/cord_buffer.h"
38#include "absl/strings/escaping.h"
39#include "absl/strings/internal/cord_data_edge.h"
40#include "absl/strings/internal/cord_internal.h"
41#include "absl/strings/internal/cord_rep_btree.h"
42#include "absl/strings/internal/cord_rep_crc.h"
43#include "absl/strings/internal/cord_rep_flat.h"
44#include "absl/strings/internal/cordz_statistics.h"
45#include "absl/strings/internal/cordz_update_scope.h"
46#include "absl/strings/internal/cordz_update_tracker.h"
47#include "absl/strings/internal/resize_uninitialized.h"
48#include "absl/strings/str_cat.h"
49#include "absl/strings/str_format.h"
50#include "absl/strings/str_join.h"
51#include "absl/strings/string_view.h"
52
53namespace absl {
54ABSL_NAMESPACE_BEGIN
55
56using ::absl::cord_internal::CordRep;
57using ::absl::cord_internal::CordRepBtree;
58using ::absl::cord_internal::CordRepCrc;
59using ::absl::cord_internal::CordRepExternal;
60using ::absl::cord_internal::CordRepFlat;
61using ::absl::cord_internal::CordRepSubstring;
62using ::absl::cord_internal::CordzUpdateTracker;
63using ::absl::cord_internal::InlineData;
64using ::absl::cord_internal::kMaxFlatLength;
65using ::absl::cord_internal::kMinFlatLength;
66
67using ::absl::cord_internal::kInlinedVectorSize;
68using ::absl::cord_internal::kMaxBytesToCopy;
69
70static void DumpNode(CordRep* rep, bool include_data, std::ostream* os,
71 int indent = 0);
72static bool VerifyNode(CordRep* root, CordRep* start_node,
73 bool full_validation);
74
static inline CordRep* VerifyTree(CordRep* node) {
  // Verification is expensive, so only do it in debug mode.
  // Even in debug mode we normally do only light validation.
  // If you are debugging Cord itself, you should define the
  // macro EXTRA_CORD_VALIDATION, e.g. by adding
  // --copt=-DEXTRA_CORD_VALIDATION to the blaze line.
#ifdef EXTRA_CORD_VALIDATION
  assert(node == nullptr || VerifyNode(node, node, /*full_validation=*/true));
#else   // EXTRA_CORD_VALIDATION
  assert(node == nullptr || VerifyNode(node, node, /*full_validation=*/false));
#endif  // EXTRA_CORD_VALIDATION
  // Keep VerifyNode referenced even in NDEBUG builds, where the asserts
  // compile away and would otherwise leave the function unused.
  static_cast<void>(&VerifyNode);

  return node;
}
90
91static CordRepFlat* CreateFlat(const char* data, size_t length,
92 size_t alloc_hint) {
93 CordRepFlat* flat = CordRepFlat::New(len: length + alloc_hint);
94 flat->length = length;
95 memcpy(dest: flat->Data(), src: data, n: length);
96 return flat;
97}
98
99// Creates a new flat or Btree out of the specified array.
100// The returned node has a refcount of 1.
101static CordRep* NewBtree(const char* data, size_t length, size_t alloc_hint) {
102 if (length <= kMaxFlatLength) {
103 return CreateFlat(data, length, alloc_hint);
104 }
105 CordRepFlat* flat = CreateFlat(data, length: kMaxFlatLength, alloc_hint: 0);
106 data += kMaxFlatLength;
107 length -= kMaxFlatLength;
108 auto* root = CordRepBtree::Create(rep: flat);
109 return CordRepBtree::Append(tree: root, data: {data, length}, extra: alloc_hint);
110}
111
112// Create a new tree out of the specified array.
113// The returned node has a refcount of 1.
114static CordRep* NewTree(const char* data, size_t length, size_t alloc_hint) {
115 if (length == 0) return nullptr;
116 return NewBtree(data, length, alloc_hint);
117}
118
119namespace cord_internal {
120
121void InitializeCordRepExternal(absl::string_view data, CordRepExternal* rep) {
122 assert(!data.empty());
123 rep->length = data.size();
124 rep->tag = EXTERNAL;
125 rep->base = data.data();
126 VerifyTree(node: rep);
127}
128
129} // namespace cord_internal
130
131// Creates a CordRep from the provided string. If the string is large enough,
132// and not wasteful, we move the string into an external cord rep, preserving
133// the already allocated string contents.
134// Requires the provided string length to be larger than `kMaxInline`.
135static CordRep* CordRepFromString(std::string&& src) {
136 assert(src.length() > cord_internal::kMaxInline);
137 if (
138 // String is short: copy data to avoid external block overhead.
139 src.size() <= kMaxBytesToCopy ||
140 // String is wasteful: copy data to avoid pinning too much unused memory.
141 src.size() < src.capacity() / 2
142 ) {
143 return NewTree(data: src.data(), length: src.size(), alloc_hint: 0);
144 }
145
146 struct StringReleaser {
147 void operator()(absl::string_view /* data */) {}
148 std::string data;
149 };
150 const absl::string_view original_data = src;
151 auto* rep =
152 static_cast<::absl::cord_internal::CordRepExternalImpl<StringReleaser>*>(
153 absl::cord_internal::NewExternalRep(data: original_data,
154 releaser: StringReleaser{.data: std::move(src)}));
155 // Moving src may have invalidated its data pointer, so adjust it.
156 rep->base = rep->template get<0>().data.data();
157 return rep;
158}
159
160// --------------------------------------------------------------------
161// Cord::InlineRep functions
162
#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL
// Redundant namespace-scope definition of the in-class constexpr member,
// required by pre-C++17 toolchains that still ODR-use it.
constexpr unsigned char Cord::InlineRep::kMaxInline;
#endif
166
167inline void Cord::InlineRep::set_data(const char* data, size_t n) {
168 static_assert(kMaxInline == 15, "set_data is hard-coded for a length of 15");
169
170 cord_internal::SmallMemmove<true>(dst: data_.as_chars(), src: data, n);
171 set_inline_size(n);
172}
173
// Resets the rep to an empty inline state of size `n` and returns a pointer
// to the inline character buffer for the caller to fill in.
inline char* Cord::InlineRep::set_data(size_t n) {
  assert(n <= kMaxInline);
  ResetToEmpty();
  set_inline_size(n);
  return data_.as_chars();
}
180
181inline void Cord::InlineRep::reduce_size(size_t n) {
182 size_t tag = inline_size();
183 assert(tag <= kMaxInline);
184 assert(tag >= n);
185 tag -= n;
186 memset(s: data_.as_chars() + tag, c: 0, n: n);
187 set_inline_size(static_cast<char>(tag));
188}
189
190inline void Cord::InlineRep::remove_prefix(size_t n) {
191 cord_internal::SmallMemmove(dst: data_.as_chars(), src: data_.as_chars() + n,
192 n: inline_size() - n);
193 reduce_size(n);
194}
195
196// Returns `rep` converted into a CordRepBtree.
197// Directly returns `rep` if `rep` is already a CordRepBtree.
198static CordRepBtree* ForceBtree(CordRep* rep) {
199 return rep->IsBtree()
200 ? rep->btree()
201 : CordRepBtree::Create(rep: cord_internal::RemoveCrcNode(rep));
202}
203
204void Cord::InlineRep::AppendTreeToInlined(CordRep* tree,
205 MethodIdentifier method) {
206 assert(!is_tree());
207 if (!data_.is_empty()) {
208 CordRepFlat* flat = MakeFlatWithExtraCapacity(extra: 0);
209 tree = CordRepBtree::Append(tree: CordRepBtree::Create(rep: flat), rep: tree);
210 }
211 EmplaceTree(rep: tree, method);
212}
213
214void Cord::InlineRep::AppendTreeToTree(CordRep* tree, MethodIdentifier method) {
215 assert(is_tree());
216 const CordzUpdateScope scope(data_.cordz_info(), method);
217 tree = CordRepBtree::Append(tree: ForceBtree(rep: data_.as_tree()), rep: tree);
218 SetTree(rep: tree, scope);
219}
220
221void Cord::InlineRep::AppendTree(CordRep* tree, MethodIdentifier method) {
222 assert(tree != nullptr);
223 assert(tree->length != 0);
224 assert(!tree->IsCrc());
225 if (data_.is_tree()) {
226 AppendTreeToTree(tree, method);
227 } else {
228 AppendTreeToInlined(tree, method);
229 }
230}
231
232void Cord::InlineRep::PrependTreeToInlined(CordRep* tree,
233 MethodIdentifier method) {
234 assert(!is_tree());
235 if (!data_.is_empty()) {
236 CordRepFlat* flat = MakeFlatWithExtraCapacity(extra: 0);
237 tree = CordRepBtree::Prepend(tree: CordRepBtree::Create(rep: flat), rep: tree);
238 }
239 EmplaceTree(rep: tree, method);
240}
241
242void Cord::InlineRep::PrependTreeToTree(CordRep* tree,
243 MethodIdentifier method) {
244 assert(is_tree());
245 const CordzUpdateScope scope(data_.cordz_info(), method);
246 tree = CordRepBtree::Prepend(tree: ForceBtree(rep: data_.as_tree()), rep: tree);
247 SetTree(rep: tree, scope);
248}
249
250void Cord::InlineRep::PrependTree(CordRep* tree, MethodIdentifier method) {
251 assert(tree != nullptr);
252 assert(tree->length != 0);
253 assert(!tree->IsCrc());
254 if (data_.is_tree()) {
255 PrependTreeToTree(tree, method);
256 } else {
257 PrependTreeToInlined(tree, method);
258 }
259}
260
261// Searches for a non-full flat node at the rightmost leaf of the tree. If a
262// suitable leaf is found, the function will update the length field for all
263// nodes to account for the size increase. The append region address will be
264// written to region and the actual size increase will be written to size.
265static inline bool PrepareAppendRegion(CordRep* root, char** region,
266 size_t* size, size_t max_length) {
267 if (root->IsBtree() && root->refcount.IsOne()) {
268 Span<char> span = root->btree()->GetAppendBuffer(size: max_length);
269 if (!span.empty()) {
270 *region = span.data();
271 *size = span.size();
272 return true;
273 }
274 }
275
276 CordRep* dst = root;
277 if (!dst->IsFlat() || !dst->refcount.IsOne()) {
278 *region = nullptr;
279 *size = 0;
280 return false;
281 }
282
283 const size_t in_use = dst->length;
284 const size_t capacity = dst->flat()->Capacity();
285 if (in_use == capacity) {
286 *region = nullptr;
287 *size = 0;
288 return false;
289 }
290
291 const size_t size_increase = std::min(a: capacity - in_use, b: max_length);
292 dst->length += size_increase;
293
294 *region = dst->flat()->Data() + in_use;
295 *size = size_increase;
296 return true;
297}
298
299void Cord::InlineRep::AssignSlow(const Cord::InlineRep& src) {
300 assert(&src != this);
301 assert(is_tree() || src.is_tree());
302 auto constexpr method = CordzUpdateTracker::kAssignCord;
303 if (ABSL_PREDICT_TRUE(!is_tree())) {
304 EmplaceTree(rep: CordRep::Ref(rep: src.as_tree()), parent: src.data_, method);
305 return;
306 }
307
308 CordRep* tree = as_tree();
309 if (CordRep* src_tree = src.tree()) {
310 // Leave any existing `cordz_info` in place, and let MaybeTrackCord()
311 // decide if this cord should be (or remains to be) sampled or not.
312 data_.set_tree(CordRep::Ref(rep: src_tree));
313 CordzInfo::MaybeTrackCord(cord&: data_, src: src.data_, method);
314 } else {
315 CordzInfo::MaybeUntrackCord(info: data_.cordz_info());
316 data_ = src.data_;
317 }
318 CordRep::Unref(rep: tree);
319}
320
321void Cord::InlineRep::UnrefTree() {
322 if (is_tree()) {
323 CordzInfo::MaybeUntrackCord(info: data_.cordz_info());
324 CordRep::Unref(rep: tree());
325 }
326}
327
328// --------------------------------------------------------------------
329// Constructors and destructors
330
331Cord::Cord(absl::string_view src, MethodIdentifier method)
332 : contents_(InlineData::kDefaultInit) {
333 const size_t n = src.size();
334 if (n <= InlineRep::kMaxInline) {
335 contents_.set_data(data: src.data(), n);
336 } else {
337 CordRep* rep = NewTree(data: src.data(), length: n, alloc_hint: 0);
338 contents_.EmplaceTree(rep, method);
339 }
340}
341
342template <typename T, Cord::EnableIfString<T>>
343Cord::Cord(T&& src) : contents_(InlineData::kDefaultInit) {
344 if (src.size() <= InlineRep::kMaxInline) {
345 contents_.set_data(src.data(), src.size());
346 } else {
347 CordRep* rep = CordRepFromString(std::forward<T>(src));
348 contents_.EmplaceTree(rep, method: CordzUpdateTracker::kConstructorString);
349 }
350}
351
352template Cord::Cord(std::string&& src);
353
354// The destruction code is separate so that the compiler can determine
355// that it does not need to call the destructor on a moved-from Cord.
356void Cord::DestroyCordSlow() {
357 assert(contents_.is_tree());
358 CordzInfo::MaybeUntrackCord(info: contents_.cordz_info());
359 CordRep::Unref(rep: VerifyTree(node: contents_.as_tree()));
360}
361
362// --------------------------------------------------------------------
363// Mutators
364
365void Cord::Clear() {
366 if (CordRep* tree = contents_.clear()) {
367 CordRep::Unref(rep: tree);
368 }
369}
370
371Cord& Cord::AssignLargeString(std::string&& src) {
372 auto constexpr method = CordzUpdateTracker::kAssignString;
373 assert(src.size() > kMaxBytesToCopy);
374 CordRep* rep = CordRepFromString(src: std::move(src));
375 if (CordRep* tree = contents_.tree()) {
376 CordzUpdateScope scope(contents_.cordz_info(), method);
377 contents_.SetTree(rep, scope);
378 CordRep::Unref(rep: tree);
379 } else {
380 contents_.EmplaceTree(rep, method);
381 }
382 return *this;
383}
384
385Cord& Cord::operator=(absl::string_view src) {
386 auto constexpr method = CordzUpdateTracker::kAssignString;
387 const char* data = src.data();
388 size_t length = src.size();
389 CordRep* tree = contents_.tree();
390 if (length <= InlineRep::kMaxInline) {
391 // Embed into this->contents_, which is somewhat subtle:
392 // - MaybeUntrackCord must be called before Unref(tree).
393 // - MaybeUntrackCord must be called before set_data() clobbers cordz_info.
394 // - set_data() must be called before Unref(tree) as it may reference tree.
395 if (tree != nullptr) CordzInfo::MaybeUntrackCord(info: contents_.cordz_info());
396 contents_.set_data(data, n: length);
397 if (tree != nullptr) CordRep::Unref(rep: tree);
398 return *this;
399 }
400 if (tree != nullptr) {
401 CordzUpdateScope scope(contents_.cordz_info(), method);
402 if (tree->IsFlat() && tree->flat()->Capacity() >= length &&
403 tree->refcount.IsOne()) {
404 // Copy in place if the existing FLAT node is reusable.
405 memmove(dest: tree->flat()->Data(), src: data, n: length);
406 tree->length = length;
407 VerifyTree(node: tree);
408 return *this;
409 }
410 contents_.SetTree(rep: NewTree(data, length, alloc_hint: 0), scope);
411 CordRep::Unref(rep: tree);
412 } else {
413 contents_.EmplaceTree(rep: NewTree(data, length, alloc_hint: 0), method);
414 }
415 return *this;
416}
417
418// TODO(sanjay): Move to Cord::InlineRep section of file. For now,
419// we keep it here to make diffs easier.
420void Cord::InlineRep::AppendArray(absl::string_view src,
421 MethodIdentifier method) {
422 if (src.empty()) return; // memcpy(_, nullptr, 0) is undefined.
423
424 size_t appended = 0;
425 CordRep* rep = tree();
426 const CordRep* const root = rep;
427 CordzUpdateScope scope(root ? cordz_info() : nullptr, method);
428 if (root != nullptr) {
429 rep = cord_internal::RemoveCrcNode(rep);
430 char* region;
431 if (PrepareAppendRegion(root: rep, region: &region, size: &appended, max_length: src.size())) {
432 memcpy(dest: region, src: src.data(), n: appended);
433 }
434 } else {
435 // Try to fit in the inline buffer if possible.
436 size_t inline_length = inline_size();
437 if (src.size() <= kMaxInline - inline_length) {
438 // Append new data to embedded array
439 memcpy(dest: data_.as_chars() + inline_length, src: src.data(), n: src.size());
440 set_inline_size(inline_length + src.size());
441 return;
442 }
443
444 // Allocate flat to be a perfect fit on first append exceeding inlined size.
445 // Subsequent growth will use amortized growth until we reach maximum flat
446 // size.
447 rep = CordRepFlat::New(len: inline_length + src.size());
448 appended = std::min(a: src.size(), b: rep->flat()->Capacity() - inline_length);
449 memcpy(dest: rep->flat()->Data(), src: data_.as_chars(), n: inline_length);
450 memcpy(dest: rep->flat()->Data() + inline_length, src: src.data(), n: appended);
451 rep->length = inline_length + appended;
452 }
453
454 src.remove_prefix(n: appended);
455 if (src.empty()) {
456 CommitTree(old_rep: root, rep, scope, method);
457 return;
458 }
459
460 // TODO(b/192061034): keep legacy 10% growth rate: consider other rates.
461 rep = ForceBtree(rep);
462 const size_t min_growth = std::max<size_t>(a: rep->length / 10, b: src.size());
463 rep = CordRepBtree::Append(tree: rep->btree(), data: src, extra: min_growth - src.size());
464
465 CommitTree(old_rep: root, rep, scope, method);
466}
467
468inline CordRep* Cord::TakeRep() const& {
469 return CordRep::Ref(rep: contents_.tree());
470}
471
472inline CordRep* Cord::TakeRep() && {
473 CordRep* rep = contents_.tree();
474 contents_.clear();
475 return rep;
476}
477
478template <typename C>
479inline void Cord::AppendImpl(C&& src) {
480 auto constexpr method = CordzUpdateTracker::kAppendCord;
481 if (empty()) {
482 // Since destination is empty, we can avoid allocating a node,
483 if (src.contents_.is_tree()) {
484 // by taking the tree directly
485 CordRep* rep =
486 cord_internal::RemoveCrcNode(rep: std::forward<C>(src).TakeRep());
487 contents_.EmplaceTree(rep, method);
488 } else {
489 // or copying over inline data
490 contents_.data_ = src.contents_.data_;
491 }
492 return;
493 }
494
495 // For short cords, it is faster to copy data if there is room in dst.
496 const size_t src_size = src.contents_.size();
497 if (src_size <= kMaxBytesToCopy) {
498 CordRep* src_tree = src.contents_.tree();
499 if (src_tree == nullptr) {
500 // src has embedded data.
501 contents_.AppendArray(src: {src.contents_.data(), src_size}, method);
502 return;
503 }
504 if (src_tree->IsFlat()) {
505 // src tree just has one flat node.
506 contents_.AppendArray(src: {src_tree->flat()->Data(), src_size}, method);
507 return;
508 }
509 if (&src == this) {
510 // ChunkIterator below assumes that src is not modified during traversal.
511 Append(src: Cord(src));
512 return;
513 }
514 // TODO(mec): Should we only do this if "dst" has space?
515 for (absl::string_view chunk : src.Chunks()) {
516 Append(src: chunk);
517 }
518 return;
519 }
520
521 // Guaranteed to be a tree (kMaxBytesToCopy > kInlinedSize)
522 CordRep* rep = cord_internal::RemoveCrcNode(rep: std::forward<C>(src).TakeRep());
523 contents_.AppendTree(tree: rep, method: CordzUpdateTracker::kAppendCord);
524}
525
526static CordRep::ExtractResult ExtractAppendBuffer(CordRep* rep,
527 size_t min_capacity) {
528 switch (rep->tag) {
529 case cord_internal::BTREE:
530 return CordRepBtree::ExtractAppendBuffer(tree: rep->btree(), extra_capacity: min_capacity);
531 default:
532 if (rep->IsFlat() && rep->refcount.IsOne() &&
533 rep->flat()->Capacity() - rep->length >= min_capacity) {
534 return {.tree: nullptr, .extracted: rep};
535 }
536 return {.tree: rep, .extracted: nullptr};
537 }
538}
539
540static CordBuffer CreateAppendBuffer(InlineData& data, size_t block_size,
541 size_t capacity) {
542 // Watch out for overflow, people can ask for size_t::max().
543 const size_t size = data.inline_size();
544 const size_t max_capacity = std::numeric_limits<size_t>::max() - size;
545 capacity = (std::min)(a: max_capacity, b: capacity) + size;
546 CordBuffer buffer =
547 block_size ? CordBuffer::CreateWithCustomLimit(block_size, capacity)
548 : CordBuffer::CreateWithDefaultLimit(capacity);
549 cord_internal::SmallMemmove(dst: buffer.data(), src: data.as_chars(), n: size);
550 buffer.SetLength(size);
551 data = {};
552 return buffer;
553}
554
555CordBuffer Cord::GetAppendBufferSlowPath(size_t block_size, size_t capacity,
556 size_t min_capacity) {
557 auto constexpr method = CordzUpdateTracker::kGetAppendBuffer;
558 CordRep* tree = contents_.tree();
559 if (tree != nullptr) {
560 CordzUpdateScope scope(contents_.cordz_info(), method);
561 CordRep::ExtractResult result = ExtractAppendBuffer(rep: tree, min_capacity);
562 if (result.extracted != nullptr) {
563 contents_.SetTreeOrEmpty(rep: result.tree, scope);
564 return CordBuffer(result.extracted->flat());
565 }
566 return block_size ? CordBuffer::CreateWithCustomLimit(block_size, capacity)
567 : CordBuffer::CreateWithDefaultLimit(capacity);
568 }
569 return CreateAppendBuffer(data&: contents_.data_, block_size, capacity);
570}
571
// Appends a copy of `src`; delegates to the lvalue overload of AppendImpl.
void Cord::Append(const Cord& src) {
  AppendImpl(src);
}
575
576void Cord::Append(Cord&& src) {
577 AppendImpl(src: std::move(src));
578}
579
580template <typename T, Cord::EnableIfString<T>>
581void Cord::Append(T&& src) {
582 if (src.size() <= kMaxBytesToCopy) {
583 Append(src: absl::string_view(src));
584 } else {
585 CordRep* rep = CordRepFromString(std::forward<T>(src));
586 contents_.AppendTree(tree: rep, method: CordzUpdateTracker::kAppendString);
587 }
588}
589
590template void Cord::Append(std::string&& src);
591
592void Cord::Prepend(const Cord& src) {
593 CordRep* src_tree = src.contents_.tree();
594 if (src_tree != nullptr) {
595 CordRep::Ref(rep: src_tree);
596 contents_.PrependTree(tree: cord_internal::RemoveCrcNode(rep: src_tree),
597 method: CordzUpdateTracker::kPrependCord);
598 return;
599 }
600
601 // `src` cord is inlined.
602 absl::string_view src_contents(src.contents_.data(), src.contents_.size());
603 return Prepend(src: src_contents);
604}
605
606void Cord::PrependArray(absl::string_view src, MethodIdentifier method) {
607 if (src.empty()) return; // memcpy(_, nullptr, 0) is undefined.
608 if (!contents_.is_tree()) {
609 size_t cur_size = contents_.inline_size();
610 if (cur_size + src.size() <= InlineRep::kMaxInline) {
611 // Use embedded storage.
612 char data[InlineRep::kMaxInline + 1] = {0};
613 memcpy(dest: data, src: src.data(), n: src.size());
614 memcpy(dest: data + src.size(), src: contents_.data(), n: cur_size);
615 memcpy(dest: contents_.data_.as_chars(), src: data, n: InlineRep::kMaxInline + 1);
616 contents_.set_inline_size(cur_size + src.size());
617 return;
618 }
619 }
620 CordRep* rep = NewTree(data: src.data(), length: src.size(), alloc_hint: 0);
621 contents_.PrependTree(tree: rep, method);
622}
623
624void Cord::AppendPrecise(absl::string_view src, MethodIdentifier method) {
625 assert(!src.empty());
626 assert(src.size() <= cord_internal::kMaxFlatLength);
627 if (contents_.remaining_inline_capacity() >= src.size()) {
628 const size_t inline_length = contents_.inline_size();
629 memcpy(dest: contents_.data_.as_chars() + inline_length, src: src.data(), n: src.size());
630 contents_.set_inline_size(inline_length + src.size());
631 } else {
632 contents_.AppendTree(tree: CordRepFlat::Create(data: src), method);
633 }
634}
635
636void Cord::PrependPrecise(absl::string_view src, MethodIdentifier method) {
637 assert(!src.empty());
638 assert(src.size() <= cord_internal::kMaxFlatLength);
639 if (contents_.remaining_inline_capacity() >= src.size()) {
640 const size_t inline_length = contents_.inline_size();
641 char data[InlineRep::kMaxInline + 1] = {0};
642 memcpy(dest: data, src: src.data(), n: src.size());
643 memcpy(dest: data + src.size(), src: contents_.data(), n: inline_length);
644 memcpy(dest: contents_.data_.as_chars(), src: data, n: InlineRep::kMaxInline + 1);
645 contents_.set_inline_size(inline_length + src.size());
646 } else {
647 contents_.PrependTree(tree: CordRepFlat::Create(data: src), method);
648 }
649}
650
651template <typename T, Cord::EnableIfString<T>>
652inline void Cord::Prepend(T&& src) {
653 if (src.size() <= kMaxBytesToCopy) {
654 Prepend(src: absl::string_view(src));
655 } else {
656 CordRep* rep = CordRepFromString(std::forward<T>(src));
657 contents_.PrependTree(tree: rep, method: CordzUpdateTracker::kPrependString);
658 }
659}
660
661template void Cord::Prepend(std::string&& src);
662
663void Cord::RemovePrefix(size_t n) {
664 ABSL_INTERNAL_CHECK(n <= size(),
665 absl::StrCat("Requested prefix size ", n,
666 " exceeds Cord's size ", size()));
667 CordRep* tree = contents_.tree();
668 if (tree == nullptr) {
669 contents_.remove_prefix(n);
670 } else {
671 auto constexpr method = CordzUpdateTracker::kRemovePrefix;
672 CordzUpdateScope scope(contents_.cordz_info(), method);
673 tree = cord_internal::RemoveCrcNode(rep: tree);
674 if (n >= tree->length) {
675 CordRep::Unref(rep: tree);
676 tree = nullptr;
677 } else if (tree->IsBtree()) {
678 CordRep* old = tree;
679 tree = tree->btree()->SubTree(offset: n, n: tree->length - n);
680 CordRep::Unref(rep: old);
681 } else if (tree->IsSubstring() && tree->refcount.IsOne()) {
682 tree->substring()->start += n;
683 tree->length -= n;
684 } else {
685 CordRep* rep = CordRepSubstring::Substring(rep: tree, pos: n, n: tree->length - n);
686 CordRep::Unref(rep: tree);
687 tree = rep;
688 }
689 contents_.SetTreeOrEmpty(rep: tree, scope);
690 }
691}
692
693void Cord::RemoveSuffix(size_t n) {
694 ABSL_INTERNAL_CHECK(n <= size(),
695 absl::StrCat("Requested suffix size ", n,
696 " exceeds Cord's size ", size()));
697 CordRep* tree = contents_.tree();
698 if (tree == nullptr) {
699 contents_.reduce_size(n);
700 } else {
701 auto constexpr method = CordzUpdateTracker::kRemoveSuffix;
702 CordzUpdateScope scope(contents_.cordz_info(), method);
703 tree = cord_internal::RemoveCrcNode(rep: tree);
704 if (n >= tree->length) {
705 CordRep::Unref(rep: tree);
706 tree = nullptr;
707 } else if (tree->IsBtree()) {
708 tree = CordRepBtree::RemoveSuffix(tree: tree->btree(), n);
709 } else if (!tree->IsExternal() && tree->refcount.IsOne()) {
710 assert(tree->IsFlat() || tree->IsSubstring());
711 tree->length -= n;
712 } else {
713 CordRep* rep = CordRepSubstring::Substring(rep: tree, pos: 0, n: tree->length - n);
714 CordRep::Unref(rep: tree);
715 tree = rep;
716 }
717 contents_.SetTreeOrEmpty(rep: tree, scope);
718 }
719}
720
721Cord Cord::Subcord(size_t pos, size_t new_size) const {
722 Cord sub_cord;
723 size_t length = size();
724 if (pos > length) pos = length;
725 if (new_size > length - pos) new_size = length - pos;
726 if (new_size == 0) return sub_cord;
727
728 CordRep* tree = contents_.tree();
729 if (tree == nullptr) {
730 sub_cord.contents_.set_data(data: contents_.data() + pos, n: new_size);
731 return sub_cord;
732 }
733
734 if (new_size <= InlineRep::kMaxInline) {
735 char* dest = sub_cord.contents_.data_.as_chars();
736 Cord::ChunkIterator it = chunk_begin();
737 it.AdvanceBytes(n: pos);
738 size_t remaining_size = new_size;
739 while (remaining_size > it->size()) {
740 cord_internal::SmallMemmove(dst: dest, src: it->data(), n: it->size());
741 remaining_size -= it->size();
742 dest += it->size();
743 ++it;
744 }
745 cord_internal::SmallMemmove(dst: dest, src: it->data(), n: remaining_size);
746 sub_cord.contents_.set_inline_size(new_size);
747 return sub_cord;
748 }
749
750 tree = cord_internal::SkipCrcNode(rep: tree);
751 if (tree->IsBtree()) {
752 tree = tree->btree()->SubTree(offset: pos, n: new_size);
753 } else {
754 tree = CordRepSubstring::Substring(rep: tree, pos, n: new_size);
755 }
756 sub_cord.contents_.EmplaceTree(rep: tree, parent: contents_.data_,
757 method: CordzUpdateTracker::kSubCord);
758 return sub_cord;
759}
760
761// --------------------------------------------------------------------
762// Comparators
763
764namespace {
765
766int ClampResult(int memcmp_res) {
767 return static_cast<int>(memcmp_res > 0) - static_cast<int>(memcmp_res < 0);
768}
769
770int CompareChunks(absl::string_view* lhs, absl::string_view* rhs,
771 size_t* size_to_compare) {
772 size_t compared_size = std::min(a: lhs->size(), b: rhs->size());
773 assert(*size_to_compare >= compared_size);
774 *size_to_compare -= compared_size;
775
776 int memcmp_res = ::memcmp(s1: lhs->data(), s2: rhs->data(), n: compared_size);
777 if (memcmp_res != 0) return memcmp_res;
778
779 lhs->remove_prefix(n: compared_size);
780 rhs->remove_prefix(n: compared_size);
781
782 return 0;
783}
784
785// This overload set computes comparison results from memcmp result. This
786// interface is used inside GenericCompare below. Differet implementations
787// are specialized for int and bool. For int we clamp result to {-1, 0, 1}
788// set. For bool we just interested in "value == 0".
789template <typename ResultType>
790ResultType ComputeCompareResult(int memcmp_res) {
791 return ClampResult(memcmp_res);
792}
793template <>
794bool ComputeCompareResult<bool>(int memcmp_res) {
795 return memcmp_res == 0;
796}
797
798} // namespace
799
800// Helper routine. Locates the first flat or external chunk of the Cord without
801// initializing the iterator, and returns a string_view referencing the data.
802inline absl::string_view Cord::InlineRep::FindFlatStartPiece() const {
803 if (!is_tree()) {
804 return absl::string_view(data_.as_chars(), data_.inline_size());
805 }
806
807 CordRep* node = cord_internal::SkipCrcNode(rep: tree());
808 if (node->IsFlat()) {
809 return absl::string_view(node->flat()->Data(), node->length);
810 }
811
812 if (node->IsExternal()) {
813 return absl::string_view(node->external()->base, node->length);
814 }
815
816 if (node->IsBtree()) {
817 CordRepBtree* tree = node->btree();
818 int height = tree->height();
819 while (--height >= 0) {
820 tree = tree->Edge(edge_type: CordRepBtree::kFront)->btree();
821 }
822 return tree->Data(index: tree->begin());
823 }
824
825 // Get the child node if we encounter a SUBSTRING.
826 size_t offset = 0;
827 size_t length = node->length;
828 assert(length != 0);
829
830 if (node->IsSubstring()) {
831 offset = node->substring()->start;
832 node = node->substring()->child;
833 }
834
835 if (node->IsFlat()) {
836 return absl::string_view(node->flat()->Data() + offset, length);
837 }
838
839 assert(node->IsExternal() && "Expect FLAT or EXTERNAL node here");
840
841 return absl::string_view(node->external()->base + offset, length);
842}
843
844void Cord::SetExpectedChecksum(uint32_t crc) {
845 auto constexpr method = CordzUpdateTracker::kSetExpectedChecksum;
846 if (empty()) return;
847
848 if (!contents_.is_tree()) {
849 CordRep* rep = contents_.MakeFlatWithExtraCapacity(extra: 0);
850 rep = CordRepCrc::New(child: rep, crc);
851 contents_.EmplaceTree(rep, method);
852 } else {
853 const CordzUpdateScope scope(contents_.data_.cordz_info(), method);
854 CordRep* rep = CordRepCrc::New(child: contents_.data_.as_tree(), crc);
855 contents_.SetTree(rep, scope);
856 }
857}
858
859absl::optional<uint32_t> Cord::ExpectedChecksum() const {
860 if (!contents_.is_tree() || !contents_.tree()->IsCrc()) {
861 return absl::nullopt;
862 }
863 return contents_.tree()->crc()->crc;
864}
865
866inline int Cord::CompareSlowPath(absl::string_view rhs, size_t compared_size,
867 size_t size_to_compare) const {
868 auto advance = [](Cord::ChunkIterator* it, absl::string_view* chunk) {
869 if (!chunk->empty()) return true;
870 ++*it;
871 if (it->bytes_remaining_ == 0) return false;
872 *chunk = **it;
873 return true;
874 };
875
876 Cord::ChunkIterator lhs_it = chunk_begin();
877
878 // compared_size is inside first chunk.
879 absl::string_view lhs_chunk =
880 (lhs_it.bytes_remaining_ != 0) ? *lhs_it : absl::string_view();
881 assert(compared_size <= lhs_chunk.size());
882 assert(compared_size <= rhs.size());
883 lhs_chunk.remove_prefix(n: compared_size);
884 rhs.remove_prefix(n: compared_size);
885 size_to_compare -= compared_size; // skip already compared size.
886
887 while (advance(&lhs_it, &lhs_chunk) && !rhs.empty()) {
888 int comparison_result = CompareChunks(lhs: &lhs_chunk, rhs: &rhs, size_to_compare: &size_to_compare);
889 if (comparison_result != 0) return comparison_result;
890 if (size_to_compare == 0) return 0;
891 }
892
893 return static_cast<int>(rhs.empty()) - static_cast<int>(lhs_chunk.empty());
894}
895
896inline int Cord::CompareSlowPath(const Cord& rhs, size_t compared_size,
897 size_t size_to_compare) const {
898 auto advance = [](Cord::ChunkIterator* it, absl::string_view* chunk) {
899 if (!chunk->empty()) return true;
900 ++*it;
901 if (it->bytes_remaining_ == 0) return false;
902 *chunk = **it;
903 return true;
904 };
905
906 Cord::ChunkIterator lhs_it = chunk_begin();
907 Cord::ChunkIterator rhs_it = rhs.chunk_begin();
908
909 // compared_size is inside both first chunks.
910 absl::string_view lhs_chunk =
911 (lhs_it.bytes_remaining_ != 0) ? *lhs_it : absl::string_view();
912 absl::string_view rhs_chunk =
913 (rhs_it.bytes_remaining_ != 0) ? *rhs_it : absl::string_view();
914 assert(compared_size <= lhs_chunk.size());
915 assert(compared_size <= rhs_chunk.size());
916 lhs_chunk.remove_prefix(n: compared_size);
917 rhs_chunk.remove_prefix(n: compared_size);
918 size_to_compare -= compared_size; // skip already compared size.
919
920 while (advance(&lhs_it, &lhs_chunk) && advance(&rhs_it, &rhs_chunk)) {
921 int memcmp_res = CompareChunks(lhs: &lhs_chunk, rhs: &rhs_chunk, size_to_compare: &size_to_compare);
922 if (memcmp_res != 0) return memcmp_res;
923 if (size_to_compare == 0) return 0;
924 }
925
926 return static_cast<int>(rhs_chunk.empty()) -
927 static_cast<int>(lhs_chunk.empty());
928}
929
// Returns the leading flat data piece of `c` for fast-path comparison
// (delegates to InlineRep::FindFlatStartPiece).
inline absl::string_view Cord::GetFirstChunk(const Cord& c) {
  return c.contents_.FindFlatStartPiece();
}
// string_view overload: the whole view is a single contiguous "chunk".
inline absl::string_view Cord::GetFirstChunk(absl::string_view sv) {
  return sv;
}
936
937// Compares up to 'size_to_compare' bytes of 'lhs' with 'rhs'. It is assumed
938// that 'size_to_compare' is greater that size of smallest of first chunks.
939template <typename ResultType, typename RHS>
940ResultType GenericCompare(const Cord& lhs, const RHS& rhs,
941 size_t size_to_compare) {
942 absl::string_view lhs_chunk = Cord::GetFirstChunk(c: lhs);
943 absl::string_view rhs_chunk = Cord::GetFirstChunk(rhs);
944
945 size_t compared_size = std::min(a: lhs_chunk.size(), b: rhs_chunk.size());
946 assert(size_to_compare >= compared_size);
947 int memcmp_res = ::memcmp(s1: lhs_chunk.data(), s2: rhs_chunk.data(), n: compared_size);
948 if (compared_size == size_to_compare || memcmp_res != 0) {
949 return ComputeCompareResult<ResultType>(memcmp_res);
950 }
951
952 return ComputeCompareResult<ResultType>(
953 lhs.CompareSlowPath(rhs, compared_size, size_to_compare));
954}
955
956bool Cord::EqualsImpl(absl::string_view rhs, size_t size_to_compare) const {
957 return GenericCompare<bool>(lhs: *this, rhs, size_to_compare);
958}
959
960bool Cord::EqualsImpl(const Cord& rhs, size_t size_to_compare) const {
961 return GenericCompare<bool>(lhs: *this, rhs, size_to_compare);
962}
963
964template <typename RHS>
965inline int SharedCompareImpl(const Cord& lhs, const RHS& rhs) {
966 size_t lhs_size = lhs.size();
967 size_t rhs_size = rhs.size();
968 if (lhs_size == rhs_size) {
969 return GenericCompare<int>(lhs, rhs, lhs_size);
970 }
971 if (lhs_size < rhs_size) {
972 auto data_comp_res = GenericCompare<int>(lhs, rhs, lhs_size);
973 return data_comp_res == 0 ? -1 : data_comp_res;
974 }
975
976 auto data_comp_res = GenericCompare<int>(lhs, rhs, rhs_size);
977 return data_comp_res == 0 ? +1 : data_comp_res;
978}
979
980int Cord::Compare(absl::string_view rhs) const {
981 return SharedCompareImpl(lhs: *this, rhs);
982}
983
984int Cord::CompareImpl(const Cord& rhs) const {
985 return SharedCompareImpl(lhs: *this, rhs);
986}
987
988bool Cord::EndsWith(absl::string_view rhs) const {
989 size_t my_size = size();
990 size_t rhs_size = rhs.size();
991
992 if (my_size < rhs_size) return false;
993
994 Cord tmp(*this);
995 tmp.RemovePrefix(n: my_size - rhs_size);
996 return tmp.EqualsImpl(rhs, size_to_compare: rhs_size);
997}
998
999bool Cord::EndsWith(const Cord& rhs) const {
1000 size_t my_size = size();
1001 size_t rhs_size = rhs.size();
1002
1003 if (my_size < rhs_size) return false;
1004
1005 Cord tmp(*this);
1006 tmp.RemovePrefix(n: my_size - rhs_size);
1007 return tmp.EqualsImpl(rhs, size_to_compare: rhs_size);
1008}
1009
1010// --------------------------------------------------------------------
1011// Misc.
1012
1013Cord::operator std::string() const {
1014 std::string s;
1015 absl::CopyCordToString(src: *this, dst: &s);
1016 return s;
1017}
1018
1019void CopyCordToString(const Cord& src, std::string* dst) {
1020 if (!src.contents_.is_tree()) {
1021 src.contents_.CopyTo(dst);
1022 } else {
1023 absl::strings_internal::STLStringResizeUninitialized(s: dst, new_size: src.size());
1024 src.CopyToArraySlowPath(dst: &(*dst)[0]);
1025 }
1026}
1027
1028void Cord::CopyToArraySlowPath(char* dst) const {
1029 assert(contents_.is_tree());
1030 absl::string_view fragment;
1031 if (GetFlatAux(rep: contents_.tree(), fragment: &fragment)) {
1032 memcpy(dest: dst, src: fragment.data(), n: fragment.size());
1033 return;
1034 }
1035 for (absl::string_view chunk : Chunks()) {
1036 memcpy(dest: dst, src: chunk.data(), n: chunk.size());
1037 dst += chunk.size();
1038 }
1039}
1040
1041Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
1042 ABSL_HARDENING_ASSERT(bytes_remaining_ >= n &&
1043 "Attempted to iterate past `end()`");
1044 Cord subcord;
1045 auto constexpr method = CordzUpdateTracker::kCordReader;
1046
1047 if (n <= InlineRep::kMaxInline) {
1048 // Range to read fits in inline data. Flatten it.
1049 char* data = subcord.contents_.set_data(n);
1050 while (n > current_chunk_.size()) {
1051 memcpy(dest: data, src: current_chunk_.data(), n: current_chunk_.size());
1052 data += current_chunk_.size();
1053 n -= current_chunk_.size();
1054 ++*this;
1055 }
1056 memcpy(dest: data, src: current_chunk_.data(), n: n);
1057 if (n < current_chunk_.size()) {
1058 RemoveChunkPrefix(n);
1059 } else if (n > 0) {
1060 ++*this;
1061 }
1062 return subcord;
1063 }
1064
1065 if (btree_reader_) {
1066 size_t chunk_size = current_chunk_.size();
1067 if (n <= chunk_size && n <= kMaxBytesToCopy) {
1068 subcord = Cord(current_chunk_.substr(pos: 0, n), method);
1069 if (n < chunk_size) {
1070 current_chunk_.remove_prefix(n);
1071 } else {
1072 current_chunk_ = btree_reader_.Next();
1073 }
1074 } else {
1075 CordRep* rep;
1076 current_chunk_ = btree_reader_.Read(n, chunk_size, tree&: rep);
1077 subcord.contents_.EmplaceTree(rep, method);
1078 }
1079 bytes_remaining_ -= n;
1080 return subcord;
1081 }
1082
1083 // Short circuit if reading the entire data edge.
1084 assert(current_leaf_ != nullptr);
1085 if (n == current_leaf_->length) {
1086 bytes_remaining_ = 0;
1087 current_chunk_ = {};
1088 CordRep* tree = CordRep::Ref(rep: current_leaf_);
1089 subcord.contents_.EmplaceTree(rep: VerifyTree(node: tree), method);
1090 return subcord;
1091 }
1092
1093 // From this point on, we need a partial substring node.
1094 // Get pointer to the underlying flat or external data payload and
1095 // compute data pointer and offset into current flat or external.
1096 CordRep* payload = current_leaf_->IsSubstring()
1097 ? current_leaf_->substring()->child
1098 : current_leaf_;
1099 const char* data = payload->IsExternal() ? payload->external()->base
1100 : payload->flat()->Data();
1101 const size_t offset = current_chunk_.data() - data;
1102
1103 auto* tree = CordRepSubstring::Substring(rep: payload, pos: offset, n);
1104 subcord.contents_.EmplaceTree(rep: VerifyTree(node: tree), method);
1105 bytes_remaining_ -= n;
1106 current_chunk_.remove_prefix(n);
1107 return subcord;
1108}
1109
1110char Cord::operator[](size_t i) const {
1111 ABSL_HARDENING_ASSERT(i < size());
1112 size_t offset = i;
1113 const CordRep* rep = contents_.tree();
1114 if (rep == nullptr) {
1115 return contents_.data()[i];
1116 }
1117 rep = cord_internal::SkipCrcNode(rep);
1118 while (true) {
1119 assert(rep != nullptr);
1120 assert(offset < rep->length);
1121 if (rep->IsFlat()) {
1122 // Get the "i"th character directly from the flat array.
1123 return rep->flat()->Data()[offset];
1124 } else if (rep->IsBtree()) {
1125 return rep->btree()->GetCharacter(offset);
1126 } else if (rep->IsExternal()) {
1127 // Get the "i"th character from the external array.
1128 return rep->external()->base[offset];
1129 } else {
1130 // This must be a substring a node, so bypass it to get to the child.
1131 assert(rep->IsSubstring());
1132 offset += rep->substring()->start;
1133 rep = rep->substring()->child;
1134 }
1135 }
1136}
1137
1138absl::string_view Cord::FlattenSlowPath() {
1139 assert(contents_.is_tree());
1140 size_t total_size = size();
1141 CordRep* new_rep;
1142 char* new_buffer;
1143
1144 // Try to put the contents into a new flat rep. If they won't fit in the
1145 // biggest possible flat node, use an external rep instead.
1146 if (total_size <= kMaxFlatLength) {
1147 new_rep = CordRepFlat::New(len: total_size);
1148 new_rep->length = total_size;
1149 new_buffer = new_rep->flat()->Data();
1150 CopyToArraySlowPath(dst: new_buffer);
1151 } else {
1152 new_buffer = std::allocator<char>().allocate(n: total_size);
1153 CopyToArraySlowPath(dst: new_buffer);
1154 new_rep = absl::cord_internal::NewExternalRep(
1155 data: absl::string_view(new_buffer, total_size), releaser: [](absl::string_view s) {
1156 std::allocator<char>().deallocate(p: const_cast<char*>(s.data()),
1157 n: s.size());
1158 });
1159 }
1160 CordzUpdateScope scope(contents_.cordz_info(), CordzUpdateTracker::kFlatten);
1161 CordRep::Unref(rep: contents_.as_tree());
1162 contents_.SetTree(rep: new_rep, scope);
1163 return absl::string_view(new_buffer, total_size);
1164}
1165
1166/* static */ bool Cord::GetFlatAux(CordRep* rep, absl::string_view* fragment) {
1167 assert(rep != nullptr);
1168 rep = cord_internal::SkipCrcNode(rep);
1169 if (rep->IsFlat()) {
1170 *fragment = absl::string_view(rep->flat()->Data(), rep->length);
1171 return true;
1172 } else if (rep->IsExternal()) {
1173 *fragment = absl::string_view(rep->external()->base, rep->length);
1174 return true;
1175 } else if (rep->IsBtree()) {
1176 return rep->btree()->IsFlat(fragment);
1177 } else if (rep->IsSubstring()) {
1178 CordRep* child = rep->substring()->child;
1179 if (child->IsFlat()) {
1180 *fragment = absl::string_view(
1181 child->flat()->Data() + rep->substring()->start, rep->length);
1182 return true;
1183 } else if (child->IsExternal()) {
1184 *fragment = absl::string_view(
1185 child->external()->base + rep->substring()->start, rep->length);
1186 return true;
1187 } else if (child->IsBtree()) {
1188 return child->btree()->IsFlat(offset: rep->substring()->start, n: rep->length,
1189 fragment);
1190 }
1191 }
1192 return false;
1193}
1194
1195/* static */ void Cord::ForEachChunkAux(
1196 absl::cord_internal::CordRep* rep,
1197 absl::FunctionRef<void(absl::string_view)> callback) {
1198 assert(rep != nullptr);
1199 rep = cord_internal::SkipCrcNode(rep);
1200
1201 if (rep->IsBtree()) {
1202 ChunkIterator it(rep), end;
1203 while (it != end) {
1204 callback(*it);
1205 ++it;
1206 }
1207 return;
1208 }
1209
1210 // This is a leaf node, so invoke our callback.
1211 absl::cord_internal::CordRep* current_node = cord_internal::SkipCrcNode(rep);
1212 absl::string_view chunk;
1213 bool success = GetFlatAux(rep: current_node, fragment: &chunk);
1214 assert(success);
1215 if (success) {
1216 callback(chunk);
1217 }
1218}
1219
1220static void DumpNode(CordRep* rep, bool include_data, std::ostream* os,
1221 int indent) {
1222 const int kIndentStep = 1;
1223 absl::InlinedVector<CordRep*, kInlinedVectorSize> stack;
1224 absl::InlinedVector<int, kInlinedVectorSize> indents;
1225 for (;;) {
1226 *os << std::setw(3) << rep->refcount.Get();
1227 *os << " " << std::setw(7) << rep->length;
1228 *os << " [";
1229 if (include_data) *os << static_cast<void*>(rep);
1230 *os << "]";
1231 *os << " " << std::setw(indent) << "";
1232 if (rep->IsCrc()) {
1233 *os << "CRC crc=" << rep->crc()->crc << "\n";
1234 indent += kIndentStep;
1235 rep = rep->crc()->child;
1236 } else if (rep->IsSubstring()) {
1237 *os << "SUBSTRING @ " << rep->substring()->start << "\n";
1238 indent += kIndentStep;
1239 rep = rep->substring()->child;
1240 } else { // Leaf or ring
1241 if (rep->IsExternal()) {
1242 *os << "EXTERNAL [";
1243 if (include_data)
1244 *os << absl::CEscape(src: std::string(rep->external()->base, rep->length));
1245 *os << "]\n";
1246 } else if (rep->IsFlat()) {
1247 *os << "FLAT cap=" << rep->flat()->Capacity() << " [";
1248 if (include_data)
1249 *os << absl::CEscape(src: std::string(rep->flat()->Data(), rep->length));
1250 *os << "]\n";
1251 } else {
1252 CordRepBtree::Dump(rep, /*label=*/ "", include_contents: include_data, stream&: *os);
1253 }
1254 if (stack.empty()) break;
1255 rep = stack.back();
1256 stack.pop_back();
1257 indent = indents.back();
1258 indents.pop_back();
1259 }
1260 }
1261 ABSL_INTERNAL_CHECK(indents.empty(), "");
1262}
1263
1264static std::string ReportError(CordRep* root, CordRep* node) {
1265 std::ostringstream buf;
1266 buf << "Error at node " << node << " in:";
1267 DumpNode(rep: root, include_data: true, os: &buf);
1268 return buf.str();
1269}
1270
1271static bool VerifyNode(CordRep* root, CordRep* start_node,
1272 bool /* full_validation */) {
1273 absl::InlinedVector<CordRep*, 2> worklist;
1274 worklist.push_back(v: start_node);
1275 do {
1276 CordRep* node = worklist.back();
1277 worklist.pop_back();
1278
1279 ABSL_INTERNAL_CHECK(node != nullptr, ReportError(root, node));
1280 if (node != root) {
1281 ABSL_INTERNAL_CHECK(node->length != 0, ReportError(root, node));
1282 ABSL_INTERNAL_CHECK(!node->IsCrc(), ReportError(root, node));
1283 }
1284
1285 if (node->IsFlat()) {
1286 ABSL_INTERNAL_CHECK(node->length <= node->flat()->Capacity(),
1287 ReportError(root, node));
1288 } else if (node->IsExternal()) {
1289 ABSL_INTERNAL_CHECK(node->external()->base != nullptr,
1290 ReportError(root, node));
1291 } else if (node->IsSubstring()) {
1292 ABSL_INTERNAL_CHECK(
1293 node->substring()->start < node->substring()->child->length,
1294 ReportError(root, node));
1295 ABSL_INTERNAL_CHECK(node->substring()->start + node->length <=
1296 node->substring()->child->length,
1297 ReportError(root, node));
1298 } else if (node->IsCrc()) {
1299 ABSL_INTERNAL_CHECK(node->crc()->child != nullptr,
1300 ReportError(root, node));
1301 ABSL_INTERNAL_CHECK(node->crc()->length == node->crc()->child->length,
1302 ReportError(root, node));
1303 worklist.push_back(v: node->crc()->child);
1304 }
1305 } while (!worklist.empty());
1306 return true;
1307}
1308
1309std::ostream& operator<<(std::ostream& out, const Cord& cord) {
1310 for (absl::string_view chunk : cord.Chunks()) {
1311 out.write(s: chunk.data(), n: chunk.size());
1312 }
1313 return out;
1314}
1315
1316namespace strings_internal {
1317size_t CordTestAccess::FlatOverhead() { return cord_internal::kFlatOverhead; }
1318size_t CordTestAccess::MaxFlatLength() { return cord_internal::kMaxFlatLength; }
1319size_t CordTestAccess::FlatTagToLength(uint8_t tag) {
1320 return cord_internal::TagToLength(tag);
1321}
1322uint8_t CordTestAccess::LengthToTag(size_t s) {
1323 ABSL_INTERNAL_CHECK(s <= kMaxFlatLength, absl::StrCat("Invalid length ", s));
1324 return cord_internal::AllocatedSizeToTag(size: s + cord_internal::kFlatOverhead);
1325}
1326size_t CordTestAccess::SizeofCordRepExternal() {
1327 return sizeof(CordRepExternal);
1328}
1329size_t CordTestAccess::SizeofCordRepSubstring() {
1330 return sizeof(CordRepSubstring);
1331}
1332} // namespace strings_internal
1333ABSL_NAMESPACE_END
1334} // namespace absl
1335

Provided by KDAB

Privacy Policy
Learn more about Flutter for embedded and desktop on industrialflutter.com

source code of flutter_engine/third_party/abseil-cpp/absl/strings/cord.cc