• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2012 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifdef UNSAFE_BUFFERS_BUILD
6 // TODO(crbug.com/40284755): Remove this and spanify to fix the errors.
7 #pragma allow_unsafe_buffers
8 #endif
9 
#include "base/pickle.h"

#include <algorithm>
#include <bit>
#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <limits>
#include <optional>
#include <ostream>
#include <string_view>
#include <type_traits>

#include "base/bits.h"
#include "base/containers/span.h"
#include "base/numerics/safe_conversions.h"
#include "base/numerics/safe_math.h"
#include "build/build_config.h"
25 
26 namespace base {
27 
// static
// Allocation granularity of the payload: capacity always grows in multiples
// of this. Must be a power of two (enforced in Pickle's default constructor).
const size_t Pickle::kPayloadUnit = 64;

// Sentinel stored in capacity_after_header_ to mark a read-only Pickle that
// wraps an unowned buffer; such a Pickle never reallocates or frees it.
static const size_t kCapacityReadOnly = static_cast<size_t>(-1);
32 
// Positions the iterator at the start of |pickle|'s payload. The iterator
// does not own the data; |pickle|'s buffer must outlive this iterator.
PickleIterator::PickleIterator(const Pickle& pickle)
    : payload_(pickle.payload()),
      read_index_(0),
      end_index_(pickle.payload_size()) {}
37 
38 template <typename Type>
ReadBuiltinType(Type * result)39 inline bool PickleIterator::ReadBuiltinType(Type* result) {
40   static_assert(
41       std::is_integral_v<Type> && !std::is_same_v<Type, bool>,
42       "This method is only safe with to use with types without padding bits.");
43   const char* read_from = GetReadPointerAndAdvance<Type>();
44   if (!read_from)
45     return false;
46   memcpy(result, read_from, sizeof(*result));
47   return true;
48 }
49 
Advance(size_t size)50 inline void PickleIterator::Advance(size_t size) {
51   size_t aligned_size = bits::AlignUp(size, sizeof(uint32_t));
52   if (end_index_ - read_index_ < aligned_size) {
53     read_index_ = end_index_;
54   } else {
55     read_index_ += aligned_size;
56   }
57 }
58 
59 template <typename Type>
GetReadPointerAndAdvance()60 inline const char* PickleIterator::GetReadPointerAndAdvance() {
61   if (sizeof(Type) > end_index_ - read_index_) {
62     read_index_ = end_index_;
63     return nullptr;
64   }
65   const char* current_read_ptr = payload_ + read_index_;
66   Advance(sizeof(Type));
67   return current_read_ptr;
68 }
69 
GetReadPointerAndAdvance(size_t num_bytes)70 const char* PickleIterator::GetReadPointerAndAdvance(size_t num_bytes) {
71   if (num_bytes > end_index_ - read_index_) {
72     read_index_ = end_index_;
73     return nullptr;
74   }
75   const char* current_read_ptr = payload_ + read_index_;
76   Advance(num_bytes);
77   return current_read_ptr;
78 }
79 
GetReadPointerAndAdvance(size_t num_elements,size_t size_element)80 inline const char* PickleIterator::GetReadPointerAndAdvance(
81     size_t num_elements,
82     size_t size_element) {
83   // Check for size_t overflow.
84   size_t num_bytes;
85   if (!CheckMul(num_elements, size_element).AssignIfValid(&num_bytes))
86     return nullptr;
87   return GetReadPointerAndAdvance(num_bytes);
88 }
89 
ReadBool(bool * result)90 bool PickleIterator::ReadBool(bool* result) {
91   // Not all bit patterns are valid bools. Avoid undefined behavior by reading a
92   // type with no padding bits, then converting to bool.
93   uint8_t v;
94   if (!ReadBuiltinType(&v)) {
95     return false;
96   }
97   *result = v != 0;
98   return true;
99 }
100 
// Reads a 32-bit int from the payload. Returns false on short data.
bool PickleIterator::ReadInt(int* result) {
  return ReadBuiltinType(result);
}
104 
ReadLong(long * result)105 bool PickleIterator::ReadLong(long* result) {
106   // Always read long as a 64-bit value to ensure compatibility between 32-bit
107   // and 64-bit processes.
108   int64_t result_int64 = 0;
109   if (!ReadBuiltinType(&result_int64))
110     return false;
111   if (!IsValueInRangeForNumericType<long>(result_int64))
112     return false;
113   *result = static_cast<long>(result_int64);
114   return true;
115 }
116 
// Reads a uint16_t. Returns false on short data.
bool PickleIterator::ReadUInt16(uint16_t* result) {
  return ReadBuiltinType(result);
}

// Reads a uint32_t. Returns false on short data.
bool PickleIterator::ReadUInt32(uint32_t* result) {
  return ReadBuiltinType(result);
}

// Reads an int64_t. Returns false on short data.
bool PickleIterator::ReadInt64(int64_t* result) {
  return ReadBuiltinType(result);
}

// Reads a uint64_t. Returns false on short data.
bool PickleIterator::ReadUInt64(uint64_t* result) {
  return ReadBuiltinType(result);
}
132 
ReadFloat(float * result)133 bool PickleIterator::ReadFloat(float* result) {
134   // crbug.com/315213
135   // The source data may not be properly aligned, and unaligned float reads
136   // cause SIGBUS on some ARM platforms, so force using memcpy to copy the data
137   // into the result.
138   const char* read_from = GetReadPointerAndAdvance<float>();
139   if (!read_from)
140     return false;
141   memcpy(result, read_from, sizeof(*result));
142   return true;
143 }
144 
ReadDouble(double * result)145 bool PickleIterator::ReadDouble(double* result) {
146   // crbug.com/315213
147   // The source data may not be properly aligned, and unaligned double reads
148   // cause SIGBUS on some ARM platforms, so force using memcpy to copy the data
149   // into the result.
150   const char* read_from = GetReadPointerAndAdvance<double>();
151   if (!read_from)
152     return false;
153   memcpy(result, read_from, sizeof(*result));
154   return true;
155 }
156 
ReadString(std::string * result)157 bool PickleIterator::ReadString(std::string* result) {
158   size_t len;
159   if (!ReadLength(&len))
160     return false;
161   const char* read_from = GetReadPointerAndAdvance(len);
162   if (!read_from)
163     return false;
164 
165   result->assign(read_from, len);
166   return true;
167 }
168 
ReadStringPiece(std::string_view * result)169 bool PickleIterator::ReadStringPiece(std::string_view* result) {
170   size_t len;
171   if (!ReadLength(&len))
172     return false;
173   const char* read_from = GetReadPointerAndAdvance(len);
174   if (!read_from)
175     return false;
176 
177   *result = std::string_view(read_from, len);
178   return true;
179 }
180 
ReadString16(std::u16string * result)181 bool PickleIterator::ReadString16(std::u16string* result) {
182   size_t len;
183   if (!ReadLength(&len))
184     return false;
185   const char* read_from = GetReadPointerAndAdvance(len, sizeof(char16_t));
186   if (!read_from)
187     return false;
188 
189   result->assign(reinterpret_cast<const char16_t*>(read_from), len);
190   return true;
191 }
192 
ReadStringPiece16(std::u16string_view * result)193 bool PickleIterator::ReadStringPiece16(std::u16string_view* result) {
194   size_t len;
195   if (!ReadLength(&len))
196     return false;
197   const char* read_from = GetReadPointerAndAdvance(len, sizeof(char16_t));
198   if (!read_from)
199     return false;
200 
201   *result =
202       std::u16string_view(reinterpret_cast<const char16_t*>(read_from), len);
203   return true;
204 }
205 
ReadData(const char ** data,size_t * length)206 bool PickleIterator::ReadData(const char** data, size_t* length) {
207   *length = 0;
208   *data = nullptr;
209 
210   if (!ReadLength(length))
211     return false;
212 
213   return ReadBytes(data, *length);
214 }
215 
ReadData()216 std::optional<span<const uint8_t>> PickleIterator::ReadData() {
217   const char* ptr;
218   size_t length;
219 
220   if (!ReadData(&ptr, &length))
221     return std::nullopt;
222 
223   return as_bytes(span(ptr, length));
224 }
225 
ReadBytes(const char ** data,size_t length)226 bool PickleIterator::ReadBytes(const char** data, size_t length) {
227   const char* read_from = GetReadPointerAndAdvance(length);
228   if (!read_from)
229     return false;
230   *data = read_from;
231   return true;
232 }
233 
// Attachment is a refcounted extension point; the base class holds no state.
Pickle::Attachment::Attachment() = default;

Pickle::Attachment::~Attachment() = default;
237 
// Payload is uint32_t aligned.

// Constructs a writable Pickle with the default Header and a freshly
// allocated kPayloadUnit-byte buffer.
Pickle::Pickle()
    : header_(nullptr),
      header_size_(sizeof(Header)),
      capacity_after_header_(0),
      write_offset_(0) {
  static_assert(std::has_single_bit(Pickle::kPayloadUnit),
                "Pickle::kPayloadUnit must be a power of two");
  Resize(kPayloadUnit);
  header_->payload_size = 0;
}
250 
// Constructs a writable Pickle whose (possibly subclass-extended) header
// occupies |header_size| bytes, rounded up to uint32_t alignment.
Pickle::Pickle(size_t header_size)
    : header_(nullptr),
      header_size_(bits::AlignUp(header_size, sizeof(uint32_t))),
      capacity_after_header_(0),
      write_offset_(0) {
  DCHECK_GE(header_size, sizeof(Header));
  DCHECK_LE(header_size, kPayloadUnit);
  Resize(kPayloadUnit);
  header_->payload_size = 0;
}
261 
// Builds a Pickle that owns a private copy of |data|.
Pickle Pickle::WithData(span<const uint8_t> data) {
  // Create a pickle with unowned data, then do a copy to internalize the data.
  Pickle pickle(kUnownedData, data);
  Pickle internalized_data_pickle = pickle;
  // The copy must have allocated its own storage rather than stay read-only.
  CHECK_NE(internalized_data_pickle.capacity_after_header_, kCapacityReadOnly);
  return internalized_data_pickle;
}
269 
// Builds a read-only Pickle that aliases |data| without copying; the caller
// must keep |data| alive for the Pickle's lifetime.
Pickle Pickle::WithUnownedBuffer(span<const uint8_t> data) {
  // This uses return value optimization to return a Pickle without copying
  // which will preserve the unowned-ness of the data.
  return Pickle(kUnownedData, data);
}
275 
// Wraps |data| without copying. The resulting Pickle is read-only
// (capacity_after_header_ == kCapacityReadOnly) and never frees |data|.
Pickle::Pickle(UnownedData, span<const uint8_t> data)
    : header_(reinterpret_cast<Header*>(const_cast<uint8_t*>(data.data()))),
      header_size_(0),
      capacity_after_header_(kCapacityReadOnly),
      write_offset_(0) {
  // Infer the header size: everything before the payload, whose length the
  // header itself records in payload_size.
  if (data.size() >= sizeof(Header)) {
    header_size_ = data.size() - header_->payload_size;
  }

  // A payload_size larger than the buffer underflows the subtraction above;
  // such data is bogus.
  if (header_size_ > data.size()) {
    header_size_ = 0;
  }

  // Valid header sizes are always uint32_t-aligned.
  if (header_size_ != bits::AlignUp(header_size_, sizeof(uint32_t))) {
    header_size_ = 0;
  }

  // If there is anything wrong with the data, we're not going to use it.
  if (!header_size_) {
    header_ = nullptr;
  }
}
298 
// Deep-copies |other|: allocates fresh owned storage and copies both the
// header and the payload. Copying a header-less pickle yields another
// header-less pickle.
Pickle::Pickle(const Pickle& other)
    : header_(nullptr),
      header_size_(other.header_size_),
      capacity_after_header_(0),
      write_offset_(other.write_offset_) {
  if (other.header_) {
    Resize(other.header_->payload_size);
    memcpy(header_, other.header_, header_size_ + other.header_->payload_size);
  }
}
309 
Pickle::~Pickle() {
  // Read-only pickles wrap an unowned buffer, which must not be freed here.
  if (capacity_after_header_ != kCapacityReadOnly)
    free(header_);
}
314 
Pickle& Pickle::operator=(const Pickle& other) {
  if (this == &other) {
    return *this;
  }
  // A read-only pickle does not own its buffer; drop the pointer instead of
  // freeing it, then fall through to allocate owned storage below.
  if (capacity_after_header_ == kCapacityReadOnly) {
    header_ = nullptr;
    capacity_after_header_ = 0;
  }
  // A different header size makes the existing allocation unusable; discard
  // it (free(nullptr) is a no-op) and start over.
  if (header_size_ != other.header_size_) {
    free(header_);
    header_ = nullptr;
    header_size_ = other.header_size_;
  }
  if (other.header_) {
    Resize(other.header_->payload_size);
    memcpy(header_, other.header_,
           other.header_size_ + other.header_->payload_size);
    write_offset_ = other.write_offset_;
  }
  // NOTE(review): when other.header_ is null, write_offset_ (and any
  // previously owned buffer) is left as-is rather than reset — confirm
  // callers never assign from a header-less pickle expecting a clean state.
  return *this;
}
336 
// Writes a length-prefixed byte string (no NUL terminator is stored).
void Pickle::WriteString(std::string_view value) {
  WriteData(value.data(), value.size());
}

// Writes a UTF-16 string as an element count followed by raw char16_t bytes.
void Pickle::WriteString16(std::u16string_view value) {
  WriteInt(checked_cast<int>(value.size()));
  WriteBytes(value.data(), value.size() * sizeof(char16_t));
}
345 
// Writes a length-prefixed blob; all overloads funnel into the span version.
void Pickle::WriteData(const char* data, size_t length) {
  WriteData(as_bytes(span(data, length)));
}

void Pickle::WriteData(std::string_view data) {
  WriteData(as_byte_span(data));
}

void Pickle::WriteData(base::span<const uint8_t> data) {
  // The length prefix is a checked int, so blobs are limited to INT_MAX.
  WriteInt(checked_cast<int>(data.size()));
  WriteBytes(data);
}
358 
// Appends raw bytes with no length prefix; the reader must know the size.
void Pickle::WriteBytes(const void* data, size_t length) {
  WriteBytesCommon(span(static_cast<const uint8_t*>(data), length));
}

void Pickle::WriteBytes(span<const uint8_t> data) {
  WriteBytesCommon(data);
}
366 
// Pre-allocates capacity for |length| more bytes (plus alignment padding) so
// subsequent writes do not need to reallocate.
void Pickle::Reserve(size_t length) {
  size_t data_len = bits::AlignUp(length, sizeof(uint32_t));
  DCHECK_GE(data_len, length);  // AlignUp must not have wrapped.
#ifdef ARCH_CPU_64_BITS
  DCHECK_LE(data_len, std::numeric_limits<uint32_t>::max());
#endif
  DCHECK_LE(write_offset_, std::numeric_limits<uint32_t>::max() - data_len);
  size_t new_size = write_offset_ + data_len;
  // Grow geometrically so repeated reserves stay amortized O(1).
  if (new_size > capacity_after_header_)
    Resize(capacity_after_header_ * 2 + new_size);
}
378 
// Attachment brokering is not supported by the base class; subclasses that
// carry out-of-band resources override these three methods.
bool Pickle::WriteAttachment(scoped_refptr<Attachment> attachment) {
  return false;
}

bool Pickle::ReadAttachment(base::PickleIterator* iter,
                            scoped_refptr<Attachment>* attachment) const {
  return false;
}

bool Pickle::HasAttachments() const {
  return false;
}
391 
// Reallocates the buffer so the payload area holds |new_capacity| bytes,
// rounded up to kPayloadUnit. Must never be called on a read-only pickle.
void Pickle::Resize(size_t new_capacity) {
  CHECK_NE(capacity_after_header_, kCapacityReadOnly);
  capacity_after_header_ = bits::AlignUp(new_capacity, kPayloadUnit);
  void* p = realloc(header_, GetTotalAllocatedSize());
  CHECK(p);  // Out-of-memory is fatal here.
  header_ = reinterpret_cast<Header*>(p);
}
399 
ClaimBytes(size_t num_bytes)400 void* Pickle::ClaimBytes(size_t num_bytes) {
401   void* p = ClaimUninitializedBytesInternal(num_bytes);
402   CHECK(p);
403   memset(p, 0, num_bytes);
404   return p;
405 }
406 
GetTotalAllocatedSize() const407 size_t Pickle::GetTotalAllocatedSize() const {
408   if (capacity_after_header_ == kCapacityReadOnly)
409     return 0;
410   return header_size_ + capacity_after_header_;
411 }
412 
413 // static
FindNext(size_t header_size,const char * start,const char * end)414 const char* Pickle::FindNext(size_t header_size,
415                              const char* start,
416                              const char* end) {
417   size_t pickle_size = 0;
418   if (!PeekNext(header_size, start, end, &pickle_size))
419     return nullptr;
420 
421   if (pickle_size > static_cast<size_t>(end - start))
422     return nullptr;
423 
424   return start + pickle_size;
425 }
426 
// static
// Reports the total size (header + payload) of the pickle starting at
// |start| without consuming any data. Returns false if fewer than
// sizeof(Header) (or |header_size|) bytes are available.
bool Pickle::PeekNext(size_t header_size,
                      const char* start,
                      const char* end,
                      size_t* pickle_size) {
  DCHECK_EQ(header_size, bits::AlignUp(header_size, sizeof(uint32_t)));
  DCHECK_GE(header_size, sizeof(Header));
  DCHECK_LE(header_size, static_cast<size_t>(kPayloadUnit));

  size_t length = static_cast<size_t>(end - start);
  if (length < sizeof(Header))
    return false;

  const Header* hdr = reinterpret_cast<const Header*>(start);
  if (length < header_size)
    return false;

  // If payload_size causes an overflow, we return maximum possible
  // pickle size to indicate that.
  *pickle_size = ClampAdd(header_size, hdr->payload_size);
  return true;
}
449 
// Compile-time-sized variant of WriteBytes; presumably invoked from inline
// writers declared in pickle.h — the explicit instantiations below cover the
// sizes those writers need (TODO confirm against the header).
template <size_t length>
void Pickle::WriteBytesStatic(const void* data) {
  WriteBytesCommon(span(static_cast<const uint8_t*>(data), length));
}

template void Pickle::WriteBytesStatic<2>(const void* data);
template void Pickle::WriteBytesStatic<4>(const void* data);
template void Pickle::WriteBytesStatic<8>(const void* data);
458 
// Reserves |length| payload bytes (growing the buffer if needed), zeroes only
// the alignment padding, updates payload_size/write_offset_, and returns the
// write cursor. The caller must fill the first |length| bytes itself.
inline void* Pickle::ClaimUninitializedBytesInternal(size_t length) {
  DCHECK_NE(kCapacityReadOnly, capacity_after_header_)
      << "oops: pickle is readonly";
  size_t data_len = bits::AlignUp(length, sizeof(uint32_t));
  DCHECK_GE(data_len, length);  // AlignUp must not have wrapped.
#ifdef ARCH_CPU_64_BITS
  DCHECK_LE(data_len, std::numeric_limits<uint32_t>::max());
#endif
  DCHECK_LE(write_offset_, std::numeric_limits<uint32_t>::max() - data_len);
  size_t new_size = write_offset_ + data_len;
  if (new_size > capacity_after_header_) {
    // Double the capacity; for large buffers, round so that the total heap
    // block lands just under a 4 KiB multiple (minus kPayloadUnit for the
    // header area) to reduce allocator waste.
    size_t new_capacity = capacity_after_header_ * 2;
    const size_t kPickleHeapAlign = 4096;
    if (new_capacity > kPickleHeapAlign) {
      new_capacity =
          bits::AlignUp(new_capacity, kPickleHeapAlign) - kPayloadUnit;
    }
    Resize(std::max(new_capacity, new_size));
  }

  char* write = mutable_payload() + write_offset_;
  std::fill(write + length, write + data_len, 0);  // Always initialize padding
  header_->payload_size = static_cast<uint32_t>(new_size);
  write_offset_ = new_size;
  return write;
}
485 
// Copies |data| into freshly claimed payload bytes. The MSAN check ensures
// uninitialized memory is never serialized into a pickle.
inline void Pickle::WriteBytesCommon(span<const uint8_t> data) {
  DCHECK_NE(kCapacityReadOnly, capacity_after_header_)
      << "oops: pickle is readonly";
  MSAN_CHECK_MEM_IS_INITIALIZED(data.data(), data.size());
  void* write = ClaimUninitializedBytesInternal(data.size());
  std::copy(data.data(), data.data() + data.size(), static_cast<char*>(write));
}
493 
494 }  // namespace base
495