// Copyright 2018 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BASE_ALLOCATOR_PARTITION_ALLOCATOR_SRC_PARTITION_ALLOC_ENCODED_NEXT_FREELIST_H_
#define BASE_ALLOCATOR_PARTITION_ALLOCATOR_SRC_PARTITION_ALLOC_ENCODED_NEXT_FREELIST_H_

#include <cstddef>
#include <cstdint>

#include "build/build_config.h"
#include "partition_alloc/freeslot_bitmap.h"
#include "partition_alloc/partition_alloc-inl.h"
#include "partition_alloc/partition_alloc_base/compiler_specific.h"
#include "partition_alloc/partition_alloc_base/debug/debugging_buildflags.h"
#include "partition_alloc/partition_alloc_base/immediate_crash.h"
#include "partition_alloc/partition_alloc_buildflags.h"
#include "partition_alloc/partition_alloc_check.h"
#include "partition_alloc/partition_alloc_config.h"
#include "partition_alloc/partition_alloc_constants.h"
#include "partition_alloc/partition_ref_count.h"

#if !defined(ARCH_CPU_BIG_ENDIAN)
#include "partition_alloc/reverse_bytes.h"
#endif  // !defined(ARCH_CPU_BIG_ENDIAN)
namespace partition_alloc::internal {

class EncodedNextFreelistEntry;

class EncodedFreelistPtr {
 private:
  PA_ALWAYS_INLINE constexpr explicit EncodedFreelistPtr(std::nullptr_t)
      : encoded_(Transform(0)) {}
  PA_ALWAYS_INLINE explicit EncodedFreelistPtr(void* ptr)
      // The encoded pointer stays MTE-tagged.
      : encoded_(Transform(reinterpret_cast<uintptr_t>(ptr))) {}

  PA_ALWAYS_INLINE EncodedNextFreelistEntry* Decode() const {
    return reinterpret_cast<EncodedNextFreelistEntry*>(Transform(encoded_));
  }

  PA_ALWAYS_INLINE constexpr uintptr_t Inverted() const { return ~encoded_; }

  PA_ALWAYS_INLINE constexpr void Override(uintptr_t encoded) {
    encoded_ = encoded;
  }

  PA_ALWAYS_INLINE constexpr explicit operator bool() const { return encoded_; }

  // Transform() works the same in both directions, so can be used for
  // encoding and decoding.
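  // (Transform() is an involution, Transform(Transform(x)) == x, because
  // both byte-swapping and bitwise negation are self-inverse.)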
  PA_ALWAYS_INLINE static constexpr uintptr_t Transform(uintptr_t address) {
    // We use bswap on little endian as a fast transformation for two reasons:
    // 1) On 64-bit architectures, the swapped pointer is very unlikely to be
    //    a canonical address. Therefore, if an object is freed and its vtable
    //    is used where the attacker doesn't get the chance to run allocations
    //    between the free and use, the vtable dereference is likely to fault.
    // 2) If the attacker has a linear buffer overflow and elects to try and
    //    corrupt a freelist pointer, partial pointer overwrite attacks are
    //    thwarted.
    // For big endian, similar guarantees are arrived at with a negation.
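    //
    // For illustration (made-up address): on a little-endian 64-bit target,
    // 0x0000'5612'3456'7890 byte-swaps to 0x9078'5634'1256'0000, whose upper
    // bits are not a sign extension of bit 47, so the transformed value is
    // non-canonical and dereferencing it faults.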
#if defined(ARCH_CPU_BIG_ENDIAN)
    uintptr_t transformed = ~address;
#else
    uintptr_t transformed = ReverseBytes(address);
#endif
    return transformed;
  }

  uintptr_t encoded_;

  friend EncodedNextFreelistEntry;
};

// Freelist entries are encoded for security reasons. See
// //base/allocator/partition_allocator/PartitionAlloc.md
// and |Transform()| for the rationale and mechanism, respectively.
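//
// A minimal usage sketch (illustrative only; |slot_start|, |slot_size|, and
// |next_entry| are assumed to come from the owning slot span, and the
// allocator normally drives these calls itself):
//
//   auto* head = EncodedNextFreelistEntry::EmplaceAndInitNull(slot_start);
//   head->SetNext(next_entry);              // stores |next_entry| encoded
//   auto* next = head->GetNext(slot_size);  // decodes, with sanity checks
//   uintptr_t slot = head->ClearForAllocation();  // zeroed, handed to caller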
class EncodedNextFreelistEntry {
 private:
  constexpr explicit EncodedNextFreelistEntry(std::nullptr_t)
      : encoded_next_(EncodedFreelistPtr(nullptr))
#if PA_CONFIG(HAS_FREELIST_SHADOW_ENTRY)
        ,
        shadow_(encoded_next_.Inverted())
#endif
  {
  }
  explicit EncodedNextFreelistEntry(EncodedNextFreelistEntry* next)
      : encoded_next_(EncodedFreelistPtr(next))
#if PA_CONFIG(HAS_FREELIST_SHADOW_ENTRY)
        ,
        shadow_(encoded_next_.Inverted())
#endif
  {
  }
  // For testing only.
  EncodedNextFreelistEntry(void* next, bool make_shadow_match)
      : encoded_next_(EncodedFreelistPtr(next))
#if PA_CONFIG(HAS_FREELIST_SHADOW_ENTRY)
        ,
        shadow_(make_shadow_match ? encoded_next_.Inverted() : 12345)
#endif
  {
  }

 public:
  ~EncodedNextFreelistEntry() = delete;

  // Emplaces the freelist entry at the beginning of the given slot span, and
  // initializes it as null-terminated.
  PA_ALWAYS_INLINE static EncodedNextFreelistEntry* EmplaceAndInitNull(
      void* slot_start_tagged) {
    // |slot_start_tagged| is MTE-tagged.
    auto* entry = new (slot_start_tagged) EncodedNextFreelistEntry(nullptr);
    return entry;
  }
  PA_ALWAYS_INLINE static EncodedNextFreelistEntry* EmplaceAndInitNull(
      uintptr_t slot_start) {
    return EmplaceAndInitNull(SlotStartAddr2Ptr(slot_start));
  }

  // Emplaces the freelist entry at the beginning of the given slot span, and
  // initializes it with the given |next| pointer, but encoded.
  //
  // This freelist is built for the thread cache, so we can't check that this
  // entry and the next pointer belong to the same super page: thread-cache
  // spans may chain slots across super pages.
  PA_ALWAYS_INLINE static EncodedNextFreelistEntry*
  EmplaceAndInitForThreadCache(uintptr_t slot_start,
                               EncodedNextFreelistEntry* next) {
    auto* entry =
        new (SlotStartAddr2Ptr(slot_start)) EncodedNextFreelistEntry(next);
    return entry;
  }

  // Emplaces the freelist entry at the beginning of the given slot span, and
  // initializes it with the given |next| pointer.
  //
  // This is for testing purposes only! |make_shadow_match| lets you choose
  // whether the shadow matches the next pointer properly or is trash.
  PA_ALWAYS_INLINE static void EmplaceAndInitForTest(uintptr_t slot_start,
                                                     void* next,
                                                     bool make_shadow_match) {
    new (SlotStartAddr2Ptr(slot_start))
        EncodedNextFreelistEntry(next, make_shadow_match);
  }

  void CorruptNextForTesting(uintptr_t v) {
    // We just need a value that can never be a valid pointer here.
    encoded_next_.Override(EncodedFreelistPtr::Transform(v));
  }

  // Puts `slot_size` on the stack before crashing in case of memory
  // corruption. Meant to be used to report the failed allocation size.
  template <bool crash_on_corruption>
  PA_ALWAYS_INLINE EncodedNextFreelistEntry* GetNextForThreadCache(
      size_t slot_size) const;
  PA_ALWAYS_INLINE EncodedNextFreelistEntry* GetNext(size_t slot_size) const;

  PA_NOINLINE void CheckFreeList(size_t slot_size) const {
    for (auto* entry = this; entry; entry = entry->GetNext(slot_size)) {
      // |GetNext()| checks freelist integrity.
    }
  }

  PA_NOINLINE void CheckFreeListForThreadCache(size_t slot_size) const {
    for (auto* entry = this; entry;
         entry = entry->GetNextForThreadCache<true>(slot_size)) {
      // |GetNextForThreadCache()| checks freelist integrity.
    }
  }

  PA_ALWAYS_INLINE void SetNext(EncodedNextFreelistEntry* entry) {
    // SetNext() is either called on the freelist head, when provisioning new
    // slots, or after GetNext() has already been called, so there is no need
    // to pass the size.
#if BUILDFLAG(PA_DCHECK_IS_ON)
    // Regular freelists always point to an entry within the same super page.
    //
    // If this triggers, it is most likely a PartitionAlloc bug.
    if (PA_UNLIKELY(entry &&
                    (SlotStartPtr2Addr(this) & kSuperPageBaseMask) !=
                        (SlotStartPtr2Addr(entry) & kSuperPageBaseMask))) {
      FreelistCorruptionDetected(0);
    }
#endif  // BUILDFLAG(PA_DCHECK_IS_ON)

    encoded_next_ = EncodedFreelistPtr(entry);
#if PA_CONFIG(HAS_FREELIST_SHADOW_ENTRY)
    shadow_ = encoded_next_.Inverted();
#endif
  }

  // Zeroes out |this| before returning the slot. The pointer to this memory
  // will be returned to the user (caller of Alloc()), so it can't retain
  // internal data.
  PA_ALWAYS_INLINE uintptr_t ClearForAllocation() {
    encoded_next_.Override(0);
#if PA_CONFIG(HAS_FREELIST_SHADOW_ENTRY)
    shadow_ = 0;
#endif
    return SlotStartPtr2Addr(this);
  }

  PA_ALWAYS_INLINE constexpr bool IsEncodedNextPtrZero() const {
    return !encoded_next_;
  }

 private:
  template <bool crash_on_corruption>
  PA_ALWAYS_INLINE EncodedNextFreelistEntry* GetNextInternal(
      size_t slot_size,
      bool for_thread_cache) const;

  PA_ALWAYS_INLINE static bool IsSane(const EncodedNextFreelistEntry* here,
                                      const EncodedNextFreelistEntry* next,
                                      bool for_thread_cache) {
    // Don't allow the freelist to be blindly followed to any location.
    // Checks two constraints:
    // - |here| and |next| must belong to the same super page (in fact, they
    //   always belong to the same slot span), unless this is in the thread
    //   cache.
    // - |next| cannot point inside the metadata area.
    //
    // Also, the lightweight UaF detection (pointer shadow) is checked.

    uintptr_t here_address = SlotStartPtr2Addr(here);
    uintptr_t next_address = SlotStartPtr2Addr(next);

#if PA_CONFIG(HAS_FREELIST_SHADOW_ENTRY)
    bool shadow_ptr_ok = here->encoded_next_.Inverted() == here->shadow_;
#else
    bool shadow_ptr_ok = true;
#endif

    bool same_superpage = (here_address & kSuperPageBaseMask) ==
                          (next_address & kSuperPageBaseMask);
#if BUILDFLAG(USE_FREESLOT_BITMAP)
    bool marked_as_free_in_bitmap =
        for_thread_cache ? true : !FreeSlotBitmapSlotIsUsed(next_address);
#else
    bool marked_as_free_in_bitmap = true;
#endif

    // This is necessary but not sufficient when quarantine is enabled, see
    // SuperPagePayloadBegin() in partition_page.h. However, we don't want to
    // fetch anything from the root in this function.
    bool not_in_metadata =
        (next_address & kSuperPageOffsetMask) >= PartitionPageSize();

    if (for_thread_cache) {
      return shadow_ptr_ok & not_in_metadata;
    } else {
      return shadow_ptr_ok & same_superpage & marked_as_free_in_bitmap &
             not_in_metadata;
    }
  }

  EncodedFreelistPtr encoded_next_;
  // This is intended to detect unintentional corruptions of the freelist.
  // These can happen due to a Use-after-Free, or overflow of the previous
  // allocation in the slot span.
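  // Illustration (made-up values): if encoded_next_ holds
  // 0x9078'5634'1256'0000, shadow_ must hold the bitwise inverse
  // 0x6F87'A9CB'EDA9'FFFF; a linear overwrite of both fields is unlikely to
  // preserve this relation, so IsSane() flags it.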
#if PA_CONFIG(HAS_FREELIST_SHADOW_ENTRY)
  uintptr_t shadow_;
#endif
};

template <bool crash_on_corruption>
PA_ALWAYS_INLINE EncodedNextFreelistEntry*
EncodedNextFreelistEntry::GetNextInternal(size_t slot_size,
                                          bool for_thread_cache) const {
  // GetNext() can be called on discarded memory, in which case
  // |encoded_next_| is 0, and none of the checks apply. Don't prefetch
  // nullptr either.
  if (IsEncodedNextPtrZero()) {
    return nullptr;
  }

  auto* ret = encoded_next_.Decode();
  // We rely on constant propagation to remove the branches coming from
  // |for_thread_cache|, since the argument is always a compile-time constant.
  if (PA_UNLIKELY(!IsSane(this, ret, for_thread_cache))) {
    if constexpr (crash_on_corruption) {
      // Put the corrupted data on the stack; it may tell us more about what
      // kind of corruption this was.
      PA_DEBUG_DATA_ON_STACK("first",
                             static_cast<size_t>(encoded_next_.encoded_));
#if PA_CONFIG(HAS_FREELIST_SHADOW_ENTRY)
      PA_DEBUG_DATA_ON_STACK("second", static_cast<size_t>(shadow_));
#endif
      FreelistCorruptionDetected(slot_size);
    } else {
      return nullptr;
    }
  }

  // In real-world profiles, the load of |encoded_next_| above accounts for a
  // large fraction of the allocation cost. However, we cannot prefetch it
  // early enough, since its address is only known right before it is
  // accessed.
  //
  // In the case of repeated allocations, though, the *next* allocation will
  // touch *ret, so prefetch it now.
  PA_PREFETCH(ret);

  return ret;
}

template <bool crash_on_corruption>
PA_ALWAYS_INLINE EncodedNextFreelistEntry*
EncodedNextFreelistEntry::GetNextForThreadCache(size_t slot_size) const {
  return GetNextInternal<crash_on_corruption>(slot_size, true);
}

PA_ALWAYS_INLINE EncodedNextFreelistEntry* EncodedNextFreelistEntry::GetNext(
    size_t slot_size) const {
  return GetNextInternal<true>(slot_size, false);
}

}  // namespace partition_alloc::internal

#endif  // BASE_ALLOCATOR_PARTITION_ALLOCATOR_SRC_PARTITION_ALLOC_ENCODED_NEXT_FREELIST_H_