1 // Copyright 2020 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef BASE_ALLOCATOR_PARTITION_ALLOCATOR_SRC_PARTITION_ALLOC_PARTITION_ADDRESS_SPACE_H_
6 #define BASE_ALLOCATOR_PARTITION_ALLOCATOR_SRC_PARTITION_ALLOC_PARTITION_ADDRESS_SPACE_H_
7
8 #include <bit>
9 #include <cstddef>
10 #include <utility>
11
12 #include "build/build_config.h"
13 #include "partition_alloc/address_pool_manager_types.h"
14 #include "partition_alloc/page_allocator_constants.h"
15 #include "partition_alloc/partition_alloc_base/compiler_specific.h"
16 #include "partition_alloc/partition_alloc_base/component_export.h"
17 #include "partition_alloc/partition_alloc_base/notreached.h"
18 #include "partition_alloc/partition_alloc_buildflags.h"
19 #include "partition_alloc/partition_alloc_check.h"
20 #include "partition_alloc/partition_alloc_config.h"
21 #include "partition_alloc/partition_alloc_constants.h"
22 #include "partition_alloc/partition_alloc_forward.h"
23 #include "partition_alloc/thread_isolation/alignment.h"
24
25 #if BUILDFLAG(ENABLE_THREAD_ISOLATION)
26 #include "partition_alloc/thread_isolation/thread_isolation.h"
27 #endif
28
29 // The feature is not applicable to 32-bit address space.
30 #if BUILDFLAG(HAS_64_BIT_POINTERS)
31
32 namespace partition_alloc {
33
34 namespace internal {
35
36 // Manages PartitionAlloc address space, which is split into pools.
37 // See `glossary.md`.
class PA_COMPONENT_EXPORT(PARTITION_ALLOC) PartitionAddressSpace {
 public:
  // Represents pool-specific information about a given address.
  struct PoolInfo {
    pool_handle handle;
    // Base address of the pool that contains the address.
    uintptr_t base;
    // Offset of the address within that pool (address - base).
    uintptr_t offset;
  };

#if PA_CONFIG(DYNAMICALLY_SELECT_POOL_SIZE)
  PA_ALWAYS_INLINE static uintptr_t RegularPoolBaseMask() {
    return setup_.regular_pool_base_mask_;
  }
#else
  PA_ALWAYS_INLINE static constexpr uintptr_t RegularPoolBaseMask() {
    return kRegularPoolBaseMask;
  }
#endif

  // Returns the pool handle, pool base, and within-pool offset for |address|.
  // |address| must belong to one of the initialized pools; otherwise this
  // hits PA_NOTREACHED().
  PA_ALWAYS_INLINE static PoolInfo GetPoolInfo(uintptr_t address) {
    // When USE_BACKUP_REF_PTR is off, BRP pool isn't used.
#if !BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT)
    PA_DCHECK(!IsInBRPPool(address));
#endif
    pool_handle pool = kNullPoolHandle;
    uintptr_t base = 0;
#if BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT)
    if (IsInBRPPool(address)) {
      pool = kBRPPoolHandle;
      base = setup_.brp_pool_base_address_;
    } else
#endif  // BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT)
        if (IsInRegularPool(address)) {
      pool = kRegularPoolHandle;
      base = setup_.regular_pool_base_address_;
    } else if (IsInConfigurablePool(address)) {
      PA_DCHECK(IsConfigurablePoolInitialized());
      pool = kConfigurablePoolHandle;
      base = setup_.configurable_pool_base_address_;
#if BUILDFLAG(ENABLE_THREAD_ISOLATION)
    } else if (IsInThreadIsolatedPool(address)) {
      pool = kThreadIsolatedPoolHandle;
      base = setup_.thread_isolated_pool_base_address_;
#endif
    } else {
      // |address| is not managed by any pool.
      PA_NOTREACHED();
    }
    return PoolInfo{.handle = pool, .base = base, .offset = address - base};
  }
  // Upper bound on the size accepted by InitConfigurablePool().
  PA_ALWAYS_INLINE static constexpr size_t ConfigurablePoolMaxSize() {
    return kConfigurablePoolMaxSize;
  }
  // Lower bound on the size accepted by InitConfigurablePool().
  PA_ALWAYS_INLINE static constexpr size_t ConfigurablePoolMinSize() {
    return kConfigurablePoolMinSize;
  }

  // Initialize pools (except for the configurable one).
  //
  // This function must only be called from the main thread.
  static void Init();
  // Initialize the ConfigurablePool at the given address |pool_base|. It must
  // be aligned to the size of the pool. The size must be a power of two and
  // must be within [ConfigurablePoolMinSize(), ConfigurablePoolMaxSize()].
  //
  // This function must only be called from the main thread.
  static void InitConfigurablePool(uintptr_t pool_base, size_t size);
#if BUILDFLAG(ENABLE_THREAD_ISOLATION)
  static void InitThreadIsolatedPool(ThreadIsolationOption thread_isolation);
  static void UninitThreadIsolatedPoolForTesting();
#endif
  static void UninitForTesting();
  static void UninitConfigurablePoolForTesting();

  // Returns whether the regular and BRP pools have been set up.
  PA_ALWAYS_INLINE static bool IsInitialized() {
    // Either neither or both regular and BRP pool are initialized. The
    // configurable and thread isolated pool are initialized separately.
    if (setup_.regular_pool_base_address_ != kUninitializedPoolBaseAddress) {
      PA_DCHECK(setup_.brp_pool_base_address_ != kUninitializedPoolBaseAddress);
      return true;
    }

    PA_DCHECK(setup_.brp_pool_base_address_ == kUninitializedPoolBaseAddress);
    return false;
  }

  PA_ALWAYS_INLINE static bool IsConfigurablePoolInitialized() {
    return setup_.configurable_pool_base_address_ !=
           kUninitializedPoolBaseAddress;
  }

#if BUILDFLAG(ENABLE_THREAD_ISOLATION)
  PA_ALWAYS_INLINE static bool IsThreadIsolatedPoolInitialized() {
    return setup_.thread_isolated_pool_base_address_ !=
           kUninitializedPoolBaseAddress;
  }
#endif

  // Returns false for nullptr.
  // (Also false before Init(): the base is kUninitializedPoolBaseAddress,
  // which no masked address can equal.)
  PA_ALWAYS_INLINE static bool IsInRegularPool(uintptr_t address) {
#if PA_CONFIG(DYNAMICALLY_SELECT_POOL_SIZE)
    const uintptr_t regular_pool_base_mask = setup_.regular_pool_base_mask_;
#else
    constexpr uintptr_t regular_pool_base_mask = kRegularPoolBaseMask;
#endif
    return (address & regular_pool_base_mask) ==
           setup_.regular_pool_base_address_;
  }

  PA_ALWAYS_INLINE static uintptr_t RegularPoolBase() {
    return setup_.regular_pool_base_address_;
  }

  // Returns false for nullptr.
  PA_ALWAYS_INLINE static bool IsInBRPPool(uintptr_t address) {
#if PA_CONFIG(DYNAMICALLY_SELECT_POOL_SIZE)
    const uintptr_t brp_pool_base_mask = setup_.brp_pool_base_mask_;
#else
    constexpr uintptr_t brp_pool_base_mask = kBRPPoolBaseMask;
#endif
    return (address & brp_pool_base_mask) == setup_.brp_pool_base_address_;
  }

#if BUILDFLAG(GLUE_CORE_POOLS)
  // Checks whether the address belongs to either regular or BRP pool.
  // Returns false for nullptr.
  PA_ALWAYS_INLINE static bool IsInCorePools(uintptr_t address) {
#if PA_CONFIG(DYNAMICALLY_SELECT_POOL_SIZE)
    const uintptr_t core_pools_base_mask = setup_.core_pools_base_mask_;
#else
    // When PA_GLUE_CORE_POOLS is on, the BRP pool is placed at the end of the
    // regular pool, effectively forming one virtual pool of a twice bigger
    // size. Adjust the mask appropriately.
    constexpr uintptr_t core_pools_base_mask = kRegularPoolBaseMask << 1;
#endif  // PA_CONFIG(DYNAMICALLY_SELECT_POOL_SIZE)
    bool ret =
        (address & core_pools_base_mask) == setup_.regular_pool_base_address_;
    // The single-mask check must agree with checking each pool separately.
    PA_DCHECK(ret == (IsInRegularPool(address) || IsInBRPPool(address)));
    return ret;
  }
#if PA_CONFIG(DYNAMICALLY_SELECT_POOL_SIZE)
  PA_ALWAYS_INLINE static size_t CorePoolsSize() {
    return RegularPoolSize() * 2;
  }
#else
  PA_ALWAYS_INLINE static constexpr size_t CorePoolsSize() {
    return RegularPoolSize() * 2;
  }
#endif  // PA_CONFIG(DYNAMICALLY_SELECT_POOL_SIZE)
#endif  // BUILDFLAG(GLUE_CORE_POOLS)

  // Returns |address|'s offset from the BRP pool base. |address| must be in
  // the BRP pool (DCHECKed).
  PA_ALWAYS_INLINE static uintptr_t OffsetInBRPPool(uintptr_t address) {
    PA_DCHECK(IsInBRPPool(address));
    return address - setup_.brp_pool_base_address_;
  }

  // Returns false for nullptr.
  PA_ALWAYS_INLINE static bool IsInConfigurablePool(uintptr_t address) {
    return (address & setup_.configurable_pool_base_mask_) ==
           setup_.configurable_pool_base_address_;
  }

  PA_ALWAYS_INLINE static uintptr_t ConfigurablePoolBase() {
    return setup_.configurable_pool_base_address_;
  }

#if BUILDFLAG(ENABLE_THREAD_ISOLATION)
  // Returns false for nullptr.
  PA_ALWAYS_INLINE static bool IsInThreadIsolatedPool(uintptr_t address) {
    return (address & kThreadIsolatedPoolBaseMask) ==
           setup_.thread_isolated_pool_base_address_;
  }
#endif

#if PA_CONFIG(ENABLE_SHADOW_METADATA)
  // Returns the offset that maps |pool|'s metadata to its shadow copy.
  // Only the regular and BRP pools have a shadow.
  PA_ALWAYS_INLINE static std::ptrdiff_t ShadowPoolOffset(pool_handle pool) {
    if (pool == kRegularPoolHandle) {
      return regular_pool_shadow_offset_;
    } else if (pool == kBRPPoolHandle) {
      return brp_pool_shadow_offset_;
    } else {
      // TODO(crbug.com/1362969): Add shadow for configurable pool as well.
      // Shadow is not created for ConfigurablePool for now, so this part should
      // be unreachable.
      PA_NOTREACHED();
    }
  }
#endif

  // PartitionAddressSpace is static_only class.
  PartitionAddressSpace() = delete;
  PartitionAddressSpace(const PartitionAddressSpace&) = delete;
  void* operator new(size_t) = delete;
  void* operator new(size_t, void*) = delete;

 private:
#if PA_CONFIG(DYNAMICALLY_SELECT_POOL_SIZE)
  // Defined out-of-line; sizes are chosen at runtime in this configuration.
  PA_ALWAYS_INLINE static size_t RegularPoolSize();
  PA_ALWAYS_INLINE static size_t BRPPoolSize();
#else
  // The pool sizes should be as large as maximum whenever possible.
  PA_ALWAYS_INLINE static constexpr size_t RegularPoolSize() {
    return kRegularPoolSize;
  }
  PA_ALWAYS_INLINE static constexpr size_t BRPPoolSize() {
    return kBRPPoolSize;
  }
#endif  // PA_CONFIG(DYNAMICALLY_SELECT_POOL_SIZE)

#if BUILDFLAG(ENABLE_THREAD_ISOLATION)
  PA_ALWAYS_INLINE static constexpr size_t ThreadIsolatedPoolSize() {
    return kThreadIsolatedPoolSize;
  }
#endif

  // On 64-bit systems, PA allocates from several contiguous, mutually disjoint
  // pools. The BRP pool is where all allocations have a BRP ref-count, thus
  // pointers pointing there can use a BRP protection against UaF. Allocations
  // in the other pools don't have that.
  //
  // Pool sizes have to be the power of two. Each pool will be aligned at its
  // own size boundary.
  //
  // NOTE! The BRP pool must be preceded by an inaccessible region. This is to
  // prevent a pointer to the end of a non-BRP-pool allocation from falling into
  // the BRP pool, thus triggering BRP mechanism and likely crashing. This
  // "forbidden zone" can be as small as 1B, but it's simpler to just reserve an
  // allocation granularity unit.
  //
  // The ConfigurablePool is an optional Pool that can be created inside an
  // existing mapping provided by the embedder. This Pool can be used when
  // certain PA allocations must be located inside a given virtual address
  // region. One use case for this Pool is V8 Sandbox, which requires that
  // ArrayBuffers be located inside of it.
  static constexpr size_t kRegularPoolSize = kPoolMaxSize;
  static constexpr size_t kBRPPoolSize = kPoolMaxSize;
  static_assert(std::has_single_bit(kRegularPoolSize));
  static_assert(std::has_single_bit(kBRPPoolSize));
#if BUILDFLAG(ENABLE_THREAD_ISOLATION)
  static constexpr size_t kThreadIsolatedPoolSize = kGiB / 4;
  static_assert(std::has_single_bit(kThreadIsolatedPoolSize));
#endif
  static constexpr size_t kConfigurablePoolMaxSize = kPoolMaxSize;
  static constexpr size_t kConfigurablePoolMinSize = 1 * kGiB;
  static_assert(kConfigurablePoolMinSize <= kConfigurablePoolMaxSize);
  static_assert(std::has_single_bit(kConfigurablePoolMaxSize));
  static_assert(std::has_single_bit(kConfigurablePoolMinSize));

#if BUILDFLAG(IS_IOS)

#if !PA_CONFIG(DYNAMICALLY_SELECT_POOL_SIZE)
#error iOS is only supported with a dynamically sized GigaCase.
#endif

  // We can't afford pool sizes as large as kPoolMaxSize in iOS EarlGrey tests,
  // since the test process cannot use an extended virtual address space (see
  // crbug.com/1250788).
  static constexpr size_t kRegularPoolSizeForIOSTestProcess = kGiB / 4;
  static constexpr size_t kBRPPoolSizeForIOSTestProcess = kGiB / 4;
  static_assert(kRegularPoolSizeForIOSTestProcess < kRegularPoolSize);
  static_assert(kBRPPoolSizeForIOSTestProcess < kBRPPoolSize);
  static_assert(std::has_single_bit(kRegularPoolSizeForIOSTestProcess));
  static_assert(std::has_single_bit(kBRPPoolSizeForIOSTestProcess));
#endif  // BUILDFLAG(IS_IOS)

#if !PA_CONFIG(DYNAMICALLY_SELECT_POOL_SIZE)
  // Masks used to easy determine belonging to a pool.
  static constexpr uintptr_t kRegularPoolOffsetMask =
      static_cast<uintptr_t>(kRegularPoolSize) - 1;
  static constexpr uintptr_t kRegularPoolBaseMask = ~kRegularPoolOffsetMask;
  static constexpr uintptr_t kBRPPoolOffsetMask =
      static_cast<uintptr_t>(kBRPPoolSize) - 1;
  static constexpr uintptr_t kBRPPoolBaseMask = ~kBRPPoolOffsetMask;
#endif  // !PA_CONFIG(DYNAMICALLY_SELECT_POOL_SIZE)

#if BUILDFLAG(ENABLE_THREAD_ISOLATION)
  static constexpr uintptr_t kThreadIsolatedPoolOffsetMask =
      static_cast<uintptr_t>(kThreadIsolatedPoolSize) - 1;
  static constexpr uintptr_t kThreadIsolatedPoolBaseMask =
      ~kThreadIsolatedPoolOffsetMask;
#endif

  // This must be set to such a value that IsIn*Pool() always returns false when
  // the pool isn't initialized.
  static constexpr uintptr_t kUninitializedPoolBaseAddress =
      static_cast<uintptr_t>(-1);

  struct alignas(kPartitionCachelineSize) PA_THREAD_ISOLATED_ALIGN PoolSetup {
    // Before PartitionAddressSpace::Init(), no allocation are allocated from a
    // reserved address space. Therefore, set *_pool_base_address_ initially to
    // -1, so that PartitionAddressSpace::IsIn*Pool() always returns false.
    constexpr PoolSetup() = default;

    // Using a struct to enforce alignment and padding
    uintptr_t regular_pool_base_address_ = kUninitializedPoolBaseAddress;
    uintptr_t brp_pool_base_address_ = kUninitializedPoolBaseAddress;
    uintptr_t configurable_pool_base_address_ = kUninitializedPoolBaseAddress;
#if BUILDFLAG(ENABLE_THREAD_ISOLATION)
    uintptr_t thread_isolated_pool_base_address_ =
        kUninitializedPoolBaseAddress;
#endif
#if PA_CONFIG(DYNAMICALLY_SELECT_POOL_SIZE)
    uintptr_t regular_pool_base_mask_ = 0;
    uintptr_t brp_pool_base_mask_ = 0;
#if BUILDFLAG(GLUE_CORE_POOLS)
    uintptr_t core_pools_base_mask_ = 0;
#endif
#endif  // PA_CONFIG(DYNAMICALLY_SELECT_POOL_SIZE)
    uintptr_t configurable_pool_base_mask_ = 0;
#if BUILDFLAG(ENABLE_THREAD_ISOLATION)
    ThreadIsolationOption thread_isolation_;
#endif
  };
#if BUILDFLAG(ENABLE_THREAD_ISOLATION)
  static_assert(sizeof(PoolSetup) % SystemPageSize() == 0,
                "PoolSetup has to fill a page(s)");
#else
  static_assert(sizeof(PoolSetup) % kPartitionCachelineSize == 0,
                "PoolSetup has to fill a cacheline(s)");
#endif

  // See the comment describing the address layout above.
  //
  // These are write-once fields, frequently accessed thereafter. Make sure they
  // don't share a cacheline with other, potentially writeable data, through
  // alignment and padding.
  static PoolSetup setup_ PA_CONSTINIT;

#if PA_CONFIG(ENABLE_SHADOW_METADATA)
  static std::ptrdiff_t regular_pool_shadow_offset_;
  static std::ptrdiff_t brp_pool_shadow_offset_;
#endif

#if BUILDFLAG(ENABLE_THREAD_ISOLATION)
  // If we use thread isolation, we need to write-protect its metadata.
  // Allow the function to get access to the PoolSetup.
  friend void WriteProtectThreadIsolatedGlobals(ThreadIsolationOption);
#endif
};
376
GetPoolInfo(uintptr_t address)377 PA_ALWAYS_INLINE PartitionAddressSpace::PoolInfo GetPoolInfo(
378 uintptr_t address) {
379 return PartitionAddressSpace::GetPoolInfo(address);
380 }
381
GetPool(uintptr_t address)382 PA_ALWAYS_INLINE pool_handle GetPool(uintptr_t address) {
383 return GetPoolInfo(address).handle;
384 }
385
OffsetInBRPPool(uintptr_t address)386 PA_ALWAYS_INLINE uintptr_t OffsetInBRPPool(uintptr_t address) {
387 return PartitionAddressSpace::OffsetInBRPPool(address);
388 }
389
390 #if PA_CONFIG(ENABLE_SHADOW_METADATA)
ShadowPoolOffset(pool_handle pool)391 PA_ALWAYS_INLINE std::ptrdiff_t ShadowPoolOffset(pool_handle pool) {
392 return PartitionAddressSpace::ShadowPoolOffset(pool);
393 }
394 #endif
395
396 } // namespace internal
397
398 // Returns false for nullptr.
IsManagedByPartitionAlloc(uintptr_t address)399 PA_ALWAYS_INLINE bool IsManagedByPartitionAlloc(uintptr_t address) {
400 // When ENABLE_BACKUP_REF_PTR_SUPPORT is off, BRP pool isn't used.
401 #if !BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT)
402 PA_DCHECK(!internal::PartitionAddressSpace::IsInBRPPool(address));
403 #endif
404 return internal::PartitionAddressSpace::IsInRegularPool(address)
405 #if BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT)
406 || internal::PartitionAddressSpace::IsInBRPPool(address)
407 #endif
408 #if BUILDFLAG(ENABLE_THREAD_ISOLATION)
409 || internal::PartitionAddressSpace::IsInThreadIsolatedPool(address)
410 #endif
411 || internal::PartitionAddressSpace::IsInConfigurablePool(address);
412 }
413
414 // Returns false for nullptr.
IsManagedByPartitionAllocRegularPool(uintptr_t address)415 PA_ALWAYS_INLINE bool IsManagedByPartitionAllocRegularPool(uintptr_t address) {
416 return internal::PartitionAddressSpace::IsInRegularPool(address);
417 }
418
419 // Returns false for nullptr.
IsManagedByPartitionAllocBRPPool(uintptr_t address)420 PA_ALWAYS_INLINE bool IsManagedByPartitionAllocBRPPool(uintptr_t address) {
421 return internal::PartitionAddressSpace::IsInBRPPool(address);
422 }
423
424 #if BUILDFLAG(GLUE_CORE_POOLS)
425 // Checks whether the address belongs to either regular or BRP pool.
426 // Returns false for nullptr.
IsManagedByPartitionAllocCorePools(uintptr_t address)427 PA_ALWAYS_INLINE bool IsManagedByPartitionAllocCorePools(uintptr_t address) {
428 return internal::PartitionAddressSpace::IsInCorePools(address);
429 }
430 #endif // BUILDFLAG(GLUE_CORE_POOLS)
431
432 // Returns false for nullptr.
IsManagedByPartitionAllocConfigurablePool(uintptr_t address)433 PA_ALWAYS_INLINE bool IsManagedByPartitionAllocConfigurablePool(
434 uintptr_t address) {
435 return internal::PartitionAddressSpace::IsInConfigurablePool(address);
436 }
437
438 #if BUILDFLAG(ENABLE_THREAD_ISOLATION)
439 // Returns false for nullptr.
IsManagedByPartitionAllocThreadIsolatedPool(uintptr_t address)440 PA_ALWAYS_INLINE bool IsManagedByPartitionAllocThreadIsolatedPool(
441 uintptr_t address) {
442 return internal::PartitionAddressSpace::IsInThreadIsolatedPool(address);
443 }
444 #endif
445
IsConfigurablePoolAvailable()446 PA_ALWAYS_INLINE bool IsConfigurablePoolAvailable() {
447 return internal::PartitionAddressSpace::IsConfigurablePoolInitialized();
448 }
449
450 } // namespace partition_alloc
451
452 #endif // BUILDFLAG(HAS_64_BIT_POINTERS)
453
454 #endif // BASE_ALLOCATOR_PARTITION_ALLOCATOR_SRC_PARTITION_ALLOC_PARTITION_ADDRESS_SPACE_H_
455