// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/zone/accounting-allocator.h"

#include <memory>

#include "src/base/bounded-page-allocator.h"
#include "src/base/logging.h"
#include "src/base/macros.h"
#include "src/utils/allocation.h"
#include "src/zone/zone-compression.h"
#include "src/zone/zone-segment.h"

namespace v8 {
namespace internal {

// These definitions are here in order to please the linker, which in debug
// mode sometimes requires static constants to be defined in .cc files.
const size_t ZoneCompression::kReservationSize;
const size_t ZoneCompression::kReservationAlignment;

namespace {

static constexpr size_t kZonePageSize = 256 * KB;

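// Reserves the single contiguous address region that backs all compressed
// zones. Aligning the reservation to kReservationAlignment lets compressed
// (offset-based) zone pointers be decompressed relative to the region's base.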
VirtualMemory ReserveAddressSpace(v8::PageAllocator* platform_allocator) {
  DCHECK(IsAligned(ZoneCompression::kReservationSize,
                   platform_allocator->AllocatePageSize()));

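  // Pick a random address, rounded down to the required alignment, as a
  // placement hint for the reservation.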
  void* hint = reinterpret_cast<void*>(RoundDown(
      reinterpret_cast<uintptr_t>(platform_allocator->GetRandomMmapAddr()),
      ZoneCompression::kReservationAlignment));

  VirtualMemory memory(platform_allocator, ZoneCompression::kReservationSize,
                       hint, ZoneCompression::kReservationAlignment);
  if (memory.IsReserved()) {
    CHECK(IsAligned(memory.address(), ZoneCompression::kReservationAlignment));
    return memory;
  }

  FATAL(
      "Fatal process out of memory: Failed to reserve memory for compressed "
      "zones");
  UNREACHABLE();
}

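// Wraps the reservation in a BoundedPageAllocator that hands out
// kZonePageSize pages from within it.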
std::unique_ptr<v8::base::BoundedPageAllocator> CreateBoundedAllocator(
    v8::PageAllocator* platform_allocator, Address reservation_start) {
  CHECK(reservation_start);
  CHECK(IsAligned(reservation_start, ZoneCompression::kReservationAlignment));

  auto allocator = std::make_unique<v8::base::BoundedPageAllocator>(
      platform_allocator, reservation_start, ZoneCompression::kReservationSize,
      kZonePageSize);

  // Exclude the first page from allocation to ensure that accesses through a
  // decompressed null pointer will seg-fault.
  allocator->AllocatePagesAt(reservation_start, kZonePageSize,
                             v8::PageAllocator::kNoAccess);
  return allocator;
}

}  // namespace

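// With zone pointer compression enabled, all compressed zones share one
// address space reservation; set it up eagerly so segment allocation only
// has to grab pages from the bounded allocator.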
AccountingAllocator::AccountingAllocator() {
  if (COMPRESS_ZONES_BOOL) {
    v8::PageAllocator* platform_page_allocator = GetPlatformPageAllocator();
    VirtualMemory memory = ReserveAddressSpace(platform_page_allocator);
    reserved_area_ = std::make_unique<VirtualMemory>(std::move(memory));
    bounded_page_allocator_ = CreateBoundedAllocator(platform_page_allocator,
                                                     reserved_area_->address());
  }
}

AccountingAllocator::~AccountingAllocator() = default;

Segment* AccountingAllocator::AllocateSegment(size_t bytes,
                                              bool supports_compression) {
  void* memory;
  if (COMPRESS_ZONES_BOOL && supports_compression) {
    bytes = RoundUp(bytes, kZonePageSize);
    memory = AllocatePages(bounded_page_allocator_.get(), nullptr, bytes,
                           kZonePageSize, PageAllocator::kReadWrite);
  } else {
    memory = AllocWithRetry(bytes);
  }
  if (memory == nullptr) return nullptr;

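  // Bump the running total and, via the CAS loop below, raise the recorded
  // peak if this allocation pushed usage past it.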
  size_t current =
      current_memory_usage_.fetch_add(bytes, std::memory_order_relaxed) + bytes;
  size_t max = max_memory_usage_.load(std::memory_order_relaxed);
  while (current > max && !max_memory_usage_.compare_exchange_weak(
                              max, current, std::memory_order_relaxed)) {
    // {max} was updated by {compare_exchange_weak}; retry.
  }
  DCHECK_LE(sizeof(Segment), bytes);
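  // Construct the Segment header in-place at the start of the block; the
  // remainder of the block is the segment's usable memory.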
  return new (memory) Segment(bytes);
}

void AccountingAllocator::ReturnSegment(Segment* segment,
                                        bool supports_compression) {
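  // Zap the segment's contents (and, below, its header) so stale references
  // into the returned segment are easier to detect.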
  segment->ZapContents();
  size_t segment_size = segment->total_size();
  current_memory_usage_.fetch_sub(segment_size, std::memory_order_relaxed);
  segment->ZapHeader();
  if (COMPRESS_ZONES_BOOL && supports_compression) {
    CHECK(FreePages(bounded_page_allocator_.get(), segment, segment_size));
  } else {
    free(segment);
  }
}
115 
116 }  // namespace internal
117 }  // namespace v8
118