// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/zone/accounting-allocator.h"

#include <memory>

#include "src/base/bounded-page-allocator.h"
#include "src/base/logging.h"
#include "src/base/macros.h"
#include "src/base/platform/wrappers.h"
#include "src/utils/allocation.h"
#include "src/zone/zone-compression.h"
#include "src/zone/zone-segment.h"

namespace v8 {
namespace internal {

// These definitions are here in order to please the linker, which in debug
// mode sometimes requires static constants to be defined in .cc files.
const size_t ZoneCompression::kReservationSize;
const size_t ZoneCompression::kReservationAlignment;

namespace {

static constexpr size_t kZonePageSize = 256 * KB;

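// Reserves the single aligned address-space region that backs all compressed
// zones. This reserves address space only; pages are committed later by the
// bounded page allocator as segments are allocated.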
VirtualMemory ReserveAddressSpace(v8::PageAllocator* platform_allocator) {
  DCHECK(IsAligned(ZoneCompression::kReservationSize,
                   platform_allocator->AllocatePageSize()));

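  // Pick a randomized, alignment-compatible placement hint by rounding a
  // random mmap address down to the required reservation alignment.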
  void* hint = reinterpret_cast<void*>(RoundDown(
      reinterpret_cast<uintptr_t>(platform_allocator->GetRandomMmapAddr()),
      ZoneCompression::kReservationAlignment));

  VirtualMemory memory(platform_allocator, ZoneCompression::kReservationSize,
                       hint, ZoneCompression::kReservationAlignment);
  if (memory.IsReserved()) {
    CHECK(IsAligned(memory.address(), ZoneCompression::kReservationAlignment));
    return memory;
  }

  FATAL(
      "Fatal process out of memory: Failed to reserve memory for compressed "
      "zones");
  UNREACHABLE();
}

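// Wraps the reservation in a BoundedPageAllocator that hands out
// kZonePageSize-granular pages from within the reserved range.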
std::unique_ptr<v8::base::BoundedPageAllocator> CreateBoundedAllocator(
    v8::PageAllocator* platform_allocator, Address reservation_start) {
  CHECK(reservation_start);
  CHECK(IsAligned(reservation_start, ZoneCompression::kReservationAlignment));

  auto allocator = std::make_unique<v8::base::BoundedPageAllocator>(
      platform_allocator, reservation_start, ZoneCompression::kReservationSize,
      kZonePageSize,
      base::PageInitializationMode::kAllocatedPagesCanBeUninitialized);

  // Exclude the first page from allocation to ensure that accesses through a
  // decompressed null pointer will seg-fault.
  allocator->AllocatePagesAt(reservation_start, kZonePageSize,
                             v8::PageAllocator::kNoAccess);
  return allocator;
}

}  // namespace

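// With zone compression enabled (COMPRESS_ZONES_BOOL), all compressible zone
// segments are served from the single reserved region set up below.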
AccountingAllocator::AccountingAllocator()
    : zone_backing_malloc_(
          V8::GetCurrentPlatform()->GetZoneBackingAllocator()->GetMallocFn()),
      zone_backing_free_(
          V8::GetCurrentPlatform()->GetZoneBackingAllocator()->GetFreeFn()) {
  if (COMPRESS_ZONES_BOOL) {
    v8::PageAllocator* platform_page_allocator = GetPlatformPageAllocator();
    VirtualMemory memory = ReserveAddressSpace(platform_page_allocator);
    reserved_area_ = std::make_unique<VirtualMemory>(std::move(memory));
    bounded_page_allocator_ = CreateBoundedAllocator(platform_page_allocator,
                                                     reserved_area_->address());
  }
}

AccountingAllocator::~AccountingAllocator() = default;

Segment* AccountingAllocator::AllocateSegment(size_t bytes,
                                              bool supports_compression) {
  void* memory;
  if (COMPRESS_ZONES_BOOL && supports_compression) {
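    // Compressed segments are page-granular: round the request up to the
    // zone page size and commit read-write pages inside the reservation.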
    bytes = RoundUp(bytes, kZonePageSize);
    memory = AllocatePages(bounded_page_allocator_.get(), nullptr, bytes,
                           kZonePageSize, PageAllocator::kReadWrite);
  } else {
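    // Uncompressed segments come from the embedder-provided zone backing
    // allocator; AllocWithRetry retries a failing allocation before finally
    // giving up and returning nullptr.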
    memory = AllocWithRetry(bytes, zone_backing_malloc_);
  }
  if (memory == nullptr) return nullptr;

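  // Bump the global usage counter and, if a new high-water mark was reached,
  // publish it. The CAS loop tolerates concurrent updates of
  // {max_memory_usage_} by other threads.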
  size_t current =
      current_memory_usage_.fetch_add(bytes, std::memory_order_relaxed) + bytes;
  size_t max = max_memory_usage_.load(std::memory_order_relaxed);
  while (current > max && !max_memory_usage_.compare_exchange_weak(
                              max, current, std::memory_order_relaxed)) {
    // {max} was updated by {compare_exchange_weak}; retry.
  }
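  // Construct the segment header in-place at the start of the block; the
  // usable zone memory follows the header.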
  DCHECK_LE(sizeof(Segment), bytes);
  return new (memory) Segment(bytes);
}

void AccountingAllocator::ReturnSegment(Segment* segment,
                                        bool supports_compression) {
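  // Zap the contents while the header (and thus total_size()) is still
  // valid, record the size, then zap the header before releasing the memory.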
  segment->ZapContents();
  size_t segment_size = segment->total_size();
  current_memory_usage_.fetch_sub(segment_size, std::memory_order_relaxed);
  segment->ZapHeader();
  if (COMPRESS_ZONES_BOOL && supports_compression) {
    FreePages(bounded_page_allocator_.get(), segment, segment_size);
  } else {
    zone_backing_free_(segment);
  }
}

}  // namespace internal
}  // namespace v8