/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef BERBERIS_BASE_ARENA_ALLOC_H_
#define BERBERIS_BASE_ARENA_ALLOC_H_

#include <cstddef>
#include <cstdint>
#include <new>

#include "berberis/base/bit_util.h"
#include "berberis/base/logging.h"
#include "berberis/base/mmap.h"
#include "berberis/base/mmap_pool.h"

namespace berberis {

namespace arena_internal {

// TODO(eaeltsin): tune for each guest arch?
inline constexpr size_t kDefaultArenaBlockSize = 32 * kPageSize;
inline constexpr size_t kMmapPoolSizeLimit = kDefaultArenaBlockSize * 16;
inline constexpr size_t kMaxAllocSizeInDefaultArenaBlock = 16 * kPageSize;

using MmapPoolForArena = MmapPool<kDefaultArenaBlockSize, kMmapPoolSizeLimit>;

struct ArenaBlock {
  const size_t size;
  ArenaBlock* next;

  uint8_t* data() { return reinterpret_cast<uint8_t*>(this) + sizeof(ArenaBlock); }
  uint8_t* data_end() { return reinterpret_cast<uint8_t*>(this) + size; }
};

inline ArenaBlock* AllocArenaBlock(size_t size, size_t align, ArenaBlock* blocks) {
  // Reserve room for the block header plus worst-case alignment padding.
  size += AlignUp(sizeof(ArenaBlock), align);

  // Pick between default and dedicated block sizes.
  if (size < kMaxAllocSizeInDefaultArenaBlock) {
    return new (MmapPoolForArena::Alloc()) ArenaBlock{kDefaultArenaBlockSize, blocks};
  } else {
    return new (MmapOrDie(size)) ArenaBlock{AlignUpPageSize(size), blocks};
  }
}
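
// Concrete numbers, assuming 4 KiB pages (an assumption - kPageSize is
// platform-defined): default blocks are 128 KiB, MmapPoolForArena caches up to
// 2 MiB of them, and any request below 64 KiB (including the aligned header)
// is served from a pooled default block, while larger requests get a dedicated
// page-aligned mapping of their own.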

inline void FreeArenaBlocks(ArenaBlock* blocks) {
  while (blocks) {
    auto next = blocks->next;
    // A dedicated block may happen to be exactly kDefaultArenaBlockSize.
    // It's still okay to push it to MmapPool.
    if (blocks->size == kDefaultArenaBlockSize) {
      MmapPoolForArena::Free(blocks);
    } else {
      MunmapOrDie(blocks, blocks->size);
    }
    blocks = next;
  }
}

}  // namespace arena_internal

// Arena is for placement of small objects with the same lifetime (such as IR nodes in translation).
// Arena is NOT thread-safe!
class Arena {
 public:
  Arena() {}

  ~Arena() { arena_internal::FreeArenaBlocks(blocks_); }

  void* Alloc(size_t size, size_t align) {
    if (size == 0) {
      // STL allocators shall return distinct non-NULL values for
      // 0-sized allocations.
      size = 1;
    }

    // Try to allocate in the current block.
    auto res = AlignUp(current_, align);
    if (res + size <= end_) {
      // Fits in the current block.
      current_ = res + size;
    } else {
      // Doesn't fit in the current block - allocate a new block of sufficient size.
      blocks_ = arena_internal::AllocArenaBlock(size, align, blocks_);

      // Allocate in the new block.
      res = AlignUp(blocks_->data(), align);
      auto new_current = res + size;

      if (end_ - current_ < blocks_->data_end() - new_current) {
        // The current block has less space left than the new one - switch to
        // allocating from the new block.
        current_ = new_current;
        end_ = blocks_->data_end();
      }
    }

    return res;
  }

 private:
  arena_internal::ArenaBlock* blocks_ = nullptr;
  uint8_t* current_ = nullptr;
  uint8_t* end_ = nullptr;

  friend class ArenaTest;
};
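
// A minimal usage sketch (illustrative, not part of the original header):
//
//   Arena arena;
//   // Raw allocation plus placement-new. Arena never runs destructors, so
//   // this pattern is only safe for trivially destructible types.
//   void* p = arena.Alloc(sizeof(uint64_t), alignof(uint64_t));
//   uint64_t* value = new (p) uint64_t{42};
//   // All blocks are released at once when `arena` is destroyed.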

template <typename T, typename... Args>
T* NewInArena(Arena* arena, Args... args) {
  void* ptr = arena->Alloc(sizeof(T), alignof(T));
  return new (ptr) T(args...);
}

template <typename T>
T* NewArrayInArena(Arena* arena, size_t size) {
  void* ptr = arena->Alloc(sizeof(T) * size, alignof(T));
  return new (ptr) T[size];
}
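
// Usage sketch for the helpers above (illustrative; `Node`, `op`, `lhs` and
// `rhs` are hypothetical - any trivially destructible type works):
//
//   Arena arena;
//   Node* node = NewInArena<Node>(&arena, op, lhs, rhs);
//   Node* buffer = NewArrayInArena<Node>(&arena, 16);
//   // There is no matching delete: the arena reclaims all memory at once in
//   // its destructor, and ~Node() is never invoked.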

// ArenaAllocator is used for faster placement of STL containers.
template <class T>
class ArenaAllocator {
 public:
  typedef T value_type;

  // Allow passing arena as allocator arg of STL container ctor.
  ArenaAllocator(Arena* arena) : arena_(arena) {}  // NOLINT(runtime/explicit)

  template <typename U>
  ArenaAllocator(const ArenaAllocator<U>& other) : arena_(other.arena()) {}

  T* allocate(size_t n) {
    size_t size = n * sizeof(T);
    return reinterpret_cast<T*>(arena()->Alloc(size, alignof(T)));
  }

  void deallocate(T*, size_t) {}

  bool operator==(const ArenaAllocator<T>& other) const { return arena() == other.arena(); }

  bool operator!=(const ArenaAllocator<T>& other) const { return arena() != other.arena(); }

  Arena* arena() const { return arena_; }

 private:
  Arena* arena_;
};
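
// Usage sketch (illustrative): the implicit Arena* conversion lets a container
// take the arena directly as its allocator argument.
//
//   #include <vector>
//
//   Arena arena;
//   std::vector<int, ArenaAllocator<int>> v(&arena);
//   v.push_back(1);  // element storage comes from the arena; deallocate() is a no-op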

}  // namespace berberis

#endif  // BERBERIS_BASE_ARENA_ALLOC_H_