/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Sometimes it is necessary to allocate a large number of small
// objects.  Doing this the usual way (malloc, new) is slow,
// especially for multithreaded programs.  A BaseArena provides a
// mark/release method of memory management: it asks for a large chunk
// from the operating system and doles it out bit by bit as required.
// Then you free all the memory at once by calling BaseArena::Reset().
//
// --Example Uses Of UnsafeArena
//    This is the simplest way.  Just create an arena, and whenever you
//    need a block of memory to put something in, call BaseArena::Alloc(), e.g.
//        s = arena.Alloc(100);
//        snprintf(s, 100, "%s:%d", host, port);
//        arena.Shrink(s, strlen(s)+1);  // optional; see below for use
//
//    You'll probably use the convenience routines more often:
//        s = arena.Strdup(host);        // a copy of host lives in the arena
//        s = arena.Strndup(host, 100);  // we guarantee to NUL-terminate!
//        s = arena.Memdup(protobuf, sizeof(protobuf));
//
//    If you go the Alloc() route, you'll probably allocate too much space.
//    You can reclaim the extra space by calling Shrink() before the next
//    Alloc() (or Strdup(), or whatever) with the number of bytes you
//    actually used.
//       If you use this method, memory management is easy: just call Alloc()
//    and friends a lot, and call Reset() when you're done with the data.
//
// --For Strings: Uses UnsafeArena
//    This is a special case of the STL-allocator support, but is simpler.
//    Use an astring, which acts like a string but allocates from the
//    passed-in arena:
//       astring s(arena);             // or "sastring" to use a SafeArena
//       s.assign(host);
//       astring s2(host, hostlen, arena);
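//
//    A complete mark/release round trip might look like the sketch below
//    (the block size, the loop bound, and Use() are illustrative
//    placeholders, not part of this API):
//        UnsafeArena arena(4096);           // 4k internal blocks
//        for (int i = 0; i < 100; ++i) {
//          char* s = arena.Strdup(host);    // s stays valid until Reset()
//          Use(s);
//        }
//        arena.Reset();                     // frees everything at once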

#ifndef LIBTEXTCLASSIFIER_UTILS_BASE_ARENA_H_
#define LIBTEXTCLASSIFIER_UTILS_BASE_ARENA_H_

#include <assert.h>
#include <string.h>

#include <new>      // for the placement new used by AllocAndInit()
#include <utility>  // for the std::forward used by AllocAndInit()
#include <vector>
#ifdef ADDRESS_SANITIZER
#include <sanitizer/asan_interface.h>
#endif

#include "utils/base/integral_types.h"
#include "utils/base/logging.h"

namespace libtextclassifier3 {

// This class is "thread-compatible": different threads can access the
// arena at the same time without locking, as long as they use only
// const methods.
class BaseArena {
 protected:  // You can't make an arena directly; only a subclass of one.
  BaseArena(char* first_block, const size_t block_size, bool align_to_page);

 public:
  virtual ~BaseArena();

  virtual void Reset();

  // These are "slow" only because they're virtual; subclasses define
  // "fast" non-virtual versions.
  virtual char* SlowAlloc(size_t size) = 0;
  virtual void SlowFree(void* memory, size_t size) = 0;
  virtual char* SlowRealloc(char* memory, size_t old_size, size_t new_size) = 0;

  class Status {
   private:
    friend class BaseArena;
    size_t bytes_allocated_;

   public:
    Status() : bytes_allocated_(0) {}
    size_t bytes_allocated() const { return bytes_allocated_; }
  };

  // Accessors and stats counters
  // This accessor isn't so useful here, but is included so we can be
  // type-compatible with ArenaAllocator (in arena_allocator.h).  That is,
  // we define arena() because ArenaAllocator does, and that way you
  // can template on either of these and know it's safe to call arena().
  virtual BaseArena* arena() { return this; }
  size_t block_size() const { return block_size_; }
  int block_count() const;
  bool is_empty() const {
    // must check block count in case we allocated a block larger than blksize
    return freestart_ == freestart_when_empty_ && 1 == block_count();
  }

  // The alignment that ArenaAllocator uses except for 1-byte objects.
  static constexpr int kDefaultAlignment = 8;

 protected:
  bool SatisfyAlignment(const size_t alignment);
  void MakeNewBlock(const uint32 alignment);
  void* GetMemoryFallback(const size_t size, const int align);
  void* GetMemory(const size_t size, const int align) {
    assert(remaining_ <= block_size_);                   // an invariant
    if (size > 0 && size <= remaining_ && align == 1) {  // common case
      last_alloc_ = freestart_;
      freestart_ += size;
      remaining_ -= size;
#ifdef ADDRESS_SANITIZER
      ASAN_UNPOISON_MEMORY_REGION(last_alloc_, size);
#endif
      return reinterpret_cast<void*>(last_alloc_);
    }
    return GetMemoryFallback(size, align);
  }

  // This doesn't actually free any memory except for the last piece allocated.
  void ReturnMemory(void* memory, const size_t size) {
    if (memory == last_alloc_ &&
        size == static_cast<size_t>(freestart_ - last_alloc_)) {
      remaining_ += size;
      freestart_ = last_alloc_;
    }
#ifdef ADDRESS_SANITIZER
    ASAN_POISON_MEMORY_REGION(memory, size);
#endif
  }

  // This is used by Realloc() -- usually we Realloc just by copying to a
  // bigger space, but for the last alloc we can realloc by growing the region.
  bool AdjustLastAlloc(void* last_alloc, const size_t newsize);

  Status status_;
  size_t remaining_;

 private:
  struct AllocatedBlock {
    char* mem;
    size_t size;
    size_t alignment;
  };

  // Allocates a new block of at least block_size, with the specified
  // alignment.
  // The returned AllocatedBlock* is valid until the next call to AllocNewBlock
  // or Reset (i.e. anything that might affect overflow_blocks_).
  AllocatedBlock* AllocNewBlock(const size_t block_size,
                                const uint32 alignment);

  const AllocatedBlock* IndexToBlock(int index) const;

  const size_t block_size_;
  char* freestart_;  // beginning of the free space in most recent block
  char* freestart_when_empty_;  // beginning of the free space when we're empty
  char* last_alloc_;            // used to make sure ReturnMemory() is safe
  // If the first_blocks_ aren't enough, expand into overflow_blocks_.
  // STL vector isn't as efficient as it could be, so we use an array at first.
  std::vector<AllocatedBlock>* overflow_blocks_;
  const bool first_block_externally_owned_;  // true if they pass in 1st block
  const bool page_aligned_;  // when true, all blocks need to be page aligned
  int8_t blocks_alloced_;  // how many of the first_blocks_ have been allocated
  AllocatedBlock first_blocks_[16];  // the length of this array is arbitrary

  void FreeBlocks();  // Frees all except first block

  BaseArena(const BaseArena&) = delete;
  BaseArena& operator=(const BaseArena&) = delete;
};

class UnsafeArena : public BaseArena {
 public:
  // Allocates a thread-compatible arena with the specified block size.
  explicit UnsafeArena(const size_t block_size)
      : BaseArena(nullptr, block_size, false) {}
  UnsafeArena(const size_t block_size, bool align)
      : BaseArena(nullptr, block_size, align) {}

  // Allocates a thread-compatible arena with the specified block
  // size. "first_block" must have size "block_size". Memory is
  // allocated from "first_block" until it is exhausted; after that
  // memory is allocated by allocating new blocks from the heap.
  UnsafeArena(char* first_block, const size_t block_size)
      : BaseArena(first_block, block_size, false) {}
  UnsafeArena(char* first_block, const size_t block_size, bool align)
      : BaseArena(first_block, block_size, align) {}

  char* Alloc(const size_t size) {
    return reinterpret_cast<char*>(GetMemory(size, 1));
  }
  void* AllocAligned(const size_t size, const int align) {
    return GetMemory(size, align);
  }

  // Allocates and initializes an object on the arena.
  template <typename T, typename... Args>
  T* AllocAndInit(Args&&... args) {
    return new (reinterpret_cast<T*>(AllocAligned(sizeof(T), alignof(T))))
        T(std::forward<Args>(args)...);
  }
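
  // A hypothetical usage sketch (MyType and its constructor arguments are
  // placeholders, not part of this API).  The object is constructed in
  // place with placement new; note that Reset() reclaims the memory
  // without running destructors, so this fits trivially destructible
  // types best:
  //     MyType* obj = arena.AllocAndInit<MyType>(arg1, arg2);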

  char* Calloc(const size_t size) {
    void* return_value = Alloc(size);
    memset(return_value, 0, size);
    return reinterpret_cast<char*>(return_value);
  }

  void* CallocAligned(const size_t size, const int align) {
    void* return_value = AllocAligned(size, align);
    memset(return_value, 0, size);
    return return_value;
  }

  // Free does nothing except for the last piece allocated.
  void Free(void* memory, size_t size) { ReturnMemory(memory, size); }
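
  // For example (a sketch; the sizes are illustrative), only the most
  // recent allocation can actually be reclaimed:
  //     char* a = arena.Alloc(10);
  //     char* b = arena.Alloc(20);
  //     arena.Free(b, 20);  // reclaimed: b was the last allocation
  //     arena.Free(a, 10);  // no-op: a is not the last allocation
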
  char* SlowAlloc(size_t size) override {  // "slow" because it's virtual
    return Alloc(size);
  }
  void SlowFree(void* memory,
                size_t size) override {  // "slow" because it's virtual
    Free(memory, size);
  }
  char* SlowRealloc(char* memory, size_t old_size, size_t new_size) override {
    return Realloc(memory, old_size, new_size);
  }

  char* Memdup(const char* s, size_t bytes) {
    char* newstr = Alloc(bytes);
    memcpy(newstr, s, bytes);
    return newstr;
  }
  char* MemdupPlusNUL(const char* s, size_t bytes) {  // like "string(s, len)"
    char* newstr = Alloc(bytes + 1);
    memcpy(newstr, s, bytes);
    newstr[bytes] = '\0';
    return newstr;
  }
  char* Strdup(const char* s) { return Memdup(s, strlen(s) + 1); }
  // Unlike libc's strncpy, this always NUL-terminates the result.
  // It will allocate at most n+1 bytes (+1 is for the NUL terminator).
  char* Strndup(const char* s, size_t n) {
    // Use memchr so we don't walk past n.
    // We can't use the one in //strings since this is the base library,
    // so we have to reinterpret_cast from the libc void*.
    const char* eos = reinterpret_cast<const char*>(memchr(s, '\0', n));
    // if no NUL terminator is found, use the full n
    const size_t bytes = (eos == nullptr) ? n : eos - s;
    return MemdupPlusNUL(s, bytes);
  }
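
  // For instance (a sketch): Strndup("ab", 5) stops at the NUL and
  // allocates 3 bytes ("ab" plus the terminator), while
  // Strndup("abcdef", 3) finds no NUL within n and allocates 4 bytes
  // for "abc" plus the terminator.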

  // You can realloc a previously-allocated string either bigger or smaller.
  // We can be more efficient if you realloc a string right after you allocate
  // it (eg allocate way-too-much space, fill it, realloc to just-big-enough).
  char* Realloc(char* original, size_t oldsize, size_t newsize);
  // If you know the new size is smaller (or equal), you don't need to know
  // oldsize.  We don't check that newsize is smaller, so you'd better be sure!
  char* Shrink(char* s, size_t newsize) {
    AdjustLastAlloc(s, newsize);  // reclaim space if we can
    return s;                     // never need to move if we go smaller
  }
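
  // A sketch of the over-allocate-then-shrink pattern from the file
  // comment (the 100-byte bound, host, and port are illustrative):
  //     char* s = arena.Alloc(100);
  //     int n = snprintf(s, 100, "%s:%d", host, port);
  //     arena.Shrink(s, n + 1);  // hand back the unused tail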

  // We make a copy so you can keep track of status at a given point in time.
  Status status() const { return status_; }

  // Number of bytes remaining before the arena has to allocate another block.
  size_t bytes_until_next_allocation() const { return remaining_; }

 private:
  UnsafeArena(const UnsafeArena&) = delete;
  UnsafeArena& operator=(const UnsafeArena&) = delete;

  virtual void UnusedKeyMethod();  // Dummy key method to avoid weak vtable.
};

}  // namespace libtextclassifier3

#endif  // LIBTEXTCLASSIFIER_UTILS_BASE_ARENA_H_