/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef RUNTIME_MEM_PANDA_BUMP_ALLOCATOR_H
#define RUNTIME_MEM_PANDA_BUMP_ALLOCATOR_H

#include <functional>
#include <memory>

#include "mem/mem_pool.h"
#include "libpandabase/macros.h"
#include "libpandabase/mem/arena-inl.h"
#include "libpandabase/mem/mem.h"
#include "libpandabase/mem/mem_range.h"
#include "runtime/mem/tlab.h"
#include "runtime/mem/lock_config_helper.h"

namespace panda {
class ObjectHeader;
}  // namespace panda

namespace panda::mem {

class BumpPointerAllocatorLockConfig {
public:
    using CommonLock = os::memory::Mutex;
    using DummyLock = os::memory::DummyLock;

    template <MTModeT MTMode>
    using ParameterizedLock = typename LockConfigHelper<BumpPointerAllocatorLockConfig, MTMode>::Value;
};

// This allocator allocates memory in a bump-pointer fashion and can also
// hand out large pieces of the pool as TLABs.
//
// Structure:
//
//  |------------------------------------------------------------------------------------------------------------|
//  |                                                 Memory Pool                                                |
//  |------------------------------------------------------------------------------------------------------------|
//  |     allocated objects     |         unused memory        |                 memory for TLABs                |
//  |---------------------------|------------------------------|-------------------------------------------------|
//  |xxxxxxxxxx|xxxxxx|xxxxxxxxx|                              |               ||               ||               |
//  |xxxxxxxxxx|xxxxxx|xxxxxxxxx|                              |               ||               ||               |
//  |xxxxxxxxxx|xxxxxx|xxxxxxxxx|           free memory        |     TLAB 3    ||     TLAB 2    ||     TLAB 1    |
//  |xxxxxxxxxx|xxxxxx|xxxxxxxxx|                              |               ||               ||               |
//  |xxxxxxxxxx|xxxxxx|xxxxxxxxx|                              |               ||               ||               |
//  |------------------------------------------------------------------------------------------------------------|
//

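// Usage sketch (illustrative only, not part of this header): constructing the allocator on top of an
// existing Pool, making a raw allocation, creating a TLAB and resetting. How the Pool, the MemStatsType
// instance and the size constants are obtained below is an assumption, not something this file defines.
//
//     Pool pool = PoolManager::GetMmapMemPool()->AllocPool(  // hypothetical source of a Pool
//         POOL_SIZE, SpaceType::SPACE_TYPE_OBJECT, AllocatorType::BUMP_ALLOCATOR, nullptr);
//     BumpPointerAllocator<AllocConfig, BumpPointerAllocatorLockConfig::CommonLock, true>
//         allocator(pool, SpaceType::SPACE_TYPE_OBJECT, mem_stats, MAX_TLABS_COUNT);
//     void *mem = allocator.Alloc(OBJECT_SIZE);         // bump-pointer allocation from the pool start
//     TLAB *tlab = allocator.CreateNewTLAB(TLAB_SIZE);  // TLAB memory is carved from the pool end
//     allocator.Reset();                                // everything above becomes free again
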
template <typename AllocConfigT, typename LockConfigT = BumpPointerAllocatorLockConfig::CommonLock,
          bool UseTlabs = false>
class BumpPointerAllocator {
public:
    DEFAULT_NOEXCEPT_MOVE_SEMANTIC(BumpPointerAllocator);
    NO_COPY_SEMANTIC(BumpPointerAllocator);
    ~BumpPointerAllocator();

    BumpPointerAllocator() = delete;

    TLAB *CreateNewTLAB(size_t size);

    /**
     * \brief Construct a BumpPointer allocator over the provided memory pool
     * @param pool - memory pool which this allocator will manage
     * @param type_allocation - space type of the objects allocated by this allocator
     * @param mem_stats - memory statistics collector used to account allocations
     * @param tlabs_max_count - maximum number of TLABs that can be created from this pool
     */
    explicit BumpPointerAllocator(Pool pool, SpaceType type_allocation, MemStatsType *mem_stats,
                                  size_t tlabs_max_count = 0);

    [[nodiscard]] void *Alloc(size_t size, Alignment alignment = panda::DEFAULT_ALIGNMENT);

    void VisitAndRemoveAllPools(const MemVisitor &mem_visitor);

    void VisitAndRemoveFreePools(const MemVisitor &mem_visitor);

    /**
     * \brief Iterates over all objects allocated by this allocator
     * @param object_visitor - functor called for each allocated object
     */
    void IterateOverObjects(const std::function<void(ObjectHeader *object_header)> &object_visitor);

    /**
     * \brief Iterates over objects in the range inclusively.
     * @tparam MemVisitor
     * @param mem_visitor - function pointer or functor
     * @param left_border - a pointer to the first byte of the range
     * @param right_border - a pointer to the last byte of the range
     */
    template <typename MemVisitor>
    void IterateOverObjectsInRange(const MemVisitor &mem_visitor, void *left_border, void *right_border);
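
    // Example (sketch): visiting objects in a sub-range with a lambda. The visitor is assumed to take
    // an ObjectHeader*, by analogy with IterateOverObjects above; range_begin/range_end are placeholder
    // pointers into memory owned by this allocator.
    //
    //     allocator.IterateOverObjectsInRange(
    //         [](ObjectHeader *obj) { /* inspect or update obj */ },
    //         range_begin, range_end);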

    /**
     * \brief Resets the allocator to its initial, empty state: all allocated objects and TLABs are discarded
     */
    void Reset();

    /**
     * \brief Add an extra memory pool to the allocator.
     * The memory pool must be located just after the current memory given to this allocator.
     * @param mem - pointer to the extra memory pool
     * @param size - size of the extra memory pool
     */
    void ExpandMemory(void *mem, size_t size);

    /**
     * \brief Get the MemRange used by the allocator
     * @return MemRange of the memory owned by this allocator
     */
    MemRange GetMemRange();

    // The BumpPointer allocator can't be used for simple collection;
    // only CollectAndMove is supported.
    void Collect(GCObjectVisitor death_checker_fn) = delete;

    /**
     * \brief Collects dead objects and moves live ones using the provided visitors
     * @param death_checker - functor that checks whether an object is still alive
     * @param object_move_visitor - functor invoked to move a live object
     */
    template <typename ObjectMoveVisitorT>
    void CollectAndMove(const GCObjectVisitor &death_checker, const ObjectMoveVisitorT &object_move_visitor);

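    // Example (sketch): calling CollectAndMove. The visitor signatures below are assumptions based on the
    // parameter names above; the ObjectStatus values and the IsMarked() helper are illustrative and not
    // defined in this header.
    //
    //     allocator.CollectAndMove(
    //         [](ObjectHeader *obj) { return IsMarked(obj) ? ObjectStatus::ALIVE_OBJECT
    //                                                       : ObjectStatus::DEAD_OBJECT; },
    //         [](ObjectHeader *obj) { /* move obj and record the forwarding address */ return true; });
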
    static constexpr AllocatorType GetAllocatorType()
    {
        return AllocatorType::BUMP_ALLOCATOR;
    }

    bool ContainObject(const ObjectHeader *obj);

    bool IsLive(const ObjectHeader *obj);

private:
    // Keeps the fixed set of TLAB descriptors created from this allocator's pool
    // and tracks how much of the pool they occupy
    class TLABsManager {
    public:
        explicit TLABsManager(size_t tlabs_max_count) : tlabs_max_count_(tlabs_max_count), tlabs_(tlabs_max_count) {}

        void Reset()
        {
            for (size_t i = 0; i < cur_tlab_num_; i++) {
                tlabs_[i].Fill(nullptr, 0);
            }
            cur_tlab_num_ = 0;
            tlabs_occupied_size_ = 0;
        }

        TLAB *GetUnusedTLABInstance()
        {
            if (cur_tlab_num_ < tlabs_max_count_) {
                return &tlabs_[cur_tlab_num_++];
            }
            return nullptr;
        }

        template <class Visitor>
        void IterateOverTLABs(const Visitor &visitor)
        {
            for (size_t i = 0; i < cur_tlab_num_; i++) {
                if (!visitor(&tlabs_[i])) {
                    return;
                }
            }
        }

        size_t GetTLABsOccupiedSize()
        {
            return tlabs_occupied_size_;
        }

        void IncreaseTLABsOccupiedSize(size_t size)
        {
            tlabs_occupied_size_ += size;
        }

    private:
        size_t cur_tlab_num_ {0};
        size_t tlabs_max_count_;
        std::vector<TLAB> tlabs_;
        size_t tlabs_occupied_size_ {0};
    };

    // Mutex, which allows only one thread to Alloc/Free/Collect/Iterate inside this allocator
    LockConfigT allocator_lock_;
    Arena arena_;
    TLABsManager tlab_manager_;
    SpaceType type_allocation_;
    MemStatsType *mem_stats_;
};

}  // namespace panda::mem

#endif  // RUNTIME_MEM_PANDA_BUMP_ALLOCATOR_H