/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_RUNTIME_MEM_INTERNAL_ALLOCATOR_INL_H
#define PANDA_RUNTIME_MEM_INTERNAL_ALLOCATOR_INL_H

#include "runtime/mem/malloc-proxy-allocator-inl.h"
#include "runtime/mem/freelist_allocator-inl.h"
#include "runtime/mem/humongous_obj_allocator-inl.h"
#include "runtime/mem/internal_allocator.h"
#include "runtime/mem/runslots_allocator-inl.h"

namespace panda::mem {

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define LOG_INTERNAL_ALLOCATOR(level) LOG(level, ALLOC) << "InternalAllocator: "

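// Allocates raw memory for an array of |size| elements of T with T's natural alignment.
// No constructors are run; returns nullptr if the underlying allocation fails.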
template <InternalAllocatorConfig Config>
template <class T>
T *InternalAllocator<Config>::AllocArray(size_t size)
{
    return static_cast<T *>(this->Alloc(sizeof(T) * size, GetAlignment<T>()));
}

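// Same as AllocArray(), but goes through AllocLocal() instead of Alloc().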
template <InternalAllocatorConfig Config>
template <class T>
T *InternalAllocator<Config>::AllocArrayLocal(size_t size)
{
    return static_cast<T *>(this->AllocLocal(sizeof(T) * size, GetAlignment<T>()));
}

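// Allocates and constructs a single object of non-array type T, forwarding |args| to its constructor.
// Returns nullptr if the allocation fails.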
template <InternalAllocatorConfig Config>
template <typename T, typename... Args>
std::enable_if_t<!std::is_array_v<T>, T *> InternalAllocator<Config>::New(Args &&... args)
{
    void *p = Alloc(sizeof(T), GetAlignment<T>());
    if (UNLIKELY(p == nullptr)) {
        return nullptr;
    }
    new (p) T(std::forward<Args>(args)...);
    return reinterpret_cast<T *>(p);
}

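// Allocates an unbounded array type T = U[] with |size| default-constructed elements.
// The element count is stored in a header placed just before the returned data pointer,
// so DeleteArray() can later destroy every element and free the whole block.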
template <InternalAllocatorConfig Config>
template <typename T>
std::enable_if_t<is_unbounded_array_v<T>, std::remove_extent_t<T> *> InternalAllocator<Config>::New(size_t size)
{
    static constexpr size_t SIZE_BEFORE_DATA_OFFSET = AlignUp(sizeof(size_t), GetAlignmentInBytes(GetAlignment<T>()));
    using element_type = std::remove_extent_t<T>;
    void *p = Alloc(SIZE_BEFORE_DATA_OFFSET + sizeof(element_type) * size, GetAlignment<T>());
    // Mirror the single-object New(): report allocation failure instead of dereferencing nullptr.
    if (UNLIKELY(p == nullptr)) {
        return nullptr;
    }
    *static_cast<size_t *>(p) = size;
    element_type *data = ToNativePtr<element_type>(ToUintPtr(p) + SIZE_BEFORE_DATA_OFFSET);
    element_type *current_element = data;
    for (size_t i = 0; i < size; ++i, ++current_element) {
        new (current_element) element_type();
    }
    return data;
}

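// Runs the destructor of *ptr (when T is a class type) and frees the memory obtained from New().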
template <InternalAllocatorConfig Config>
template <class T>
void InternalAllocator<Config>::Delete(T *ptr)
{
    if constexpr (std::is_class_v<T>) {
        ptr->~T();
    }
    Free(ptr);
}

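// Counterpart of the array New(): reads the element count from the header stored before |data|,
// destroys each element (for class types) and frees the whole block including the header.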
template <InternalAllocatorConfig Config>
template <typename T>
void InternalAllocator<Config>::DeleteArray(T *data)
{
    static constexpr size_t SIZE_BEFORE_DATA_OFFSET = AlignUp(sizeof(size_t), GetAlignmentInBytes(GetAlignment<T>()));
    void *p = ToVoidPtr(ToUintPtr(data) - SIZE_BEFORE_DATA_OFFSET);
    size_t size = *static_cast<size_t *>(p);
    if constexpr (std::is_class_v<T>) {
        for (size_t i = 0; i < size; ++i, ++data) {
            data->~T();
        }
    }
    Free(p);
}

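// Hands every pool owned by the underlying runslots, freelist and humongous-object allocators
// to |mem_visitor| and removes it. Does nothing for configurations other than PANDA_ALLOCATORS.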
template <InternalAllocatorConfig Config>
template <typename MemVisitor>
void InternalAllocator<Config>::VisitAndRemoveAllPools(MemVisitor mem_visitor)
{
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (Config == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        runslots_allocator_->VisitAndRemoveAllPools(mem_visitor);
        freelist_allocator_->VisitAndRemoveAllPools(mem_visitor);
        humongous_allocator_->VisitAndRemoveAllPools(mem_visitor);
    }
}

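// Like VisitAndRemoveAllPools(), but visits and removes only the pools that the underlying
// allocators report as free. Does nothing for configurations other than PANDA_ALLOCATORS.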
template <InternalAllocatorConfig Config>
template <typename MemVisitor>
void InternalAllocator<Config>::VisitAndRemoveFreePools(MemVisitor mem_visitor)
{
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (Config == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        runslots_allocator_->VisitAndRemoveFreePools(mem_visitor);
        freelist_allocator_->VisitAndRemoveFreePools(mem_visitor);
        humongous_allocator_->VisitAndRemoveFreePools(mem_visitor);
    }
}

#undef LOG_INTERNAL_ALLOCATOR

}  // namespace panda::mem

#endif  // PANDA_RUNTIME_MEM_INTERNAL_ALLOCATOR_INL_H