/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ECMASCRIPT_MEM_NATIVE_AREA_ALLOCATOR_H
#define ECMASCRIPT_MEM_NATIVE_AREA_ALLOCATOR_H

#include <atomic>
#include <cstddef>

#include "ecmascript/common.h"
#include "ecmascript/log_wrapper.h"
#include "ecmascript/mem/mem.h"
#include "ecmascript/mem/area.h"

namespace panda::ecmascript {
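// Tags the category of a native allocation so that per-type size statistics can be maintained.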
enum class NativeFlag {
    NO_DIV,
    ARRAY_BUFFER,
    REGEXP_BTYECODE,
    CHUNK_MEM,
};

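// Allocator for off-heap (native) memory used by the ECMAScript runtime. It hands out raw buffers
// and Area chunks backed by malloc/free, and keeps running totals of current, peak, and
// per-category native memory usage.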
class PUBLIC_API NativeAreaAllocator {
public:
    NativeAreaAllocator() = default;
    virtual ~NativeAreaAllocator()
    {
        if (cachedArea_ != nullptr) {
            FreeArea(cachedArea_);
            cachedArea_ = nullptr;
        }
    }

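    // Area management. A freed Area may be kept in cachedArea_ for quick reuse (the exact caching
    // policy lives in the .cpp implementation); the destructor releases any cached Area.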
    Area *AllocateArea(size_t capacity);
    void FreeArea(Area *area);
    void Free(void *mem, size_t size);
    void *AllocateBuffer(size_t size);
    void FreeBuffer(void *mem);

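    // Deleter callbacks that can be handed off alongside a native pointer; `data` is expected to
    // be the owning NativeAreaAllocator instance.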
    static void FreeBufferFunc(void* buffer, void* data);

    template<class T>
    static void FreeObjectFunc(void* buffer, void* data)
    {
        if (buffer == nullptr || data == nullptr) {
            return;
        }
        NativeAreaAllocator* allocator = reinterpret_cast<NativeAreaAllocator*>(data);
        allocator->Delete<T>(static_cast<T *>(buffer));
    }

    // Allocates sizeof(T) bytes via AllocateBuffer and placement-constructs a T in it.
    // Returns nullptr if the underlying buffer allocation fails.
    template<typename T, typename... Args>
    std::enable_if_t<!std::is_array_v<T>, T *> New(Args &&... args)
    {
        void *p = AllocateBuffer(sizeof(T));
        if (UNLIKELY(p == nullptr)) {
            return nullptr;
        }
        new (p) T(std::forward<Args>(args)...);  // NOLINT(bugprone-throw-keyword-missing)
        return reinterpret_cast<T *>(p);
    }

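    // A minimal usage sketch for New/Delete (illustrative only; MyNode is a hypothetical type):
    //     NativeAreaAllocator allocator;
    //     auto *node = allocator.New<MyNode>(42);   // placement-new on an AllocateBuffer'd block
    //     if (node != nullptr) {
    //         allocator.Delete(node);               // runs ~MyNode() and returns the buffer
    //     }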
    template<class T>
    void Delete(T *ptr)
    {
        if (ptr == nullptr) {
            return;
        }
        // NOLINTNEXTLINE(readability-braces-around-statements,bugprone-suspicious-semicolon)
        if constexpr (std::is_class_v<T>) {
            ptr->~T();
        }
        FreeBuffer(ptr);
    }

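    // Adds `bytes` to the running native-memory total and, if the new total exceeds the recorded
    // peak, updates maxNativeMemoryUsage_ with a compare-exchange loop so concurrent updaters
    // cannot overwrite a larger value.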
    void IncreaseNativeMemoryUsage(size_t bytes)
    {
        size_t current = nativeMemoryUsage_.fetch_add(bytes, std::memory_order_relaxed) + bytes;
        size_t max = maxNativeMemoryUsage_.load(std::memory_order_relaxed);
        while (current > max && !maxNativeMemoryUsage_.compare_exchange_weak(max, current, std::memory_order_relaxed)) {
        }
    }

    void DecreaseNativeMemoryUsage(size_t bytes)
    {
        nativeMemoryUsage_.fetch_sub(bytes, std::memory_order_relaxed);
    }

    size_t GetNativeMemoryUsage() const
    {
        return nativeMemoryUsage_.load(std::memory_order_relaxed);
    }

    size_t GetMaxNativeMemoryUsage() const
    {
        return maxNativeMemoryUsage_.load(std::memory_order_relaxed);
    }

    size_t GetArrayBufferNativeSize() const
    {
        return arrayBufferNativeSize_;
    }

    size_t GetRegExpNativeSize() const
    {
        return regExpNativeSize_;
    }

    size_t GetChunkNativeSize() const
    {
        return chunkNativeSize_;
    }

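    // Per-category accounting for ArrayBuffer, RegExp bytecode, and chunk memory. Unlike the
    // usage counters above, these are plain size_t fields and are not updated atomically.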
    inline void IncreaseNativeSizeStats(size_t size, NativeFlag flag)
    {
        if (size == 0) {
            return;
        }
        switch (flag) {
            case NativeFlag::ARRAY_BUFFER:
                arrayBufferNativeSize_ += size;
                break;
            case NativeFlag::REGEXP_BTYECODE:
                regExpNativeSize_ += size;
                break;
            case NativeFlag::CHUNK_MEM:
                chunkNativeSize_ += size;
                break;
            default:
                break;
        }
    }

    inline void DecreaseNativeSizeStats(size_t size, NativeFlag flag)
    {
        if (size == 0) {
            return;
        }
        switch (flag) {
            case NativeFlag::ARRAY_BUFFER:
                arrayBufferNativeSize_ -= size;
                break;
            case NativeFlag::REGEXP_BTYECODE:
                regExpNativeSize_ -= size;
                break;
            case NativeFlag::CHUNK_MEM:
                chunkNativeSize_ -= size;
                break;
            default:
                break;
        }
    }

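    // Re-records an allocation whose size changed from preSize to nextSize; NO_DIV allocations
    // are not tracked per category and are skipped.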
    void ModifyNativeSizeStats(size_t preSize, size_t nextSize, NativeFlag flag)
    {
        if (flag == NativeFlag::NO_DIV) {
            return;
        }
        DecreaseNativeSizeStats(preSize, flag);
        IncreaseNativeSizeStats(nextSize, flag);
    }

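    // Raw malloc wrapper that aborts (FATAL log) on a zero-sized request or allocation failure,
    // and records the allocation in nativeMemoryUsage_.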
    void *Allocate(size_t size)
    {
        if (size == 0) {
            LOG_ECMA_MEM(FATAL) << "size must be greater than 0";
            UNREACHABLE();
        }
        // NOLINTNEXTLINE(cppcoreguidelines-no-malloc)
        void *ptr = malloc(size);
        if (ptr == nullptr) {
            LOG_ECMA_MEM(FATAL) << "malloc failed";
            UNREACHABLE();
        }
        IncreaseNativeMemoryUsage(size);
        return ptr;
    }

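    // Allocates/frees a raw Area: the Area header is placement-constructed at the start of a
    // malloc'd block and the remaining capacity is handed to the Area. Note that, unlike
    // Allocate(), these static helpers do not touch the usage counters.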
    static inline Area *AllocateSpace(size_t capacity)
    {
        size_t headerSize = sizeof(Area);
        if (capacity < headerSize) {
            LOG_ECMA_MEM(FATAL) << "capacity must be no less than sizeof(Area)";
            UNREACHABLE();
        }
        // NOLINTNEXTLINE(cppcoreguidelines-no-malloc)
        void *mem = malloc(capacity);
        if (mem == nullptr) {
            LOG_ECMA_MEM(FATAL) << "malloc failed";
            UNREACHABLE();
        }
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        uintptr_t begin = reinterpret_cast<uintptr_t>(mem) + headerSize;
        capacity -= headerSize;
        return new (mem) Area(begin, capacity);
    }

    static inline void FreeSpace(Area *area)
    {
        if (area == nullptr) {
            return;
        }
        // NOLINTNEXTLINE(cppcoreguidelines-no-malloc)
        free(reinterpret_cast<std::byte *>(area));
    }

private:
    NO_COPY_SEMANTIC(NativeAreaAllocator);
    NO_MOVE_SEMANTIC(NativeAreaAllocator);

    Area *cachedArea_ {nullptr};
    std::atomic<size_t> nativeMemoryUsage_ {0};
    std::atomic<size_t> maxNativeMemoryUsage_ {0};
    // native area size stats
    size_t arrayBufferNativeSize_ {0};
    size_t regExpNativeSize_ {0};
    size_t chunkNativeSize_ {0};
};
}  // namespace panda::ecmascript

#endif  // ECMASCRIPT_MEM_NATIVE_AREA_ALLOCATOR_H