/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef FOUNDATION_ACE_NAPI_SCOPE_MANAGER_NATIVE_SCOPE_MANAGER_H
#define FOUNDATION_ACE_NAPI_SCOPE_MANAGER_NATIVE_SCOPE_MANAGER_H

#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <new>
#include <type_traits>
#include <utility>
#include <vector>

#ifdef ENABLE_MEMLEAK_DEBUG
#include <atomic>
#include <string>
#endif

class NativeValue;
struct NativeHandle;
#ifdef ENABLE_MEMLEAK_DEBUG
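// One virtual memory area record (address range plus backing file path), kept
// by the memory-leak debugging support; presumably populated from the process
// memory maps when ENABLE_MEMLEAK_DEBUG is enabled.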
struct StructVma {
    uint64_t begin = 0;
    uint64_t end = 0;
    std::string path;
};
#endif

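// A single handle scope: a node in the parent/child scope chain that tracks
// the native handles created while it is the current scope.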
struct NativeScope {
    static NativeScope* CreateNewInstance()
    {
        return new NativeScope();
    }

    NativeHandle* handlePtr = nullptr;
    size_t handleCount = 0;
    bool escaped = false;
    bool escapeCalled = false;

    NativeScope* child = nullptr;
    NativeScope* parent = nullptr;
};

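// Bump-pointer allocator backed by malloc'ed pages of CHUNK_PAGE_SIZE bytes.
// Objects are carved out of the current page and reclaimed in bulk when the
// owning scope is closed, rather than being freed one by one.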
class NativeChunk {
public:
    static constexpr size_t CHUNK_PAGE_SIZE = 8 * 1024;

    NativeChunk() {}
    ~NativeChunk()
    {
        for (auto iter = usedPage_.begin(); iter != usedPage_.end(); iter++) {
            free(*iter);
        }
        usedPage_.clear();
    }

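    // Allocates uninitialized storage for `size` objects of T from the chunk.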
    template<class T>
    [[nodiscard]] T *NewArray(size_t size)
    {
        return static_cast<T *>(Allocate(size * sizeof(T)));
    }

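    // Allocates storage from the chunk and constructs a T in place with the
    // forwarded arguments.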
    template<typename T, typename... Args>
    [[nodiscard]] T *New(Args &&... args)
    {
        auto p = reinterpret_cast<void *>(Allocate(sizeof(T)));
        new (p) T(std::forward<Args>(args)...);
        return reinterpret_cast<T *>(p);
    }

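    // Destroys an object created through this chunk. Heap objects are deleted
    // outright; once chunk pages are in use only the destructor runs, because
    // the underlying memory is released page by page.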
    template<class T>
    void Delete(T *ptr)
    {
        if (!useChunk_) {
            delete ptr;
            return;
        }

        if (std::is_class<T>::value) {
            ptr->~T();
        }
    }

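    // Records the current allocator state for `scope` so it can be restored
    // when that scope is closed.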
    void PushChunkStats(NativeScope* scope)
    {
        ChunkStats stats(scope, currentHandleStorageIndex_, ptr_, end_);
        chunkStats_.emplace_back(stats);
    }

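    // Drops the most recent snapshot without rewinding the allocator.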
    void PopChunkStats()
    {
        chunkStats_.pop_back();
    }

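    // Discards the snapshot associated with `scope`, if one exists.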
    void RemoveStats(NativeScope* scope)
    {
        for (auto iter = chunkStats_.begin(); iter != chunkStats_.end(); iter++) {
            if (iter->scope_ == scope) {
                chunkStats_.erase(iter);
                return;
            }
        }
    }

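    // Rewinds the allocator to the most recent snapshot and frees every page
    // acquired after that snapshot was taken.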
    void PopChunkStatsAndReset()
    {
        ChunkStats& stats = chunkStats_.back();
        ChunkReset(stats.prevScopeIndex_, stats.prevNext_, stats.prevEnd_);
        int index = static_cast<int>(usedPage_.size()) - 1;
        if (index < 0) {
            return;
        }
        for (; index > stats.prevScopeIndex_; index--) {
            free(usedPage_[index]);
            usedPage_.pop_back();
        }
        chunkStats_.pop_back();
    }

private:
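    // Snapshot of the allocator state (page index and bump-pointer bounds)
    // taken when a scope is opened.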
    class ChunkStats {
    public:
        ChunkStats(NativeScope* scope, int32_t index, uintptr_t begin, uintptr_t end)
            : scope_(scope), prevScopeIndex_(index), prevNext_(begin), prevEnd_(end) {}

        NativeScope* scope_ {nullptr};
        int32_t prevScopeIndex_ {-1};
        uintptr_t prevNext_ {0};
        uintptr_t prevEnd_ {0};
    };

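    // Restores the bump pointer, page limit and page index from a snapshot.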
    void ChunkReset(int32_t prevIndex, uintptr_t prevNext, uintptr_t prevEnd)
    {
        currentHandleStorageIndex_ = prevIndex;
        ptr_ = prevNext;
        end_ = prevEnd;
    }

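    // Bump-allocates `size` bytes, switching to a fresh page when the current
    // one cannot satisfy the request.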
    void *Allocate(size_t size)
    {
        uintptr_t result = ptr_;
        if (size > end_ - ptr_) {
            result = Expand();
        }
        ptr_ += size;
        return reinterpret_cast<void *>(result);
    }

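    // Acquires a new page via malloc and makes it the current allocation
    // target; aborts on allocation failure.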
    uintptr_t Expand()
    {
        void *ptr = malloc(CHUNK_PAGE_SIZE);
        if (ptr == nullptr) {
            std::abort();
        }
        usedPage_.emplace_back(ptr);
        currentHandleStorageIndex_++;
        useChunk_ = true;
        ptr_ = reinterpret_cast<uintptr_t>(ptr);
        end_ = ptr_ + CHUNK_PAGE_SIZE;
        return ptr_;
    }

    uintptr_t ptr_ {0};
    uintptr_t end_ {0};
    int32_t currentHandleStorageIndex_ {-1};
    bool useChunk_ {false};
    std::vector<void *> usedPage_ {};
    std::vector<ChunkStats> chunkStats_ {};
};

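// Maintains the chain of NativeScope instances and the NativeChunk that backs
// them. Open()/Close() push and pop ordinary handle scopes, OpenEscape()/
// CloseEscape() manage escapable scopes, and Escape() is intended to promote a
// value into the parent scope so it outlives the scope being closed; the exact
// semantics live in the accompanying implementation file. Typical use (sketch,
// actual call sites live in the NAPI engine code):
//     NativeScope* scope = manager->Open();
//     manager->CreateHandle(value);  // record a newly created NativeValue
//     manager->Close(scope);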
class NativeScopeManager {
public:
    NativeScopeManager();
    virtual ~NativeScopeManager();

    virtual NativeScope* Open();
    virtual void Close(NativeScope* scope, bool needReset = true);

    virtual NativeScope* OpenEscape();
    virtual void CloseEscape(NativeScope* scope);

    virtual void CreateHandle(NativeValue* value);
    virtual NativeValue* Escape(NativeScope* scope, NativeValue* value);

    NativeChunk& GetNativeChunk()
    {
        return nativeChunk_;
    }

    NativeScopeManager(NativeScopeManager&) = delete;
    virtual NativeScopeManager& operator=(NativeScopeManager&) = delete;

#ifdef ENABLE_MEMLEAK_DEBUG
    static const int MAPINFO_SIZE = 256;
    static const int NAME_LEN = 128;
    static const int DEBUG_MEMLEAK;
    static const int BACKTRACE_DEPTH;
#endif

private:
#ifdef ENABLE_MEMLEAK_DEBUG
    static std::atomic<std::vector<struct StructVma>*> vmas;
#endif
    NativeScope* root_;
    NativeScope* current_;
    NativeChunk nativeChunk_;
};

#endif /* FOUNDATION_ACE_NAPI_SCOPE_MANAGER_NATIVE_SCOPE_MANAGER_H */