/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "common/rs_common_def.h"

#include <cstddef>
#include <cstdlib>
#include <mutex>
#include <new>
#include <unordered_map>
#include <vector>

18 namespace OHOS {
19 namespace Rosen {
20 namespace {
21 class MemAllocater final {
22 struct BlockHead {
23 int size;
24 char ptr[0];
25 };
26 using Cache = std::vector<char*>;
27
28 public:
29 static MemAllocater& GetInstance();
30 MemAllocater() = default;
31 ~MemAllocater();
32
33 void* Alloc(size_t size);
34 void Free(void* ptr);
35
36 private:
37 MemAllocater(const MemAllocater&) = delete;
38 MemAllocater& operator=(const MemAllocater&) = delete;
39
40 std::mutex mutex_;
41 std::unordered_map<size_t, Cache> memCaches_;
42 std::vector<char*> blocks_;
43 static constexpr unsigned sizeStep_ = 64;
44 };
45 static MemAllocater allocater;
46 }
47
GetInstance()48 MemAllocater& MemAllocater::GetInstance()
49 {
50 return allocater;
51 }
52
~MemAllocater()53 MemAllocater::~MemAllocater()
54 {
55 for (void* ptr : blocks_) {
56 if (ptr != nullptr) {
57 free(ptr);
58 }
59 }
60 blocks_.clear();
61 memCaches_.clear();
62 }
63
Alloc(size_t size)64 void* MemAllocater::Alloc(size_t size)
65 {
66 std::lock_guard<std::mutex> lock(mutex_);
67 Cache* cachePtr = nullptr;
68 auto itr = memCaches_.find(size);
69 if (itr == memCaches_.end()) {
70 Cache tempCache;
71 memCaches_.insert(std::pair<size_t, Cache>(size, tempCache));
72 itr = memCaches_.find(size);
73 cachePtr = &(itr->second);
74 cachePtr->reserve(sizeStep_);
75 } else {
76 cachePtr = &(itr->second);
77 }
78
79 if (cachePtr == nullptr) {
80 return nullptr;
81 }
82 size_t memSize = (size + sizeof(BlockHead));
83 if (cachePtr->empty()) {
84 char* block = static_cast<char*>(malloc(memSize * sizeStep_));
85 if (block == nullptr) {
86 return nullptr;
87 }
88 blocks_.push_back(block);
89 for (unsigned i = 0; i < sizeStep_; ++i) {
90 cachePtr->push_back(block + (i * memSize));
91 }
92 }
93
94 char* mem = cachePtr->back();
95 cachePtr->pop_back();
96 BlockHead* head = reinterpret_cast<BlockHead*>(mem);
97 head->size = static_cast<int>(size);
98 return head->ptr;
99 }
100
Free(void * ptr)101 void MemAllocater::Free(void* ptr)
102 {
103 if (ptr == nullptr) {
104 return;
105 }
106 std::lock_guard<std::mutex> lock(mutex_);
107 char* p = static_cast<char*>(ptr) - sizeof(BlockHead);
108 BlockHead* head = reinterpret_cast<BlockHead*>(p);
109 auto itr = memCaches_.find(head->size);
110 if (itr == memCaches_.end()) {
111 free(p);
112 } else {
113 itr->second.push_back(p);
114 }
115 }
116
operator new(size_t size)117 void* MemObject::operator new(size_t size)
118 {
119 return MemAllocater::GetInstance().Alloc(size);
120 }
121
operator delete(void * ptr)122 void MemObject::operator delete(void* ptr)
123 {
124 return MemAllocater::GetInstance().Free(ptr);
125 }
126
operator new(size_t size,const std::nothrow_t &)127 void* MemObject::operator new(size_t size, const std::nothrow_t&) noexcept
128 {
129 return MemAllocater::GetInstance().Alloc(size);
130 }
131
operator delete(void * ptr,const std::nothrow_t &)132 void MemObject::operator delete(void* ptr, const std::nothrow_t&) noexcept
133 {
134 return MemAllocater::GetInstance().Free(ptr);
135 }
136 } // namespace Rosen
137 } // namespace OHOS
138