1 /*
2 * Copyright (c) 2021 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
#include "common/rs_common_def.h"

#include <cstdint>
#include <cstdlib>
#include <mutex>
#include <new>
#include <unordered_map>
#include <vector>
20
21 namespace OHOS {
22 namespace Rosen {
namespace {
// Size-segregated memory pool backing MemObject's class-level operator
// new/delete. Chunks are carved out of malloc'ed slabs and recycled through
// per-size free lists; the slabs themselves are only released in the
// destructor.
class MemAllocater final {
    // Header stored in front of every user chunk; records the requested size
    // so Free() can route the chunk back to the matching free list.
    struct BlockHead {
        int size;
        char ptr[0]; // flexible array member (GNU extension): user data starts here
    };
    // Free list of ready-to-hand-out chunk pointers for one size class.
    using Cache = std::vector<char*>;

public:
    static MemAllocater& GetInstance();
    MemAllocater() = default;
    ~MemAllocater();

    // Returns a pointer to at least `size` usable bytes, or nullptr on failure.
    void* Alloc(size_t size);
    // Recycles a pointer previously returned by Alloc(); nullptr is ignored.
    void Free(void* ptr);

private:
    MemAllocater(const MemAllocater&) = delete;
    MemAllocater& operator=(const MemAllocater&) = delete;

    std::mutex mutex_; // guards memCaches_ and blocks_
    // Per-request-size free lists of currently available chunks.
    std::unordered_map<size_t, Cache> memCaches_;
    // Every slab obtained from malloc(); owned here, freed in the destructor.
    std::vector<char*> blocks_;
    // Number of chunks carved per slab (also the initial free-list capacity).
    static constexpr unsigned sizeStep_ = 64;
};
// File-scope singleton handed out by GetInstance().
static MemAllocater allocater;
}
50
MemAllocater& MemAllocater::GetInstance()
{
    // Hands out the file-scope singleton instance defined above.
    return allocater;
}
55
~MemAllocater()56 MemAllocater::~MemAllocater()
57 {
58 std::lock_guard<std::mutex> lock(mutex_);
59 for (void* ptr : blocks_) {
60 if (ptr != nullptr) {
61 free(ptr);
62 }
63 }
64 blocks_.clear();
65 memCaches_.clear();
66 }
67
Alloc(size_t size)68 void* MemAllocater::Alloc(size_t size)
69 {
70 std::lock_guard<std::mutex> lock(mutex_);
71 Cache* cachePtr = nullptr;
72 auto itr = memCaches_.find(size);
73 if (itr == memCaches_.end()) {
74 Cache tempCache;
75 memCaches_.insert(std::pair<size_t, Cache>(size, tempCache));
76 itr = memCaches_.find(size);
77 cachePtr = &(itr->second);
78 cachePtr->reserve(sizeStep_);
79 } else {
80 cachePtr = &(itr->second);
81 }
82
83 if (cachePtr == nullptr) {
84 return nullptr;
85 }
86 size_t memSize = (size + sizeof(BlockHead));
87 if (cachePtr->empty()) {
88 char* block = static_cast<char*>(malloc(memSize * sizeStep_));
89 if (block == nullptr) {
90 return nullptr;
91 }
92 blocks_.push_back(block);
93 for (unsigned i = 0; i < sizeStep_; ++i) {
94 cachePtr->push_back(block + (i * memSize));
95 }
96 }
97
98 char* mem = cachePtr->back();
99 cachePtr->pop_back();
100 BlockHead* head = reinterpret_cast<BlockHead*>(mem);
101 head->size = static_cast<int>(size);
102 return head->ptr;
103 }
104
Free(void * ptr)105 void MemAllocater::Free(void* ptr)
106 {
107 if (ptr == nullptr) {
108 return;
109 }
110 std::lock_guard<std::mutex> lock(mutex_);
111 char* p = static_cast<char*>(ptr) - sizeof(BlockHead);
112 BlockHead* head = reinterpret_cast<BlockHead*>(p);
113 auto itr = memCaches_.find(head->size);
114 if (itr == memCaches_.end()) {
115 free(p);
116 } else {
117 itr->second.push_back(p);
118 }
119 }
120
operator new(size_t size)121 void* MemObject::operator new(size_t size)
122 {
123 return MemAllocater::GetInstance().Alloc(size);
124 }
125
operator delete(void * ptr)126 void MemObject::operator delete(void* ptr)
127 {
128 return MemAllocater::GetInstance().Free(ptr);
129 }
130
operator new(size_t size,const std::nothrow_t &)131 void* MemObject::operator new(size_t size, const std::nothrow_t&) noexcept
132 {
133 return MemAllocater::GetInstance().Alloc(size);
134 }
135
operator delete(void * ptr,const std::nothrow_t &)136 void MemObject::operator delete(void* ptr, const std::nothrow_t&) noexcept
137 {
138 return MemAllocater::GetInstance().Free(ptr);
139 }
140 } // namespace Rosen
141 } // namespace OHOS
142