/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "base_mem_stats.h"

#include "utils/logger.h"

#include <numeric>

namespace panda {

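// The *Raw variants below are for non-heap space types only: the ASSERT(!IsHeapSpace(...))
// checks the space type before forwarding to the generic RecordAllocate/RecordFree counters.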
void BaseMemStats::RecordAllocateRaw(size_t size, SpaceType type_mem)
{
    ASSERT(!IsHeapSpace(type_mem));
    RecordAllocate(size, type_mem);
}

void BaseMemStats::RecordAllocate(size_t size, SpaceType type_mem)
{
    auto index = helpers::ToUnderlying(type_mem);
    // Atomic with acq_rel order reason: data race with allocated_ with dependencies on reads after the load and on
    // writes before the store
    allocated_[index].fetch_add(size, std::memory_order_acq_rel);
}

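// RecordMoved subtracts the moved bytes from the per-space allocated counter; the assertion
// below guards against underflow (more bytes reported as moved than were recorded as allocated).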
void BaseMemStats::RecordMoved(size_t size, SpaceType type_mem)
{
    auto index = helpers::ToUnderlying(type_mem);
    // Atomic with acq_rel order reason: data race with allocated_ with dependencies on reads after the load and on
    // writes before the store
    uint64_t old_value = allocated_[index].fetch_sub(size, std::memory_order_acq_rel);
    (void)old_value;
    ASSERT(old_value >= size);
}

void BaseMemStats::RecordFreeRaw(size_t size, SpaceType type_mem)
{
    ASSERT(!IsHeapSpace(type_mem));
    RecordFree(size, type_mem);
}

void BaseMemStats::RecordFree(size_t size, SpaceType type_mem)
{
    auto index = helpers::ToUnderlying(type_mem);
    // Atomic with acq_rel order reason: data race with freed_ with dependencies on reads after the load and on
    // writes before the store
    freed_[index].fetch_add(size, std::memory_order_acq_rel);
}

uint64_t BaseMemStats::GetAllocated(SpaceType type_mem) const
{
    // Atomic with acquire order reason: data race with allocated_ with dependencies on reads after the load which
    // should become visible
    return allocated_[helpers::ToUnderlying(type_mem)].load(std::memory_order_acquire);
}

uint64_t BaseMemStats::GetFreed(SpaceType type_mem) const
{
    // Atomic with acquire order reason: data race with freed_ with dependencies on reads after the load which
    // should become visible
    return freed_[helpers::ToUnderlying(type_mem)].load(std::memory_order_acquire);
}

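// GetAllocatedHeap/GetFreedHeap aggregate only the space types for which IsHeapSpace(...) is true.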
uint64_t BaseMemStats::GetAllocatedHeap() const
{
    uint64_t result = 0;
    for (size_t index = 0; index < SPACE_TYPE_SIZE; index++) {
        SpaceType type = ToSpaceType(index);
        if (IsHeapSpace(type)) {
            // Atomic with acquire order reason: data race with allocated_ with dependencies on reads after the load
            // which should become visible
            result += allocated_[index].load(std::memory_order_acquire);
        }
    }
    return result;
}

uint64_t BaseMemStats::GetFreedHeap() const
{
    uint64_t result = 0;
    for (size_t index = 0; index < SPACE_TYPE_SIZE; index++) {
        SpaceType type = ToSpaceType(index);
        if (IsHeapSpace(type)) {
            // Atomic with acquire order reason: data race with freed_ with dependencies on reads after the load
            // which should become visible
            result += freed_[index].load(std::memory_order_acquire);
        }
    }
    return result;
}

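// Heap footprint is allocated-heap minus freed-heap bytes; UnsignedDifferenceUint64 is used here,
// presumably to avoid unsigned wrap-around if freed momentarily exceeds allocated.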
uint64_t BaseMemStats::GetFootprintHeap() const
{
    return helpers::UnsignedDifferenceUint64(GetAllocatedHeap(), GetFreedHeap());
}

uint64_t BaseMemStats::GetFootprint(SpaceType type_mem) const
{
    auto index = helpers::ToUnderlying(type_mem);
    // Atomic with acquire order reason: data race with allocated_ with dependencies on reads after the load which
    // should become visible
    LOG_IF(allocated_[index].load(std::memory_order_acquire) < freed_[index].load(std::memory_order_acquire), FATAL, GC)
        << "Allocated < Freed (mem type = " << std::dec << static_cast<size_t>(index)
        // Atomic with acquire order reason: data race with allocated_ with dependencies on reads after the load
        // which should become visible
        << "): " << allocated_[index].load(std::memory_order_acquire) << " < "
        // Atomic with acquire order reason: data race with freed_ with dependencies on reads after the load
        // which should become visible
        << freed_[index].load(std::memory_order_acquire);
    // Atomic with acquire order reason: data race with allocated_ with dependencies on reads after the load which
    // should become visible
    return allocated_[index].load(std::memory_order_acquire) - freed_[index].load(std::memory_order_acquire);
}

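// Total footprint sums the allocated and freed counters over all space types (heap and non-heap)
// and returns their difference.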
uint64_t BaseMemStats::GetTotalFootprint() const
{
    // Use a uint64_t accumulator so the sums do not truncate on platforms where unsigned long is 32-bit
    return std::accumulate(begin(allocated_), end(allocated_), static_cast<uint64_t>(0)) -
           std::accumulate(begin(freed_), end(freed_), static_cast<uint64_t>(0));
}

}  // namespace panda