// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/trace_event/heap_profiler_allocation_register.h"

#include <algorithm>

#include "base/trace_event/trace_event_memory_overhead.h"

namespace base {
namespace trace_event {

AllocationRegister::ConstIterator::ConstIterator(
    const AllocationRegister& alloc_register, AllocationIndex index)
    : register_(alloc_register),
      index_(index) {}

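// Advances to the next occupied slot in the allocation map. Next() is
// assumed to return the first used index at or after its argument, or
// kInvalidKVIndex when no occupied slot remains (matching end() below).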
void AllocationRegister::ConstIterator::operator++() {
  index_ = register_.allocations_.Next(index_ + 1);
}

bool AllocationRegister::ConstIterator::operator!=(
    const ConstIterator& other) const {
  return index_ != other.index_;
}

AllocationRegister::Allocation
AllocationRegister::ConstIterator::operator*() const {
  return register_.GetAllocation(index_);
}

size_t AllocationRegister::BacktraceHasher::operator () (
    const Backtrace& backtrace) const {
  const size_t kSampleLength = 10;

  uintptr_t total_value = 0;

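  // Sum the raw frame addresses of (at most) the first and last
  // kSampleLength frames. Sampling only the two ends of the stack keeps
  // hashing cheap for deep backtraces while still separating most of them.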
  size_t head_end = std::min(backtrace.frame_count, kSampleLength);
  for (size_t i = 0; i != head_end; ++i) {
    total_value += reinterpret_cast<uintptr_t>(backtrace.frames[i].value);
  }

  size_t tail_start = backtrace.frame_count -
      std::min(backtrace.frame_count - head_end, kSampleLength);
  for (size_t i = tail_start; i != backtrace.frame_count; ++i) {
    total_value += reinterpret_cast<uintptr_t>(backtrace.frames[i].value);
  }

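  // Mix in the frame count so that backtraces whose sampled frames happen
  // to coincide but whose depths differ still hash to different values.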
  total_value += backtrace.frame_count;

  // These magic constants give best results in terms of average collisions
  // per backtrace. They were found by replaying real backtraces from Linux
  // and Android against different hash functions.
  return (total_value * 131101) >> 14;
}

size_t AllocationRegister::AddressHasher::operator () (
    const void* address) const {
  // The multiplicative hashing scheme from [Knuth 1998]. The value of |a| has
  // been chosen carefully based on measurements with real-world data
  // (addresses recorded from a Chrome trace run). It is the first prime after
  // 2^17. For |shift|, 13, 14 and 15 yield good results. These values are
  // tuned to 2^18 buckets. Microbenchmarks show that this simple scheme
  // outperforms fancy hashes like Murmur3 by 20 to 40 percent.
  const uintptr_t key = reinterpret_cast<uintptr_t>(address);
  const uintptr_t a = 131101;
  const uintptr_t shift = 14;
  const uintptr_t h = (key * a) >> shift;
  return h;
}

AllocationRegister::AllocationRegister()
    : AllocationRegister(kAllocationCapacity, kBacktraceCapacity) {}

AllocationRegister::AllocationRegister(size_t allocation_capacity,
                                       size_t backtrace_capacity)
    : allocations_(allocation_capacity),
      backtraces_(backtrace_capacity) {}

AllocationRegister::~AllocationRegister() {
}

void AllocationRegister::Insert(const void* address,
                                size_t size,
                                const AllocationContext& context) {
  DCHECK(address != nullptr);
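  // Zero-sized allocations are not tracked; they contribute nothing to the
  // totals and would only occupy slots in the map.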
  if (size == 0) {
    return;
  }

  AllocationInfo info = {
      size,
      context.type_name,
      InsertBacktrace(context.backtrace)
  };

  // Try to insert the allocation.
  auto index_and_flag = allocations_.Insert(address, info);
  if (!index_and_flag.second) {
    // |address| is already there - overwrite the allocation info.
    auto& old_info = allocations_.Get(index_and_flag.first).second;
    RemoveBacktrace(old_info.backtrace_index);
    old_info = info;
  }
}

void AllocationRegister::Remove(const void* address) {
  auto index = allocations_.Find(address);
  if (index == AllocationMap::kInvalidKVIndex) {
    return;
  }

  const AllocationInfo& info = allocations_.Get(index).second;
  RemoveBacktrace(info.backtrace_index);
  allocations_.Remove(index);
}

bool AllocationRegister::Get(const void* address,
                             Allocation* out_allocation) const {
  auto index = allocations_.Find(address);
  if (index == AllocationMap::kInvalidKVIndex) {
    return false;
  }

  if (out_allocation) {
    *out_allocation = GetAllocation(index);
  }
  return true;
}

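// Iteration starts at the first occupied slot of the allocation map; the
// end() sentinel uses kInvalidKVIndex, the same value operator++ is assumed
// to reach once Next() runs out of occupied slots.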
AllocationRegister::ConstIterator AllocationRegister::begin() const {
  return ConstIterator(*this, allocations_.Next(0));
}

AllocationRegister::ConstIterator AllocationRegister::end() const {
  return ConstIterator(*this, AllocationMap::kInvalidKVIndex);
}

void AllocationRegister::EstimateTraceMemoryOverhead(
    TraceEventMemoryOverhead* overhead) const {
  size_t allocated = sizeof(AllocationRegister);
  size_t resident = sizeof(AllocationRegister)
                    + allocations_.EstimateUsedMemory()
                    + backtraces_.EstimateUsedMemory();
  overhead->Add("AllocationRegister", allocated, resident);
}

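// Backtraces are deduplicated: Insert() returns the index of the existing
// entry when |backtrace| is already present, and the per-entry count tracks
// how many live allocations reference it.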
AllocationRegister::BacktraceMap::KVIndex AllocationRegister::InsertBacktrace(
    const Backtrace& backtrace) {
  auto index = backtraces_.Insert(backtrace, 0).first;
  auto& backtrace_and_count = backtraces_.Get(index);
  backtrace_and_count.second++;
  return index;
}

void AllocationRegister::RemoveBacktrace(BacktraceMap::KVIndex index) {
  auto& backtrace_and_count = backtraces_.Get(index);
  if (--backtrace_and_count.second == 0) {
    // Backtrace is not referenced anymore - remove it.
    backtraces_.Remove(index);
  }
}

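// Reassembles a full Allocation from the two maps: the address and size live
// in the allocation map, the (shared) backtrace in the backtrace map.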
AllocationRegister::Allocation AllocationRegister::GetAllocation(
    AllocationMap::KVIndex index) const {
  const auto& address_and_info = allocations_.Get(index);
  const auto& backtrace_and_count = backtraces_.Get(
      address_and_info.second.backtrace_index);
  return {
      address_and_info.first,
      address_and_info.second.size,
      AllocationContext(
          backtrace_and_count.first,
          address_and_info.second.type_name)
  };
}

}  // namespace trace_event
}  // namespace base