// Copyright 2015 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifdef UNSAFE_BUFFERS_BUILD
// TODO(crbug.com/40284755): Remove this and spanify to fix the errors.
#pragma allow_unsafe_buffers
#endif

#include "base/trace_event/heap_profiler_allocation_context.h"

#include <algorithm>
#include <cstring>

#include "base/containers/span.h"
#include "base/hash/hash.h"

namespace base {
namespace trace_event {

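// StackFrames are ordered, compared, and (below, in namespace std) hashed by
// their raw |value| only; the frame type does not participate.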
bool operator<(const StackFrame& lhs, const StackFrame& rhs) {
  return lhs.value < rhs.value;
}

bool operator==(const StackFrame& lhs, const StackFrame& rhs) {
  return lhs.value == rhs.value;
}

bool operator!=(const StackFrame& lhs, const StackFrame& rhs) {
  return !(lhs.value == rhs.value);
}

Backtrace::Backtrace() = default;

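// Two backtraces are equal when their first |frame_count| frames match;
// frames beyond |frame_count| are never inspected.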
bool operator==(const Backtrace& lhs, const Backtrace& rhs) {
  if (lhs.frame_count != rhs.frame_count) return false;
  return std::equal(lhs.frames, lhs.frames + lhs.frame_count, rhs.frames);
}

bool operator!=(const Backtrace& lhs, const Backtrace& rhs) {
  return !(lhs == rhs);
}

AllocationContext::AllocationContext() : type_name(nullptr) {}

AllocationContext::AllocationContext(const Backtrace& backtrace,
                                     const char* type_name)
    : backtrace(backtrace), type_name(type_name) {}
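
// A hypothetical caller sketch (not exercised in this file), assuming the
// StackFrame::FromProgramCounter helper declared in the accompanying header
// and a string literal for the type name:
//
//   Backtrace backtrace;
//   backtrace.frames[0] = StackFrame::FromProgramCounter(return_address);
//   backtrace.frame_count = 1;
//   AllocationContext context(backtrace, "MyType");
//
// |return_address| stands in for a program counter obtained from stack
// unwinding. |type_name| is stored as a raw pointer, so the pointed-to string
// must outlive the context.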
49
bool operator==(const AllocationContext& lhs, const AllocationContext& rhs) {
  return (lhs.backtrace == rhs.backtrace) && (lhs.type_name == rhs.type_name);
}

bool operator!=(const AllocationContext& lhs, const AllocationContext& rhs) {
  return !(lhs == rhs);
}

}  // namespace trace_event
}  // namespace base

namespace std {

using base::trace_event::AllocationContext;
using base::trace_event::Backtrace;
using base::trace_event::StackFrame;
size_t hash<StackFrame>::operator()(const StackFrame& frame) const {
  return hash<const void*>()(frame.value.get());
}

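// Like Backtrace's operator==, the hash covers only the first |frame_count|
// frames, so equal backtraces always hash to the same value.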
size_t hash<Backtrace>::operator()(const Backtrace& backtrace) const {
  const void* values[Backtrace::kMaxFrameCount];
  for (size_t i = 0; i != backtrace.frame_count; ++i) {
    values[i] = backtrace.frames[i].value;
  }
  return base::PersistentHash(
      base::as_bytes(base::span(values).first(backtrace.frame_count)));
}

size_t hash<AllocationContext>::operator()(const AllocationContext& ctx) const {
  size_t backtrace_hash = hash<Backtrace>()(ctx.backtrace);

  // Multiplicative hash from [Knuth 1998]. Works best if |size_t| is 32 bits,
  // because the magic number is a prime very close to 2^32 / golden ratio, but
  // will still redistribute keys bijectively on 64-bit architectures because
  // the magic number is coprime to 2^64.
  size_t type_hash = reinterpret_cast<size_t>(ctx.type_name) * 2654435761;

  // Multiply one side to break the commutativity of +. Multiplication with a
  // number coprime to |numeric_limits<size_t>::max() + 1| is bijective so
  // randomness is preserved.
  return (backtrace_hash * 3) + type_hash;
}

}  // namespace std