// Copyright 2016 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/trace_event/category_registry.h"

#include <string.h>

#include <ostream>
#include <type_traits>

#include "base/check.h"
#include "base/debug/leak_annotations.h"
#include "base/notreached.h"
#include "base/third_party/dynamic_annotations/dynamic_annotations.h"

namespace base {
namespace trace_event {

namespace {

// |categories_| might end up causing the creation of dynamic initializers if
// TraceCategory is not POD.
static_assert(std::is_trivial_v<TraceCategory> &&
                  std::is_standard_layout_v<TraceCategory>,
              "TraceCategory must be POD");

}  // namespace

// static
TraceCategory CategoryRegistry::categories_[kMaxCategories] = {
    INTERNAL_TRACE_LIST_BUILTIN_CATEGORIES(INTERNAL_TRACE_INIT_CATEGORY)};
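
// Dynamically registered categories are appended after the statically
// initialized builtin categories above; |category_index_| marks the end of
// the used portion of |categories_|.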
// static
std::atomic<size_t> CategoryRegistry::category_index_{
    BuiltinCategories::Size()};
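
// These pointers alias the first three entries of |categories_|; anything at
// or before kCategoryMetadata is treated as a meta category (see
// IsMetaCategory()).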
// static
TraceCategory* const CategoryRegistry::kCategoryExhausted = &categories_[0];
TraceCategory* const CategoryRegistry::kCategoryAlreadyShutdown =
    &categories_[1];
TraceCategory* const CategoryRegistry::kCategoryMetadata = &categories_[2];

// static
void CategoryRegistry::Initialize() {
  // Trace is enabled or disabled on one thread while other threads are
  // accessing the enabled flag. We don't care whether edge-case events are
  // traced or not, so we allow races on the enabled flag to keep the trace
  // macros fast.
  for (size_t i = 0; i < kMaxCategories; ++i) {
    ANNOTATE_BENIGN_RACE(categories_[i].state_ptr(),
                         "trace_event category enabled");
    // If this DCHECK is hit in a test it means that ResetForTesting() is not
    // called and the categories state leaks between test fixtures.
    DCHECK(!categories_[i].is_enabled());
  }
}

// static
void CategoryRegistry::ResetForTesting() {
  // reset_for_testing clears only the enabled state and the filters. The
  // categories themselves cannot be cleared, because the static pointers
  // injected by the macros still point to them and cannot be reset.
  for (size_t i = 0; i < kMaxCategories; ++i)
    categories_[i].reset_for_testing();
}

// static
TraceCategory* CategoryRegistry::GetCategoryByName(const char* category_name) {
  DCHECK(!strchr(category_name, '"'))
      << "Category names may not contain double quote";

  // |categories_| is append-only, so the fast path avoids taking a lock.
  size_t category_index = category_index_.load(std::memory_order_acquire);

  // Search for pre-existing category group.
  for (size_t i = 0; i < category_index; ++i) {
    if (strcmp(categories_[i].name(), category_name) == 0) {
      return &categories_[i];
    }
  }
  return nullptr;
}

bool CategoryRegistry::GetOrCreateCategoryLocked(
    const char* category_name,
    CategoryInitializerFn category_initializer_fn,
    TraceCategory** category) {
  // This is the slow path: the lock is not held in the fast path
  // (GetCategoryByName), so more than one thread could have reached here
  // trying to add the same category.
  *category = GetCategoryByName(category_name);
  if (*category)
    return false;

  // Create a new category.
  size_t category_index = category_index_.load(std::memory_order_acquire);
  if (category_index >= kMaxCategories) {
    NOTREACHED() << "must increase kMaxCategories";
    *category = kCategoryExhausted;
    return false;
  }

  // TODO(primiano): this strdup should be removed. The only documented reason
  // for it was TraceWatchEvent, which is gone. However, something might have
  // ended up relying on this. Needs some auditing before removal.
  const char* category_name_copy = strdup(category_name);
  ANNOTATE_LEAKING_OBJECT_PTR(category_name_copy);
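
  // The new slot is fully initialized before |category_index_| is advanced
  // with a release store below, so readers that load the index with acquire
  // semantics never observe a partially initialized category.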
  *category = &categories_[category_index];
  DCHECK(!(*category)->is_valid());
  DCHECK(!(*category)->is_enabled());
  (*category)->set_name(category_name_copy);
  category_initializer_fn(*category);

  // Update the max index now.
  category_index_.store(category_index + 1, std::memory_order_release);
  return true;
}

// static
const TraceCategory* CategoryRegistry::GetCategoryByStatePtr(
    const uint8_t* category_state) {
  const TraceCategory* category = TraceCategory::FromStatePtr(category_state);
  DCHECK(IsValidCategoryPtr(category));
  return category;
}

// static
bool CategoryRegistry::IsMetaCategory(const TraceCategory* category) {
  DCHECK(IsValidCategoryPtr(category));
  return category <= kCategoryMetadata;
}

// static
base::span<TraceCategory> CategoryRegistry::GetAllCategories() {
  // The |categories_| array is append-only. We only have to guarantee that we
  // never return a category which is still being initialized by
  // GetOrCreateCategoryByName().
  size_t category_index = category_index_.load(std::memory_order_acquire);
  return base::make_span(categories_).first(category_index);
}

// static
bool CategoryRegistry::IsValidCategoryPtr(const TraceCategory* category) {
  // If any of these are hit, something has cached a corrupt category pointer.
  uintptr_t ptr = reinterpret_cast<uintptr_t>(category);
  return ptr % sizeof(void*) == 0 &&
         ptr >= reinterpret_cast<uintptr_t>(&categories_[0]) &&
         ptr <= reinterpret_cast<uintptr_t>(&categories_[kMaxCategories - 1]);
}

}  // namespace trace_event
}  // namespace base