// Copyright 2016 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifdef UNSAFE_BUFFERS_BUILD
// TODO(crbug.com/40284755): Remove this and spanify to fix the errors.
#pragma allow_unsafe_buffers
#endif

#include "base/trace_event/category_registry.h"

#include <string.h>

#include <ostream>
#include <type_traits>

#include "base/check.h"
#include "base/debug/leak_annotations.h"
#include "base/notreached.h"
#include "third_party/abseil-cpp/absl/base/dynamic_annotations.h"

namespace base {
namespace trace_event {

namespace {

// |categories_| might end up creating dynamic initializers if not POD.
static_assert(std::is_trivial_v<TraceCategory> &&
                  std::is_standard_layout_v<TraceCategory>,
              "TraceCategory must be POD");

}  // namespace

// static
TraceCategory CategoryRegistry::categories_[kMaxCategories] = {
    INTERNAL_TRACE_LIST_BUILTIN_CATEGORIES(INTERNAL_TRACE_INIT_CATEGORY)};

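// |category_index_| counts the used slots of |categories_| and doubles as
// the publication point for lock-free readers: GetOrCreateCategoryLocked()
// fills in a slot and then store-releases the incremented index, so a reader
// that load-acquires the index sees fully initialized entries below it.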
// static
std::atomic<size_t> CategoryRegistry::category_index_{
    BuiltinCategories::Size()};

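// These aliases point at reserved builtin slots at the front of
// |categories_|; IsMetaCategory() below relies on the meta categories
// occupying the lowest-addressed entries.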
// static
TraceCategory* const CategoryRegistry::kCategoryAlreadyShutdown =
    &categories_[1];
TraceCategory* const CategoryRegistry::kCategoryMetadata = &categories_[2];

// static
void CategoryRegistry::Initialize() {
  // Trace is enabled or disabled on one thread while other threads are
  // accessing the enabled flag. We don't care whether edge-case events are
  // traced or not, so we allow races on the enabled flag to keep the trace
  // macros fast.
  for (size_t i = 0; i < kMaxCategories; ++i) {
    ABSL_ANNOTATE_BENIGN_RACE(categories_[i].state_ptr(),
                              "trace_event category enabled");
    // If this DCHECK is hit in a test it means that ResetForTesting() is not
    // called and the categories state leaks between test fixtures.
    DCHECK(!categories_[i].is_enabled());
  }
}

// static
void CategoryRegistry::ResetForTesting() {
  // reset_for_testing() clears only the enabled state and filters. The
  // categories themselves cannot be cleared because the static pointers
  // injected by the macros still point to them and cannot be reset.
  for (size_t i = 0; i < kMaxCategories; ++i)
    categories_[i].reset_for_testing();
}

// static
TraceCategory* CategoryRegistry::GetCategoryByName(const char* category_name) {
  DCHECK(!strchr(category_name, '"'))
      << "Category names may not contain double quote";

  // |categories_| is append-only, so the fast path can avoid taking a lock.
  size_t category_index = category_index_.load(std::memory_order_acquire);

  // Search for pre-existing category group.
  for (size_t i = 0; i < category_index; ++i) {
    if (strcmp(categories_[i].name(), category_name) == 0) {
      return &categories_[i];
    }
  }
  return nullptr;
}

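// A sketch of the expected calling pattern (hypothetical caller; the real
// call sites live in the tracing machinery and must hold the registry lock,
// as the "Locked" suffix implies):
//
//   TraceCategory* category = nullptr;
//   bool created = CategoryRegistry::GetOrCreateCategoryLocked(
//       "my-category", [](TraceCategory* c) { /* set initial state */ },
//       &category);
//
// |created| is true only for the caller that actually appended the entry.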
bool CategoryRegistry::GetOrCreateCategoryLocked(
    const char* category_name,
    CategoryInitializerFn category_initializer_fn,
    TraceCategory** category) {
  // This is the slow path: the lock is not held on the fast path
  // (GetCategoryByName), so more than one thread could have reached here
  // trying to add the same category.
  *category = GetCategoryByName(category_name);
  if (*category)
    return false;

  // Create a new category.
  size_t category_index = category_index_.load(std::memory_order_acquire);
  if (category_index >= kMaxCategories) {
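    // NOTREACHED() is fatal, so execution cannot continue and write past the
    // end of |categories_|.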
    NOTREACHED() << "must increase kMaxCategories";
  }

  // TODO(primiano): this strdup should be removed. The only documented reason
  // for it was TraceWatchEvent, which is gone. However, something might have
  // ended up relying on this. Needs some auditing before removal.
  const char* category_name_copy = strdup(category_name);
  ANNOTATE_LEAKING_OBJECT_PTR(category_name_copy);

  *category = &categories_[category_index];
  DCHECK(!(*category)->is_valid());
  DCHECK(!(*category)->is_enabled());
  (*category)->set_name(category_name_copy);
  category_initializer_fn(*category);

  // Publish the new category: this release store pairs with the acquire
  // loads in GetCategoryByName() and GetAllCategories().
  category_index_.store(category_index + 1, std::memory_order_release);
  return true;
}

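// Maps a pointer to a category's enabled-state byte back to the owning
// TraceCategory (presumably via offset arithmetic inside FromStatePtr) and
// sanity-checks the result.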
// static
const TraceCategory* CategoryRegistry::GetCategoryByStatePtr(
    const uint8_t* category_state) {
  const TraceCategory* category = TraceCategory::FromStatePtr(category_state);
  DCHECK(IsValidCategoryPtr(category));
  return category;
}

// static
bool CategoryRegistry::IsMetaCategory(const TraceCategory* category) {
  DCHECK(IsValidCategoryPtr(category));
  return category <= kCategoryMetadata;
}

// static
span<TraceCategory> CategoryRegistry::GetAllCategories() {
  // The |categories_| array is append-only. We only have to guarantee that
  // we never return an index to a category which is still being initialized
  // by GetOrCreateCategoryLocked().
  size_t category_index = category_index_.load(std::memory_order_acquire);
  return span(categories_).first(category_index);
}

// static
bool CategoryRegistry::IsValidCategoryPtr(const TraceCategory* category) {
  // If any of these are hit, something has cached a corrupt category pointer.
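  // Note that this is a heuristic: it checks pointer alignment and that the
  // address falls inside |categories_|, not that it points exactly at an
  // element boundary.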
  uintptr_t ptr = reinterpret_cast<uintptr_t>(category);
  return ptr % sizeof(void*) == 0 &&
         ptr >= reinterpret_cast<uintptr_t>(&categories_[0]) &&
         ptr <= reinterpret_cast<uintptr_t>(&categories_[kMaxCategories - 1]);
}

}  // namespace trace_event
}  // namespace base