// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/trace_event/category_registry.h"

#include <string.h>

#include <type_traits>

#include "base/atomicops.h"
#include "base/debug/leak_annotations.h"
#include "base/logging.h"
#include "base/third_party/dynamic_annotations/dynamic_annotations.h"
#include "base/trace_event/trace_category.h"

namespace base {
namespace trace_event {

namespace {

constexpr size_t kMaxCategories = 200;
const int kNumBuiltinCategories = 4;

// |g_categories| might end up requiring dynamic initializers if not POD.
static_assert(std::is_pod<TraceCategory>::value, "TraceCategory must be POD");

// These entries must be kept consistent with the kCategory* consts below.
TraceCategory g_categories[kMaxCategories] = {
    {0, 0, "tracing categories exhausted; must increase kMaxCategories"},
    {0, 0, "tracing already shutdown"},  // See kCategoryAlreadyShutdown below.
    {0, 0, "__metadata"},                // See kCategoryMetadata below.
    {0, 0, "toplevel"},                  // Warmup the toplevel category.
};

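// Index of the first free slot in |g_categories|. Writers bump it with a
// Release_Store only after the new entry is fully initialized; readers use
// Acquire_Load, so they never observe a partially-built category.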
base::subtle::AtomicWord g_category_index = kNumBuiltinCategories;

bool IsValidCategoryPtr(const TraceCategory* category) {
  // If any of these are hit, something has cached a corrupt category pointer.
  uintptr_t ptr = reinterpret_cast<uintptr_t>(category);
  return ptr % sizeof(void*) == 0 &&
         ptr >= reinterpret_cast<uintptr_t>(&g_categories[0]) &&
         ptr <= reinterpret_cast<uintptr_t>(&g_categories[kMaxCategories - 1]);
}

}  // namespace

// static
TraceCategory* const CategoryRegistry::kCategoryExhausted = &g_categories[0];
TraceCategory* const CategoryRegistry::kCategoryAlreadyShutdown =
    &g_categories[1];
TraceCategory* const CategoryRegistry::kCategoryMetadata = &g_categories[2];

// static
void CategoryRegistry::Initialize() {
  // Trace is enabled or disabled on one thread while other threads are
  // accessing the enabled flag. We don't care whether edge-case events are
  // traced or not, so we allow races on the enabled flag to keep the trace
  // macros fast.
  for (size_t i = 0; i < kMaxCategories; ++i) {
    ANNOTATE_BENIGN_RACE(g_categories[i].state_ptr(),
                         "trace_event category enabled");
    // If this DCHECK is hit in a test, it means that ResetForTesting() was
    // not called and category state is leaking between test fixtures.
    DCHECK(!g_categories[i].is_enabled());
  }
}

// static
void CategoryRegistry::ResetForTesting() {
  // reset_for_testing() clears only the enabled state and filters. The
  // categories themselves cannot be removed, because the static pointers
  // injected by the macros still point to them and cannot be reset.
  for (size_t i = 0; i < kMaxCategories; ++i)
    g_categories[i].reset_for_testing();
}

// static
TraceCategory* CategoryRegistry::GetCategoryByName(const char* category_name) {
  DCHECK(!strchr(category_name, '"'))
      << "Category names may not contain double quotes";

  // |g_categories| is append only; avoid taking a lock on the fast path.
  size_t category_index = base::subtle::Acquire_Load(&g_category_index);

  // Search for pre-existing category group.
  for (size_t i = 0; i < category_index; ++i) {
    if (strcmp(g_categories[i].name(), category_name) == 0) {
      return &g_categories[i];
    }
  }
  return nullptr;
}

// static
bool CategoryRegistry::GetOrCreateCategoryLocked(
    const char* category_name,
    CategoryInitializerFn category_initializer_fn,
    TraceCategory** category) {
  // This is the slow path: the lock is not held on the fast path
  // (GetCategoryByName), so more than one thread could have reached here
  // trying to add the same category.
  *category = GetCategoryByName(category_name);
  if (*category)
    return false;

  // Create a new category.
  size_t category_index = base::subtle::Acquire_Load(&g_category_index);
  if (category_index >= kMaxCategories) {
    NOTREACHED() << "must increase kMaxCategories";
    *category = kCategoryExhausted;
    return false;
  }

  // TODO(primiano): this strdup should be removed. The only documented reason
  // for it was TraceWatchEvent, which is gone. However, something might have
  // ended up relying on this. Needs some auditing before removal.
  const char* category_name_copy = strdup(category_name);
  ANNOTATE_LEAKING_OBJECT_PTR(category_name_copy);

  *category = &g_categories[category_index];
  DCHECK(!(*category)->is_valid());
  DCHECK(!(*category)->is_enabled());
  (*category)->set_name(category_name_copy);
  category_initializer_fn(*category);

  // Update the max index now. The Release_Store pairs with the Acquire_Loads
  // in GetCategoryByName() and GetAllCategories(), so readers never see a
  // partially-initialized category.
  base::subtle::Release_Store(&g_category_index, category_index + 1);
  return true;
}
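
// A minimal sketch of the expected caller pattern (hypothetical call site;
// in Chromium the real caller is TraceLog, which holds its own lock around
// this slow path). Capture-less lambdas convert to the CategoryInitializerFn
// function-pointer type:
//
//   TraceCategory* category = nullptr;
//   bool is_new = CategoryRegistry::GetOrCreateCategoryLocked(
//       "my_category",
//       [](TraceCategory* c) { /* set initial enabled state / filters */ },
//       &category);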

// static
const TraceCategory* CategoryRegistry::GetCategoryByStatePtr(
    const uint8_t* category_state) {
  const TraceCategory* category = TraceCategory::FromStatePtr(category_state);
  DCHECK(IsValidCategoryPtr(category));
  return category;
}

// static
bool CategoryRegistry::IsBuiltinCategory(const TraceCategory* category) {
  DCHECK(IsValidCategoryPtr(category));
  return category < &g_categories[kNumBuiltinCategories];
}

// static
CategoryRegistry::Range CategoryRegistry::GetAllCategories() {
  // The |g_categories| array is append only; we only have to guarantee that
  // we never return an index to a category that is still being initialized by
  // GetOrCreateCategoryLocked().
  size_t category_index = base::subtle::Acquire_Load(&g_category_index);
  return CategoryRegistry::Range(&g_categories[0],
                                 &g_categories[category_index]);
}
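
// Illustrative iteration over the returned Range (a sketch; assumes Range
// exposes the usual begin()/end() pair declared in category_registry.h):
//
//   for (const TraceCategory& category :
//        CategoryRegistry::GetAllCategories()) {
//     // |category| is fully initialized here, per the acquire/release
//     // protocol on |g_category_index|.
//   }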

}  // namespace trace_event
}  // namespace base