// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "Config.h"
#include "RecordInfo.h"

using namespace clang;
using std::string;

RecordInfo::RecordInfo(CXXRecordDecl* record, RecordCache* cache)
    : cache_(cache),
      record_(record),
      name_(record->getName()),
      fields_need_tracing_(TracingStatus::Unknown()),
      bases_(0),
      fields_(0),
      is_stack_allocated_(kNotComputed),
      is_non_newable_(kNotComputed),
      is_only_placement_newable_(kNotComputed),
      determined_trace_methods_(false),
      trace_method_(0),
      trace_dispatch_method_(0),
      finalize_dispatch_method_(0),
      is_gc_derived_(false),
      base_paths_(0) {}

RecordInfo::~RecordInfo() {
  delete fields_;
  delete bases_;
  delete base_paths_;
}

// Collect |count| template arguments into |output_args|. Returns false if
// there are fewer than |count| arguments or if any collected argument is not
// a valid type. If |count| is zero, all arguments are collected.
bool RecordInfo::GetTemplateArgs(size_t count, TemplateArgs* output_args) {
  ClassTemplateSpecializationDecl* tmpl =
      dyn_cast<ClassTemplateSpecializationDecl>(record_);
  if (!tmpl)
    return false;
  const TemplateArgumentList& args = tmpl->getTemplateArgs();
  if (args.size() < count)
    return false;
  if (count <= 0)
    count = args.size();
  for (unsigned i = 0; i < count; ++i) {
    TemplateArgument arg = args[i];
    if (arg.getKind() == TemplateArgument::Type && !arg.getAsType().isNull()) {
      output_args->push_back(arg.getAsType().getTypePtr());
    } else {
      return false;
    }
  }
  return true;
}

// Test if a record is a HeapAllocated collection.
bool RecordInfo::IsHeapAllocatedCollection() {
  if (!Config::IsGCCollection(name_) && !Config::IsWTFCollection(name_))
    return false;

  TemplateArgs args;
  if (GetTemplateArgs(0, &args)) {
    for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
      if (CXXRecordDecl* decl = (*it)->getAsCXXRecordDecl())
        if (decl->getName() == kHeapAllocatorName)
          return true;
    }
  }

  return Config::IsGCCollection(name_);
}

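// Callback for CXXRecordDecl::lookupInBases that tests whether a base
// specifier names a garbage collected base class.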
static bool IsGCBaseCallback(const CXXBaseSpecifier* specifier,
                             CXXBasePath& path,
                             void* data) {
  if (CXXRecordDecl* record = specifier->getType()->getAsCXXRecordDecl())
    return Config::IsGCBase(record->getName());
  return false;
}

// Test if a record is derived from a garbage collected base.
bool RecordInfo::IsGCDerived() {
  // If already computed, return the known result.
  if (base_paths_)
    return is_gc_derived_;

  base_paths_ = new CXXBasePaths(true, true, false);

  if (!record_->hasDefinition())
    return false;

  // The base classes are not themselves considered garbage collected objects.
  if (Config::IsGCBase(name_))
    return false;

  // Walk the inheritance tree to find GC base classes.
  is_gc_derived_ = record_->lookupInBases(IsGCBaseCallback, 0, *base_paths_);
  return is_gc_derived_;
}

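// Test if a record inherits (possibly transitively) from a GC finalized base
// class, i.e., some inheritance path ends in a GC finalized base.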
bool RecordInfo::IsGCFinalized() {
  if (!IsGCDerived())
    return false;
  for (CXXBasePaths::paths_iterator it = base_paths_->begin();
       it != base_paths_->end();
       ++it) {
    const CXXBasePathElement& elem = (*it)[it->size() - 1];
    CXXRecordDecl* base = elem.Base->getType()->getAsCXXRecordDecl();
    if (Config::IsGCFinalizedBase(base->getName()))
      return true;
  }
  return false;
}

// A GC mixin is a class that inherits from a GC mixin base and has
// not yet been "mixed in" with another GC base class.
bool RecordInfo::IsGCMixin() {
  if (!IsGCDerived() || base_paths_->begin() == base_paths_->end())
    return false;
  for (CXXBasePaths::paths_iterator it = base_paths_->begin();
       it != base_paths_->end();
       ++it) {
    // Get the last element of the path.
    const CXXBasePathElement& elem = (*it)[it->size() - 1];
    CXXRecordDecl* base = elem.Base->getType()->getAsCXXRecordDecl();
    // If it is not a mixin base we are done.
    if (!Config::IsGCMixinBase(base->getName()))
      return false;
  }
  // This is a mixin if all GC bases are mixins.
  return true;
}

// Test if a record is allocated on the managed heap.
bool RecordInfo::IsGCAllocated() {
  return IsGCDerived() || IsHeapAllocatedCollection();
}

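// Look up the cached RecordInfo for a record declaration, creating and
// caching it on first use.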
RecordInfo* RecordCache::Lookup(CXXRecordDecl* record) {
  // Ignore classes annotated with the GC_PLUGIN_IGNORE macro.
  if (!record || Config::IsIgnoreAnnotated(record))
    return 0;
  Cache::iterator it = cache_.find(record);
  if (it != cache_.end())
    return &it->second;
  return &cache_.insert(std::make_pair(record, RecordInfo(record, this)))
              .first->second;
}

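// A record is stack allocated if it declares a deleted, stack-annotated
// operator new, or derives from a stack allocated base. The result is cached.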
bool RecordInfo::IsStackAllocated() {
  if (is_stack_allocated_ == kNotComputed) {
    is_stack_allocated_ = kFalse;
    for (Bases::iterator it = GetBases().begin();
         it != GetBases().end();
         ++it) {
      if (it->second.info()->IsStackAllocated()) {
        is_stack_allocated_ = kTrue;
        return is_stack_allocated_;
      }
    }
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName &&
          it->isDeleted() &&
          Config::IsStackAnnotated(*it)) {
        is_stack_allocated_ = kTrue;
        return is_stack_allocated_;
      }
    }
  }
  return is_stack_allocated_;
}

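// A record is non-newable if it declares operator new and all declared
// overloads are deleted. The result is cached.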
bool RecordInfo::IsNonNewable() {
  if (is_non_newable_ == kNotComputed) {
    bool deleted = false;
    bool all_deleted = true;
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName) {
        deleted = it->isDeleted();
        all_deleted = all_deleted && deleted;
      }
    }
    is_non_newable_ = (deleted && all_deleted) ? kTrue : kFalse;
  }
  return is_non_newable_;
}

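// A record is only placement-newable if its ordinary (single-argument)
// operator new is deleted while the two-argument placement form is not.
// The result is cached.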
bool RecordInfo::IsOnlyPlacementNewable() {
  if (is_only_placement_newable_ == kNotComputed) {
    bool placement = false;
    bool new_deleted = false;
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName) {
        if (it->getNumParams() == 1) {
          new_deleted = it->isDeleted();
        } else if (it->getNumParams() == 2) {
          placement = !it->isDeleted();
        }
      }
    }
    is_only_placement_newable_ = (placement && new_deleted) ? kTrue : kFalse;
  }
  return is_only_placement_newable_;
}

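// Returns the declared (single-argument) operator new, if any.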
CXXMethodDecl* RecordInfo::DeclaresNewOperator() {
  for (CXXRecordDecl::method_iterator it = record_->method_begin();
       it != record_->method_end();
       ++it) {
    if (it->getNameAsString() == kNewOperatorName && it->getNumParams() == 1)
      return *it;
  }
  return 0;
}

// An object requires a tracing method if it has any fields that need tracing
// or if it inherits from multiple bases that need tracing.
bool RecordInfo::RequiresTraceMethod() {
  if (IsStackAllocated())
    return false;
  unsigned bases_with_trace = 0;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.NeedsTracing().IsNeeded())
      ++bases_with_trace;
  }
  if (bases_with_trace > 1)
    return true;
  GetFields();
  return fields_need_tracing_.IsNeeded();
}

// Get the actual tracing method (i.e., this can be traceAfterDispatch if
// there is a dispatch method).
CXXMethodDecl* RecordInfo::GetTraceMethod() {
  DetermineTracingMethods();
  return trace_method_;
}

// Get the static trace dispatch method.
CXXMethodDecl* RecordInfo::GetTraceDispatchMethod() {
  DetermineTracingMethods();
  return trace_dispatch_method_;
}

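// Get the static finalization dispatch method.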
CXXMethodDecl* RecordInfo::GetFinalizeDispatchMethod() {
  DetermineTracingMethods();
  return finalize_dispatch_method_;
}

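// Lazily collect and cache the record's base classes.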
RecordInfo::Bases& RecordInfo::GetBases() {
  if (!bases_)
    bases_ = CollectBases();
  return *bases_;
}

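// Test if the record declares a trace method or inherits one from a base.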
bool RecordInfo::InheritsTrace() {
  if (GetTraceMethod())
    return true;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.info()->InheritsTrace())
      return true;
  }
  return false;
}

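// Returns the nearest declared or inherited trace method provided it is
// non-virtual; returns 0 if that trace method is virtual or none exists.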
CXXMethodDecl* RecordInfo::InheritsNonVirtualTrace() {
  if (CXXMethodDecl* trace = GetTraceMethod())
    return trace->isVirtual() ? 0 : trace;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (CXXMethodDecl* trace = it->second.info()->InheritsNonVirtualTrace())
      return trace;
  }
  return 0;
}

// A (non-virtual) class is considered abstract in Blink if it has
// no public constructors and no create methods.
bool RecordInfo::IsConsideredAbstract() {
  for (CXXRecordDecl::ctor_iterator it = record_->ctor_begin();
       it != record_->ctor_end();
       ++it) {
    if (!it->isCopyOrMoveConstructor() && it->getAccess() == AS_public)
      return false;
  }
  for (CXXRecordDecl::method_iterator it = record_->method_begin();
       it != record_->method_end();
       ++it) {
    if (it->getNameAsString() == kCreateName)
      return false;
  }
  return true;
}

RecordInfo::Bases* RecordInfo::CollectBases() {
  // Compute the collection locally to avoid inconsistent states.
  Bases* bases = new Bases;
  if (!record_->hasDefinition())
    return bases;
  for (CXXRecordDecl::base_class_iterator it = record_->bases_begin();
       it != record_->bases_end();
       ++it) {
    const CXXBaseSpecifier& spec = *it;
    RecordInfo* info = cache_->Lookup(spec.getType());
    if (!info)
      continue;
    CXXRecordDecl* base = info->record();
    TracingStatus status = info->InheritsTrace()
                               ? TracingStatus::Needed()
                               : TracingStatus::Unneeded();
    bases->insert(std::make_pair(base, BasePoint(spec, info, status)));
  }
  return bases;
}

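// Lazily collect and cache the record's fields.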
RecordInfo::Fields& RecordInfo::GetFields() {
  if (!fields_)
    fields_ = CollectFields();
  return *fields_;
}

RecordInfo::Fields* RecordInfo::CollectFields() {
  // Compute the collection locally to avoid inconsistent states.
  Fields* fields = new Fields;
  if (!record_->hasDefinition())
    return fields;
  TracingStatus fields_status = TracingStatus::Unneeded();
  for (RecordDecl::field_iterator it = record_->field_begin();
       it != record_->field_end();
       ++it) {
    FieldDecl* field = *it;
    // Ignore fields annotated with the GC_PLUGIN_IGNORE macro.
    if (Config::IsIgnoreAnnotated(field))
      continue;
    if (Edge* edge = CreateEdge(field->getType().getTypePtrOrNull())) {
      fields_status = fields_status.LUB(edge->NeedsTracing(Edge::kRecursive));
      fields->insert(std::make_pair(field, FieldPoint(field, edge)));
    }
  }
  fields_need_tracing_ = fields_status;
  return fields;
}

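// Determine the record's trace method, trace dispatch method and finalize
// dispatch method. Dispatch methods not defined by the record itself are
// inherited from its bases. The result is computed only once.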
void RecordInfo::DetermineTracingMethods() {
  if (determined_trace_methods_)
    return;
  determined_trace_methods_ = true;
  if (Config::IsGCBase(name_))
    return;
  CXXMethodDecl* trace = 0;
  CXXMethodDecl* traceAfterDispatch = 0;
  bool isTraceAfterDispatch;
  for (CXXRecordDecl::method_iterator it = record_->method_begin();
       it != record_->method_end();
       ++it) {
    if (Config::IsTraceMethod(*it, &isTraceAfterDispatch)) {
      if (isTraceAfterDispatch) {
        traceAfterDispatch = *it;
      } else {
        trace = *it;
      }
    } else if (it->getNameAsString() == kFinalizeName) {
      finalize_dispatch_method_ = *it;
    }
  }
  if (traceAfterDispatch) {
    trace_method_ = traceAfterDispatch;
    trace_dispatch_method_ = trace;
  } else {
    // TODO: Is it ever possible to have a dispatch method called trace
    // without the same class also defining a traceAfterDispatch method?
    trace_method_ = trace;
    trace_dispatch_method_ = 0;
  }
  if (trace_dispatch_method_ && finalize_dispatch_method_)
    return;
  // If this class does not define the dispatching methods, inherit them.
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    // TODO: Does it make sense to inherit multiple dispatch methods?
    if (CXXMethodDecl* dispatch = it->second.info()->GetTraceDispatchMethod()) {
      assert(!trace_dispatch_method_ && "Multiple trace dispatching methods");
      trace_dispatch_method_ = dispatch;
    }
    if (CXXMethodDecl* dispatch =
            it->second.info()->GetFinalizeDispatchMethod()) {
      assert(!finalize_dispatch_method_ &&
             "Multiple finalize dispatching methods");
      finalize_dispatch_method_ = dispatch;
    }
  }
}

// TODO: Add classes with a finalize() method that specialize FinalizerTrait.
bool RecordInfo::NeedsFinalization() {
  return record_->hasNonTrivialDestructor();
}

// A class needs tracing if:
// - it is allocated on the managed heap,
// - it is derived from a class that needs tracing, or
// - it contains fields that need tracing.
// TODO: Defining NeedsTracing based on whether a class defines a trace method
// (of the proper signature) over-approximates too much: the use of transition
// types causes some classes to have trace methods without actually needing to
// be traced.
TracingStatus RecordInfo::NeedsTracing(Edge::NeedsTracingOption option) {
  if (IsGCAllocated())
    return TracingStatus::Needed();

  if (IsStackAllocated())
    return TracingStatus::Unneeded();

  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.info()->NeedsTracing(option).IsNeeded())
      return TracingStatus::Needed();
  }

  if (option == Edge::kRecursive)
    GetFields();

  return fields_need_tracing_;
}

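// Create the edge abstraction for a field of the given type, recursing into
// pointer, smart-pointer and collection template arguments. Returns 0 if no
// edge can be created (e.g., the type is neither a pointer nor a C++ record).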
Edge* RecordInfo::CreateEdge(const Type* type) {
  if (!type) {
    return 0;
  }

  if (type->isPointerType()) {
    if (Edge* ptr = CreateEdge(type->getPointeeType().getTypePtrOrNull()))
      return new RawPtr(ptr, false);
    return 0;
  }

  RecordInfo* info = cache_->Lookup(type);

  // If the type is neither a pointer nor a C++ record we ignore it.
  if (!info) {
    return 0;
  }

  TemplateArgs args;

  if (Config::IsRawPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new RawPtr(ptr, true);
    return 0;
  }

  if (Config::IsRefPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new RefPtr(ptr);
    return 0;
  }

  if (Config::IsOwnPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new OwnPtr(ptr);
    return 0;
  }

  if (Config::IsMember(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new Member(ptr);
    return 0;
  }

  if (Config::IsWeakMember(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new WeakMember(ptr);
    return 0;
  }

  if (Config::IsPersistent(info->name())) {
    // Persistent might refer to v8::Persistent, so check the namespace.
    // TODO: Consider using a more canonical identification than names.
    NamespaceDecl* ns =
        dyn_cast<NamespaceDecl>(info->record()->getDeclContext());
    if (!ns || ns->getName() != "WebCore")
      return 0;
    if (!info->GetTemplateArgs(1, &args))
      return 0;
    if (Edge* ptr = CreateEdge(args[0]))
      return new Persistent(ptr);
    return 0;
  }

  if (Config::IsGCCollection(info->name()) ||
      Config::IsWTFCollection(info->name())) {
    bool is_root = Config::IsPersistentGCCollection(info->name());
    bool on_heap = is_root || info->IsHeapAllocatedCollection();
    size_t count = Config::CollectionDimension(info->name());
    if (!info->GetTemplateArgs(count, &args))
      return 0;
    Collection* edge = new Collection(info, on_heap, is_root);
    for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
      if (Edge* member = CreateEdge(*it)) {
        edge->members().push_back(member);
      }
      // TODO: Handle the case where we fail to create an edge (e.g., if the
      // argument is a primitive type or just not fully known yet).
    }
    return edge;
  }

  return new Value(info);
}