1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
6 #include "RecordInfo.h"
// RecordInfo bundles the per-class facts the GC plugin needs (tracing,
// allocation, finalization). All lazily computed properties start out as
// kNotComputed / Unknown and are filled in on first query.
// NOTE(review): this view of the file elides lines (the interior line
// numbers jump); some member initializers and the constructor body are not
// visible here — confirm against the full source.
11 RecordInfo::RecordInfo(CXXRecordDecl* record, RecordCache* cache)
14 name_(record->getName()),
15 fields_need_tracing_(TracingStatus::Unknown()),
18 is_stack_allocated_(kNotComputed),
19 is_non_newable_(kNotComputed),
20 is_only_placement_newable_(kNotComputed),
21 does_need_finalization_(kNotComputed),
22 determined_trace_methods_(false),
24 trace_dispatch_method_(0),
25 finalize_dispatch_method_(0),
26 is_gc_derived_(false),
// Destructor. Presumably releases the lazily allocated caches (fields_,
// bases_, base_paths_) — body elided in this view; confirm in full source.
29 RecordInfo::~RecordInfo() {
35 // Get |count| number of template arguments. Returns false if there
36 // are fewer than |count| arguments or any of the arguments are not
37 // of a valid Type structure. If |count| is non-positive, all
38 // arguments are collected.
39 bool RecordInfo::GetTemplateArgs(size_t count, TemplateArgs* output_args) {
40 ClassTemplateSpecializationDecl* tmpl =
41 dyn_cast<ClassTemplateSpecializationDecl>(record_);
// NOTE(review): the failure path for a non-template-specialization class
// (null |tmpl|) and the count <= 0 handling are elided in this view.
44 const TemplateArgumentList& args = tmpl->getTemplateArgs();
45 if (args.size() < count)
// Only arguments of kind Type with a non-null QualType are collected.
49 for (unsigned i = 0; i < count; ++i) {
50 TemplateArgument arg = args[i];
51 if (arg.getKind() == TemplateArgument::Type && !arg.getAsType().isNull()) {
52 output_args->push_back(arg.getAsType().getTypePtr());
60 // Test if a record is a HeapAllocated collection.
// A GC collection counts unconditionally; a WTF collection counts only when
// one of its template arguments is the Blink heap allocator.
61 bool RecordInfo::IsHeapAllocatedCollection() {
62 if (!Config::IsGCCollection(name_) && !Config::IsWTFCollection(name_))
// Scan all template arguments for kHeapAllocatorName.
66 if (GetTemplateArgs(0, &args)) {
67 for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
68 if (CXXRecordDecl* decl = (*it)->getAsCXXRecordDecl())
69 if (decl->getName() == kHeapAllocatorName)
74 return Config::IsGCCollection(name_);
// Callback for CXXRecordDecl::lookupInBases: matches base specifiers whose
// underlying record is one of the known garbage-collected base classes.
77 static bool IsGCBaseCallback(const CXXBaseSpecifier* specifier,
80 if (CXXRecordDecl* record = specifier->getType()->getAsCXXRecordDecl())
81 return Config::IsGCBase(record->getName());
85 // Test if a record is derived from a garbage collected base.
86 bool RecordInfo::IsGCDerived() {
87 // If already computed, return the known result.
89 return is_gc_derived_;
// Record the inheritance paths to GC bases; they are reused later by
// IsGCFinalized() and IsGCMixin().
91 base_paths_ = new CXXBasePaths(true, true, false);
93 if (!record_->hasDefinition())
96 // The base classes are not themselves considered garbage collected objects.
97 if (Config::IsGCBase(name_))
100 // Walk the inheritance tree to find GC base classes.
101 is_gc_derived_ = record_->lookupInBases(IsGCBaseCallback, 0, *base_paths_);
102 return is_gc_derived_;
// Test if a record derives from a GC finalized base. Inspects the last
// element of each recorded base path, i.e. the GC base the path ends in.
// NOTE(review): the guard ensuring base_paths_ was populated (via
// IsGCDerived()) is elided in this view.
105 bool RecordInfo::IsGCFinalized() {
108 for (CXXBasePaths::paths_iterator it = base_paths_->begin();
109 it != base_paths_->end();
111 const CXXBasePathElement& elem = (*it)[it->size() - 1];
112 CXXRecordDecl* base = elem.Base->getType()->getAsCXXRecordDecl();
113 if (Config::IsGCFinalizedBase(base->getName()))
119 // A GC mixin is a class that inherits from a GC mixin base and has
120 // not yet been "mixed in" with another GC base class.
121 bool RecordInfo::IsGCMixin() {
// Requires GC derivation (IsGCDerived() also populates base_paths_).
122 if (!IsGCDerived() || base_paths_->begin() == base_paths_->end())
124 for (CXXBasePaths::paths_iterator it = base_paths_->begin();
125 it != base_paths_->end();
127 // Get the last element of the path.
128 const CXXBasePathElement& elem = (*it)[it->size() - 1];
129 CXXRecordDecl* base = elem.Base->getType()->getAsCXXRecordDecl();
130 // If it is not a mixin base we are done.
131 if (!Config::IsGCMixinBase(base->getName()))
134 // This is a mixin if all GC bases are mixins.
138 // Test if a record is allocated on the managed heap.
// True both for classes deriving from a GC base and for heap-backed
// collections.
139 bool RecordInfo::IsGCAllocated() {
140 return IsGCDerived() || IsHeapAllocatedCollection();
// Look up the RecordInfo for |record| in the cache, creating and caching a
// new entry on first use. Null or GC_PLUGIN_IGNORE-annotated records yield
// no info.
143 RecordInfo* RecordCache::Lookup(CXXRecordDecl* record) {
144 // Ignore classes annotated with the GC_PLUGIN_IGNORE macro.
145 if (!record || Config::IsIgnoreAnnotated(record))
147 Cache::iterator it = cache_.find(record);
148 if (it != cache_.end())
150 return &cache_.insert(std::make_pair(record, RecordInfo(record, this)))
// Test if a record is stack allocated: true when any base is stack
// allocated, or when the class declares an operator new carrying the
// stack-allocated annotation. The result is computed once and memoized.
154 bool RecordInfo::IsStackAllocated() {
155 if (is_stack_allocated_ == kNotComputed) {
156 is_stack_allocated_ = kFalse;
// Inherit stack-allocatedness from any base class.
157 for (Bases::iterator it = GetBases().begin();
158 it != GetBases().end();
160 if (it->second.info()->IsStackAllocated()) {
161 is_stack_allocated_ = kTrue;
162 return is_stack_allocated_;
// Look for an operator new annotated as stack allocated.
165 for (CXXRecordDecl::method_iterator it = record_->method_begin();
166 it != record_->method_end();
168 if (it->getNameAsString() == kNewOperatorName &&
170 Config::IsStackAnnotated(*it)) {
171 is_stack_allocated_ = kTrue;
172 return is_stack_allocated_;
176 return is_stack_allocated_;
// Test if a record cannot be heap allocated at all: true when the class
// declares operator new and every such declaration is deleted. Memoized.
179 bool RecordInfo::IsNonNewable() {
180 if (is_non_newable_ == kNotComputed) {
181 bool deleted = false;
182 bool all_deleted = true;
183 for (CXXRecordDecl::method_iterator it = record_->method_begin();
184 it != record_->method_end();
186 if (it->getNameAsString() == kNewOperatorName) {
187 deleted = it->isDeleted();
188 all_deleted = all_deleted && deleted;
// |deleted| records that at least one operator new was seen (and was
// deleted); |all_deleted| that none were left usable.
191 is_non_newable_ = (deleted && all_deleted) ? kTrue : kFalse;
193 return is_non_newable_;
// Test if a record permits only placement new: the ordinary one-parameter
// operator new must be deleted while a two-parameter placement new is
// declared and not deleted. Memoized.
196 bool RecordInfo::IsOnlyPlacementNewable() {
197 if (is_only_placement_newable_ == kNotComputed) {
198 bool placement = false;
199 bool new_deleted = false;
200 for (CXXRecordDecl::method_iterator it = record_->method_begin();
201 it != record_->method_end();
203 if (it->getNameAsString() == kNewOperatorName) {
204 if (it->getNumParams() == 1) {
205 new_deleted = it->isDeleted();
206 } else if (it->getNumParams() == 2) {
207 placement = !it->isDeleted();
211 is_only_placement_newable_ = (placement && new_deleted) ? kTrue : kFalse;
213 return is_only_placement_newable_;
// Returns the ordinary (single-parameter) operator new declared by this
// class, if any.
216 CXXMethodDecl* RecordInfo::DeclaresNewOperator() {
217 for (CXXRecordDecl::method_iterator it = record_->method_begin();
218 it != record_->method_end();
220 if (it->getNameAsString() == kNewOperatorName && it->getNumParams() == 1)
226 // An object requires a tracing method if it has any fields that need tracing
227 // or if it inherits from multiple bases that need tracing.
228 bool RecordInfo::RequiresTraceMethod() {
// Stack allocated classes take an early exit (return value elided in this
// view).
229 if (IsStackAllocated())
231 unsigned bases_with_trace = 0;
// Count bases whose tracing status is Needed; more than one forces a local
// trace method.
232 for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
233 if (it->second.NeedsTracing().IsNeeded())
236 if (bases_with_trace > 1)
239 return fields_need_tracing_.IsNeeded();
242 // Get the actual tracing method (ie, can be traceAfterDispatch if there is a
// dispatch method); lazily determined by DetermineTracingMethods().
244 CXXMethodDecl* RecordInfo::GetTraceMethod() {
245 DetermineTracingMethods();
246 return trace_method_;
249 // Get the static trace dispatch method.
// Lazily determined; 0 when the class defines (and inherits) no dispatcher.
250 CXXMethodDecl* RecordInfo::GetTraceDispatchMethod() {
251 DetermineTracingMethods();
252 return trace_dispatch_method_;
// Get the static finalize dispatch method (0 if none).
255 CXXMethodDecl* RecordInfo::GetFinalizeDispatchMethod() {
256 DetermineTracingMethods();
257 return finalize_dispatch_method_;
// Lazily collect and cache information about the direct base classes.
260 RecordInfo::Bases& RecordInfo::GetBases() {
262 bases_ = CollectBases();
// A class inherits a trace method if it declares one itself or if any of
// its bases (transitively) does.
266 bool RecordInfo::InheritsTrace() {
267 if (GetTraceMethod())
269 for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
270 if (it->second.info()->InheritsTrace())
// Returns the declared-or-inherited trace method provided it is
// non-virtual; a virtual trace yields 0.
276 CXXMethodDecl* RecordInfo::InheritsNonVirtualTrace() {
277 if (CXXMethodDecl* trace = GetTraceMethod())
278 return trace->isVirtual() ? 0 : trace;
279 for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
280 if (CXXMethodDecl* trace = it->second.info()->InheritsNonVirtualTrace())
286 // A (non-virtual) class is considered abstract in Blink if it has
287 // no public constructors and no create methods.
288 bool RecordInfo::IsConsideredAbstract() {
// Any public constructor other than copy/move makes the class creatable.
289 for (CXXRecordDecl::ctor_iterator it = record_->ctor_begin();
290 it != record_->ctor_end();
292 if (!it->isCopyOrMoveConstructor() && it->getAccess() == AS_public)
// So does a create() factory method.
295 for (CXXRecordDecl::method_iterator it = record_->method_begin();
296 it != record_->method_end();
298 if (it->getNameAsString() == kCreateName)
// Build the map from each direct base class to its BasePoint. A base's
// tracing status is Needed exactly when that base inherits a trace method.
304 RecordInfo::Bases* RecordInfo::CollectBases() {
305 // Compute the collection locally to avoid inconsistent states.
306 Bases* bases = new Bases;
307 if (!record_->hasDefinition())
309 for (CXXRecordDecl::base_class_iterator it = record_->bases_begin();
310 it != record_->bases_end();
312 const CXXBaseSpecifier& spec = *it;
// NOTE(review): the null-check on the looked-up |info| is elided in this
// view.
313 RecordInfo* info = cache_->Lookup(spec.getType());
316 CXXRecordDecl* base = info->record();
317 TracingStatus status = info->InheritsTrace()
318 ? TracingStatus::Needed()
319 : TracingStatus::Unneeded();
320 bases->insert(std::make_pair(base, BasePoint(spec, info, status)));
// Lazily collect and cache information about the record's fields.
325 RecordInfo::Fields& RecordInfo::GetFields() {
327 fields_ = CollectFields();
// Build the map from each (non-ignored) field to its FieldPoint/edge. As a
// side effect, fields_need_tracing_ is set to the least upper bound (LUB)
// of the tracing status over all collected fields.
331 RecordInfo::Fields* RecordInfo::CollectFields() {
332 // Compute the collection locally to avoid inconsistent states.
333 Fields* fields = new Fields;
334 if (!record_->hasDefinition())
336 TracingStatus fields_status = TracingStatus::Unneeded();
337 for (RecordDecl::field_iterator it = record_->field_begin();
338 it != record_->field_end();
340 FieldDecl* field = *it;
341 // Ignore fields annotated with the GC_PLUGIN_IGNORE macro.
342 if (Config::IsIgnoreAnnotated(field))
// Fields for which no edge can be created are silently skipped.
344 if (Edge* edge = CreateEdge(field->getType().getTypePtrOrNull())) {
345 fields_status = fields_status.LUB(edge->NeedsTracing(Edge::kRecursive));
346 fields->insert(std::make_pair(field, FieldPoint(field, edge)));
349 fields_need_tracing_ = fields_status;
// Determine this class's trace method (trace or traceAfterDispatch) and the
// static trace/finalize dispatch methods, inheriting dispatchers from bases
// when not defined locally. Runs at most once per RecordInfo.
353 void RecordInfo::DetermineTracingMethods() {
354 if (determined_trace_methods_)
356 determined_trace_methods_ = true;
// The GC base classes themselves are not assigned tracing methods.
357 if (Config::IsGCBase(name_))
359 CXXMethodDecl* trace = 0;
360 CXXMethodDecl* traceAfterDispatch = 0;
361 bool isTraceAfterDispatch;
362 for (CXXRecordDecl::method_iterator it = record_->method_begin();
363 it != record_->method_end();
365 if (Config::IsTraceMethod(*it, &isTraceAfterDispatch)) {
366 if (isTraceAfterDispatch) {
367 traceAfterDispatch = *it;
371 } else if (it->getNameAsString() == kFinalizeName) {
372 finalize_dispatch_method_ = *it;
// A traceAfterDispatch method implies the plain trace is the dispatcher.
375 if (traceAfterDispatch) {
376 trace_method_ = traceAfterDispatch;
377 trace_dispatch_method_ = trace;
379 // TODO: Can we never have a dispatch method called trace without the same
380 // class defining a traceAfterDispatch method?
381 trace_method_ = trace;
382 trace_dispatch_method_ = 0;
// With both dispatchers already known locally there is nothing to inherit.
384 if (trace_dispatch_method_ && finalize_dispatch_method_)
386 // If this class does not define dispatching methods inherit them.
387 for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
388 // TODO: Does it make sense to inherit multiple dispatch methods?
389 if (CXXMethodDecl* dispatch = it->second.info()->GetTraceDispatchMethod()) {
390 assert(!trace_dispatch_method_ && "Multiple trace dispatching methods");
391 trace_dispatch_method_ = dispatch;
393 if (CXXMethodDecl* dispatch =
394 it->second.info()->GetFinalizeDispatchMethod()) {
395 assert(!finalize_dispatch_method_ &&
396 "Multiple finalize dispatching methods");
397 finalize_dispatch_method_ = dispatch;
402 // TODO: Add classes with a finalize() method that specialize FinalizerTrait.
// Test if the class needs finalization (its destructor must be run when the
// object is swept). Memoized in does_need_finalization_.
403 bool RecordInfo::NeedsFinalization() {
404 if (does_need_finalization_ == kNotComputed) {
405 // Rely on hasNonTrivialDestructor(), but if the only
406 // identifiable reason for it being true is the presence
407 // of a safely ignorable class as a direct base,
408 // or we're processing such an 'ignorable' class, then it does
409 // not need finalization.
410 does_need_finalization_ =
411 record_->hasNonTrivialDestructor() ? kTrue : kFalse;
412 if (!does_need_finalization_)
413 return does_need_finalization_;
415 // Processing a class with a safely-ignorable destructor.
417 dyn_cast<NamespaceDecl>(record_->getDeclContext());
418 if (ns && Config::HasIgnorableDestructor(ns->getName(), name_)) {
419 does_need_finalization_ = kFalse;
420 return does_need_finalization_;
// A user-provided destructor keeps the kTrue answer computed above.
423 CXXDestructorDecl* dtor = record_->getDestructor();
424 if (dtor && dtor->isUserProvided())
425 return does_need_finalization_;
// Likewise any field or base that itself needs finalization.
426 for (Fields::iterator it = GetFields().begin();
427 it != GetFields().end();
429 if (it->second.edge()->NeedsFinalization())
430 return does_need_finalization_;
433 for (Bases::iterator it = GetBases().begin();
434 it != GetBases().end();
436 if (it->second.info()->NeedsFinalization())
437 return does_need_finalization_;
439 // Destructor was non-trivial due to bases with destructors that
440 // can be safely ignored. Hence, no need for finalization.
441 does_need_finalization_ = kFalse;
443 return does_need_finalization_;
446 // A class needs tracing if:
447 // - it is allocated on the managed heap,
448 // - it is derived from a class that needs tracing, or
449 // - it contains fields that need tracing.
450 // TODO: Defining NeedsTracing based on whether a class defines a trace method
451 // (of the proper signature) over approximates too much. The use of transition
452 // types causes some classes to have trace methods without them needing to be
// traced.
454 TracingStatus RecordInfo::NeedsTracing(Edge::NeedsTracingOption option) {
456 return TracingStatus::Needed();
// Stack allocated classes are never traced.
458 if (IsStackAllocated())
459 return TracingStatus::Unneeded();
461 for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
462 if (it->second.info()->NeedsTracing(option).IsNeeded())
463 return TracingStatus::Needed();
// NOTE(review): the statement guarded by the kRecursive check (presumably
// forcing field collection) is elided in this view.
466 if (option == Edge::kRecursive)
469 return fields_need_tracing_;
472 Edge* RecordInfo::CreateEdge(const Type* type) {
477 if (type->isPointerType()) {
478 if (Edge* ptr = CreateEdge(type->getPointeeType().getTypePtrOrNull()))
479 return new RawPtr(ptr, false);
483 RecordInfo* info = cache_->Lookup(type);
485 // If the type is neither a pointer or a C++ record we ignore it.
492 if (Config::IsRawPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
493 if (Edge* ptr = CreateEdge(args[0]))
494 return new RawPtr(ptr, true);
498 if (Config::IsRefPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
499 if (Edge* ptr = CreateEdge(args[0]))
500 return new RefPtr(ptr);
504 if (Config::IsOwnPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
505 if (Edge* ptr = CreateEdge(args[0]))
506 return new OwnPtr(ptr);
510 if (Config::IsMember(info->name()) && info->GetTemplateArgs(1, &args)) {
511 if (Edge* ptr = CreateEdge(args[0]))
512 return new Member(ptr);
516 if (Config::IsWeakMember(info->name()) && info->GetTemplateArgs(1, &args)) {
517 if (Edge* ptr = CreateEdge(args[0]))
518 return new WeakMember(ptr);
522 if (Config::IsPersistent(info->name())) {
523 // Persistent might refer to v8::Persistent, so check the name space.
524 // TODO: Consider using a more canonical identification than names.
526 dyn_cast<NamespaceDecl>(info->record()->getDeclContext());
527 if (!ns || ns->getName() != "blink")
529 if (!info->GetTemplateArgs(1, &args))
531 if (Edge* ptr = CreateEdge(args[0]))
532 return new Persistent(ptr);
536 if (Config::IsGCCollection(info->name()) ||
537 Config::IsWTFCollection(info->name())) {
538 bool is_root = Config::IsPersistentGCCollection(info->name());
539 bool on_heap = is_root || info->IsHeapAllocatedCollection();
540 size_t count = Config::CollectionDimension(info->name());
541 if (!info->GetTemplateArgs(count, &args))
543 Collection* edge = new Collection(info, on_heap, is_root);
544 for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
545 if (Edge* member = CreateEdge(*it)) {
546 edge->members().push_back(member);
548 // TODO: Handle the case where we fail to create an edge (eg, if the
549 // argument is a primitive type or just not fully known yet).
554 return new Value(info);