1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 // This clang plugin checks various invariants of the Blink garbage
6 // collection infrastructure.
8 // Errors are described at:
9 // http://www.chromium.org/developers/blink-gc-plugin-errors
12 #include "JsonWriter.h"
13 #include "RecordInfo.h"
15 #include "clang/AST/AST.h"
16 #include "clang/AST/ASTConsumer.h"
17 #include "clang/AST/RecursiveASTVisitor.h"
18 #include "clang/Frontend/CompilerInstance.h"
19 #include "clang/Frontend/FrontendPluginRegistry.h"
21 using namespace clang;
// Diagnostic message templates registered with clang's DiagnosticsEngine.
// %0/%1 placeholders are substituted with class/field names at report time.
// Constants named k*Note are emitted as notes attached to a primary error;
// the rest are primary warnings/errors (severity chosen at registration).
26 const char kClassMustLeftMostlyDeriveGC[] =
27 "[blink-gc] Class %0 must derive its GC base in the left-most position.";
29 const char kClassRequiresTraceMethod[] =
30 "[blink-gc] Class %0 requires a trace method.";
32 const char kBaseRequiresTracing[] =
33 "[blink-gc] Base class %0 of derived class %1 requires tracing.";
35 const char kBaseRequiresTracingNote[] =
36 "[blink-gc] Untraced base class %0 declared here:";
38 const char kFieldsRequireTracing[] =
39 "[blink-gc] Class %0 has untraced fields that require tracing.";
41 const char kFieldRequiresTracingNote[] =
42 "[blink-gc] Untraced field %0 declared here:";
44 const char kClassContainsInvalidFields[] =
45 "[blink-gc] Class %0 contains invalid fields.";
47 const char kClassContainsGCRoot[] =
48 "[blink-gc] Class %0 contains GC root in field %1.";
50 const char kClassRequiresFinalization[] =
51 "[blink-gc] Class %0 requires finalization.";
53 const char kFinalizerAccessesFinalizedField[] =
54 "[blink-gc] Finalizer %0 accesses potentially finalized field %1.";
56 const char kRawPtrToGCManagedClassNote[] =
57 "[blink-gc] Raw pointer field %0 to a GC managed class declared here:";
59 const char kRefPtrToGCManagedClassNote[] =
60 "[blink-gc] RefPtr field %0 to a GC managed class declared here:";
62 const char kOwnPtrToGCManagedClassNote[] =
63 "[blink-gc] OwnPtr field %0 to a GC managed class declared here:";
65 const char kStackAllocatedFieldNote[] =
66 "[blink-gc] Stack-allocated field %0 declared here:";
68 const char kMemberInUnmanagedClassNote[] =
69 "[blink-gc] Member field %0 in unmanaged class declared here:";
71 const char kPartObjectToGCDerivedClassNote[] =
72 "[blink-gc] Part-object field %0 to a GC derived class declared here:";
74 const char kPartObjectContainsGCRootNote[] =
75 "[blink-gc] Field %0 with embedded GC root in %1 declared here:";
77 const char kFieldContainsGCRootNote[] =
78 "[blink-gc] Field %0 defining a GC root declared here:";
80 const char kOverriddenNonVirtualTrace[] =
81 "[blink-gc] Class %0 overrides non-virtual trace of base class %1.";
83 const char kOverriddenNonVirtualTraceNote[] =
84 "[blink-gc] Non-virtual trace method declared here:";
86 const char kMissingTraceDispatchMethod[] =
87 "[blink-gc] Class %0 is missing manual trace dispatch.";
89 const char kMissingFinalizeDispatchMethod[] =
90 "[blink-gc] Class %0 is missing manual finalize dispatch.";
92 const char kVirtualAndManualDispatch[] =
93 "[blink-gc] Class %0 contains or inherits virtual methods"
94 " but implements manual dispatching.";
96 const char kMissingTraceDispatch[] =
97 "[blink-gc] Missing dispatch to class %0 in manual trace dispatch.";
99 const char kMissingFinalizeDispatch[] =
100 "[blink-gc] Missing dispatch to class %0 in manual finalize dispatch.";
102 const char kFinalizedFieldNote[] =
103 "[blink-gc] Potentially finalized field %0 declared here:";
105 const char kUserDeclaredDestructorNote[] =
106 "[blink-gc] User-declared destructor declared here:";
108 const char kUserDeclaredFinalizerNote[] =
109 "[blink-gc] User-declared finalizer declared here:";
111 const char kBaseRequiresFinalizationNote[] =
112 "[blink-gc] Base class %0 requiring finalization declared here:";
114 const char kFieldRequiresFinalizationNote[] =
115 "[blink-gc] Field %0 requiring finalization declared here:";
117 const char kManualDispatchMethodNote[] =
118 "[blink-gc] Manual dispatch %0 declared here:";
120 const char kDerivesNonStackAllocated[] =
121 "[blink-gc] Stack-allocated class %0 derives class %1"
122 " which is not stack allocated.";
124 const char kClassOverridesNew[] =
125 "[blink-gc] Garbage collected class %0"
126 " is not permitted to override its new operator.";
128 const char kClassDeclaresPureVirtualTrace[] =
129 "[blink-gc] Garbage collected class %0"
130 " is not permitted to declare a pure-virtual trace method.";
132 const char kLeftMostBaseMustBePolymorphic[] =
133 "[blink-gc] Left-most base class %0 of derived class %1"
134 " must be polymorphic.";
136 const char kBaseClassMustDeclareVirtualTrace[] =
137 "[blink-gc] Left-most base class %0 of derived class %1"
138 " must define a virtual trace method.";
// Plugin configuration, populated from plugin command-line arguments.
// NOTE(review): the bool members (enable_oilpan, dump_graph) initialized by
// the constructor are declared on lines elided from this listing — confirm
// against the full file.
140 struct BlinkGCPluginOptions {
141 BlinkGCPluginOptions() : enable_oilpan(false), dump_graph(false) {}
// Class names exempt from checking; namespaces whose classes are checked;
// directory substrings whose files are skipped entirely.
144 std::set<std::string> ignored_classes;
145 std::set<std::string> checked_namespaces;
146 std::vector<std::string> ignored_directories;
// Containers for the declarations harvested by CollectVisitor below.
149 typedef std::vector<CXXRecordDecl*> RecordVector;
150 typedef std::vector<CXXMethodDecl*> MethodVector;
152 // Test if a template specialization is an instantiation.
// Non-specializations and non-instantiation kinds yield false; the return
// statements for each case are on lines elided from this listing.
153 static bool IsTemplateInstantiation(CXXRecordDecl* record) {
// dyn_cast yields null for a plain (non-specialized) record.
154 ClassTemplateSpecializationDecl* spec =
155 dyn_cast<ClassTemplateSpecializationDecl>(record);
158 switch (spec->getTemplateSpecializationKind()) {
// Implicit and explicit instantiation definitions count as instantiations.
159 case TSK_ImplicitInstantiation:
160 case TSK_ExplicitInstantiationDefinition:
163 case TSK_ExplicitSpecialization:
165 // TODO: unsupported cases.
166 case TSK_ExplicitInstantiationDeclaration:
// Unreachable if the enum is exhaustively handled above.
169 assert(false && "Unknown template specialization kind");
172 // This visitor collects the entry points for the checker.
// Gathers (1) complete record definitions and (2) trace-method definitions
// from a translation unit; results are consumed by BlinkGCPluginConsumer.
173 class CollectVisitor : public RecursiveASTVisitor<CollectVisitor> {
// Accessors for the collected declarations (non-const: callers iterate).
177 RecordVector& record_decls() { return record_decls_; }
178 MethodVector& trace_decls() { return trace_decls_; }
// Template instantiations are checked separately via the primary template
// (see CheckRecord), so the visitor skips them here.
180 bool shouldVisitTemplateInstantiations() { return false; }
182 // Collect record declarations, including nested declarations.
183 bool VisitCXXRecordDecl(CXXRecordDecl* record) {
// Only complete definitions are checkable; forward declarations are skipped.
184 if (record->hasDefinition() && record->isCompleteDefinition())
185 record_decls_.push_back(record);
189 // Collect tracing method definitions, but don't traverse method bodies.
190 bool TraverseCXXMethodDecl(CXXMethodDecl* method) {
191 if (method->isThisDeclarationADefinition() && Config::IsTraceMethod(method))
192 trace_decls_.push_back(method);
197 RecordVector record_decls_;
198 MethodVector trace_decls_;
201 // This visitor checks that a finalizer method does not have invalid access to
202 // fields that are potentially finalized. A potentially finalized field is
203 // either a Member, a heap-allocated collection or an off-heap collection that
204 // contains Members. Invalid uses are currently identified as passing the field
205 // as the argument of a procedure call or using the -> or [] operators on it.
206 class CheckFinalizerVisitor
207 : public RecursiveASTVisitor<CheckFinalizerVisitor> {
209 // Simple visitor to determine if the content of a field might be collected
210 // during finalization.
211 class MightBeCollectedVisitor : public EdgeVisitor {
213 MightBeCollectedVisitor() : might_be_collected_(false) {}
214 bool might_be_collected() { return might_be_collected_; }
// Any Member edge can be collected before/while the finalizer runs.
215 void VisitMember(Member* edge) override { might_be_collected_ = true; }
216 void VisitCollection(Collection* edge) override {
217 if (edge->on_heap()) {
// A rooted on-heap collection is kept alive; otherwise it may be gone.
218 might_be_collected_ = !edge->is_root();
// Off-heap collections are only dangerous through contained Members.
220 edge->AcceptMembers(this);
225 bool might_be_collected_;
// (MemberExpr, field) pairs flagged as invalid finalizer accesses.
229 typedef std::vector<std::pair<MemberExpr*, FieldPoint*> > Errors;
231 CheckFinalizerVisitor(RecordCache* cache)
232 : blacklist_context_(false), cache_(cache) {}
234 Errors& finalized_fields() { return finalized_fields_; }
236 bool WalkUpFromCXXOperatorCallExpr(CXXOperatorCallExpr* expr) {
237 // Only continue the walk-up if the operator is a blacklisted one.
// (Blacklisted operator cases are on lines elided from this listing;
// per the class comment they include -> and [].)
238 switch (expr->getOperator()) {
241 this->WalkUpFromCallExpr(expr);
247 // We consider all non-operator calls to be blacklisted contexts.
248 bool WalkUpFromCallExpr(CallExpr* expr) {
// Save/restore so nested traversal outside the arguments is unaffected.
249 bool prev_blacklist_context = blacklist_context_;
250 blacklist_context_ = true;
251 for (size_t i = 0; i < expr->getNumArgs(); ++i)
252 this->TraverseStmt(expr->getArg(i));
253 blacklist_context_ = prev_blacklist_context;
257 bool VisitMemberExpr(MemberExpr* member) {
258 FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl());
262 RecordInfo* info = cache_->Lookup(field->getParent());
266 RecordInfo::Fields::iterator it = info->GetFields().find(field);
267 if (it == info->GetFields().end())
// Record an error only when the access occurs in a blacklisted context
// and the field's contents may already have been swept.
270 if (blacklist_context_ && MightBeCollected(&it->second))
271 finalized_fields_.push_back(std::make_pair(member, &it->second));
// Helper: run MightBeCollectedVisitor over the field's edge graph.
275 bool MightBeCollected(FieldPoint* point) {
276 MightBeCollectedVisitor visitor;
277 point->edge()->Accept(&visitor);
278 return visitor.might_be_collected();
282 bool blacklist_context_;
283 Errors finalized_fields_;
287 // This visitor checks that a method contains within its body, a call to a
288 // method on the provided receiver class. This is used to check manual
289 // dispatching for trace and finalize methods.
290 class CheckDispatchVisitor : public RecursiveASTVisitor<CheckDispatchVisitor> {
292 CheckDispatchVisitor(RecordInfo* receiver)
293 : receiver_(receiver), dispatched_to_receiver_(false) {}
// True once any member call on the receiver class has been seen.
295 bool dispatched_to_receiver() { return dispatched_to_receiver_; }
297 bool VisitMemberExpr(MemberExpr* member) {
// Any reference to a method declared on the receiver's record counts;
// the method name itself is not checked here.
298 if (CXXMethodDecl* fn = dyn_cast<CXXMethodDecl>(member->getMemberDecl())) {
299 if (fn->getParent() == receiver_->record())
300 dispatched_to_receiver_ = true;
306 RecordInfo* receiver_;
307 bool dispatched_to_receiver_;
310 // This visitor checks a tracing method by traversing its body.
311 // - A member field is considered traced if it is referenced in the body.
312 // - A base is traced if a base-qualified call to a trace method is found.
// Traced fields/bases are marked on the RecordInfo (MarkTraced); the caller
// inspects the RecordInfo afterwards to report anything left untraced.
313 class CheckTraceVisitor : public RecursiveASTVisitor<CheckTraceVisitor> {
// trace may be null — see the weak-callback constructor further down.
315 CheckTraceVisitor(CXXMethodDecl* trace, RecordInfo* info)
316 : trace_(trace), info_(info) {}
318 bool VisitMemberExpr(MemberExpr* member) {
319 // In weak callbacks, consider any occurrence as a correct usage.
320 // TODO: We really want to require that isAlive is checked on manually
321 // processed weak fields.
322 if (IsWeakCallback()) {
323 if (FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl()))
329 bool VisitCallExpr(CallExpr* call) {
330 // In weak callbacks we don't check calls (see VisitMemberExpr).
331 if (IsWeakCallback())
334 Expr* callee = call->getCallee();
336 // Trace calls from a templated derived class result in a
337 // DependentScopeMemberExpr because the concrete trace call depends on the
338 // instantiation of any shared template parameters. In this case the call is
339 // "unresolved" and we resort to comparing the syntactic type names.
340 if (CXXDependentScopeMemberExpr* expr =
341 dyn_cast<CXXDependentScopeMemberExpr>(callee)) {
342 CheckCXXDependentScopeMemberExpr(call, expr);
346 // A tracing call will have either a |visitor| or a |m_field| argument.
347 // A registerWeakMembers call will have a |this| argument.
348 if (call->getNumArgs() != 1)
350 Expr* arg = call->getArg(0);
352 if (UnresolvedMemberExpr* expr = dyn_cast<UnresolvedMemberExpr>(callee)) {
353 // If we find a call to registerWeakMembers which is unresolved we
354 // unsoundly consider all weak members as traced.
355 // TODO: Find out how to validate weak member tracing for unresolved call.
356 if (expr->getMemberName().getAsString() == kRegisterWeakMembersName) {
357 for (RecordInfo::Fields::iterator it = info_->GetFields().begin();
358 it != info_->GetFields().end();
360 if (it->second.edge()->IsWeakMember())
361 it->second.MarkTraced();
// Unresolved visitor->trace(...) call: resolve the visitor's record from
// the (pointer) base type and check the traced field syntactically.
365 QualType base = expr->getBaseType();
366 if (!base->isPointerType())
368 CXXRecordDecl* decl = base->getPointeeType()->getAsCXXRecordDecl();
370 CheckTraceFieldCall(expr->getMemberName().getAsString(), decl, arg);
374 if (CXXMemberCallExpr* expr = dyn_cast<CXXMemberCallExpr>(call)) {
375 if (CheckTraceFieldCall(expr) || CheckRegisterWeakMembers(expr))
// Not a field trace or weak registration: try it as a base trace call.
379 CheckTraceBaseCall(call);
// Resolve the templated record named by the qualifier of a dependent
// member expression, e.g. the Super of Super<T>::trace(...).
// NOTE(review): the null-checks between these statements are on lines
// elided from this listing.
385 CXXRecordDecl* GetDependentTemplatedDecl(CXXDependentScopeMemberExpr* expr) {
386 NestedNameSpecifier* qual = expr->getQualifier();
390 const Type* type = qual->getAsType();
394 const TemplateSpecializationType* tmpl_type =
395 type->getAs<TemplateSpecializationType>();
399 TemplateDecl* tmpl_decl = tmpl_type->getTemplateName().getAsTemplateDecl();
403 return dyn_cast<CXXRecordDecl>(tmpl_decl->getTemplatedDecl());
406 void CheckCXXDependentScopeMemberExpr(CallExpr* call,
407 CXXDependentScopeMemberExpr* expr) {
408 string fn_name = expr->getMember().getAsString();
409 CXXRecordDecl* tmpl = GetDependentTemplatedDecl(expr);
413 // Check for Super<T>::trace(visitor)
414 if (call->getNumArgs() == 1 && fn_name == trace_->getName()) {
// Match bases by name only: the call is unresolved, so syntactic
// comparison is the best available (see comment in VisitCallExpr).
415 RecordInfo::Bases::iterator it = info_->GetBases().begin();
416 for (; it != info_->GetBases().end(); ++it) {
417 if (it->first->getName() == tmpl->getName())
418 it->second.MarkTraced();
423 // Check for TraceIfNeeded<T>::trace(visitor, &field)
424 if (call->getNumArgs() == 2 && fn_name == kTraceName &&
425 tmpl->getName() == kTraceIfNeededName) {
426 FindFieldVisitor finder;
427 finder.TraverseStmt(call->getArg(1));
429 FoundField(finder.field());
// Mark a base traced if |call| is a base-qualified call to a trace method
// with the same name as the trace method under check.
433 bool CheckTraceBaseCall(CallExpr* call) {
434 MemberExpr* callee = dyn_cast<MemberExpr>(call->getCallee());
438 FunctionDecl* fn = dyn_cast<FunctionDecl>(callee->getMemberDecl());
439 if (!fn || !Config::IsTraceMethod(fn))
442 // Currently, a manually dispatched class cannot have mixin bases (having
443 // one would add a vtable which we explicitly check against). This means
444 // that we can only make calls to a trace method of the same name. Revisit
445 // this if our mixin/vtable assumption changes.
446 if (fn->getName() != trace_->getName())
// The qualifier (e.g. Base:: in Base::trace) names the base record.
449 CXXRecordDecl* decl = 0;
450 if (callee && callee->hasQualifier()) {
451 if (const Type* type = callee->getQualifier()->getAsType())
452 decl = type->getAsCXXRecordDecl();
457 RecordInfo::Bases::iterator it = info_->GetBases().find(decl);
458 if (it != info_->GetBases().end()) {
459 it->second.MarkTraced();
// Overload: unpack a resolved member call and forward to the string-based
// check below.
465 bool CheckTraceFieldCall(CXXMemberCallExpr* call) {
466 return CheckTraceFieldCall(call->getMethodDecl()->getNameAsString(),
467 call->getRecordDecl(),
// Returns true (and marks the field) for visitor->trace(m_field) calls.
471 bool CheckTraceFieldCall(string name, CXXRecordDecl* callee, Expr* arg) {
472 if (name != kTraceName || !Config::IsVisitor(callee->getName()))
475 FindFieldVisitor finder;
476 finder.TraverseStmt(arg);
478 FoundField(finder.field());
// Handle visitor->registerWeakMembers<T, callback>(this): recursively check
// the callback body as a weak callback (trace_ == 0 in the nested visitor).
483 bool CheckRegisterWeakMembers(CXXMemberCallExpr* call) {
484 CXXMethodDecl* fn = call->getMethodDecl();
485 if (fn->getName() != kRegisterWeakMembersName)
488 if (fn->isTemplateInstantiation()) {
489 const TemplateArgumentList& args =
490 *fn->getTemplateSpecializationInfo()->TemplateArguments;
491 // The second template argument is the callback method.
492 if (args.size() > 1 &&
493 args[1].getKind() == TemplateArgument::Declaration) {
494 if (FunctionDecl* callback =
495 dyn_cast<FunctionDecl>(args[1].getAsDecl())) {
496 if (callback->hasBody()) {
497 CheckTraceVisitor nested_visitor(info_);
498 nested_visitor.TraverseStmt(callback->getBody());
// Finds the field referenced (directly or via this->) by an expression.
506 class FindFieldVisitor : public RecursiveASTVisitor<FindFieldVisitor> {
508 FindFieldVisitor() : member_(0), field_(0) {}
509 MemberExpr* member() const { return member_; }
510 FieldDecl* field() const { return field_; }
511 bool TraverseMemberExpr(MemberExpr* member) {
512 if (FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl())) {
524 // Nested checking for weak callbacks.
525 CheckTraceVisitor(RecordInfo* info) : trace_(0), info_(info) {}
// A null trace_ identifies this visitor as checking a weak callback.
527 bool IsWeakCallback() { return !trace_; }
529 void MarkTraced(RecordInfo::Fields::iterator it) {
530 // In a weak callback we can't mark strong fields as traced.
531 if (IsWeakCallback() && !it->second.edge()->IsWeakMember())
533 it->second.MarkTraced();
536 void FoundField(FieldDecl* field) {
537 if (IsTemplateInstantiation(info_->record())) {
538 // Pointer equality on fields does not work for template instantiations.
539 // The trace method refers to fields of the template definition which
540 // are different from the instantiated fields that need to be traced.
541 const string& name = field->getNameAsString();
542 for (RecordInfo::Fields::iterator it = info_->GetFields().begin();
543 it != info_->GetFields().end();
545 if (it->first->getNameAsString() == name) {
// Non-instantiation case: the FieldDecl can be looked up directly.
551 RecordInfo::Fields::iterator it = info_->GetFields().find(field);
552 if (it != info_->GetFields().end())
557 CXXMethodDecl* trace_;
561 // This visitor checks that the fields of a class and the fields of
562 // its part objects don't define GC roots.
563 class CheckGCRootsVisitor : public RecursiveEdgeVisitor {
// A root path is the chain of fields leading from the checked class to
// the offending root; one path is recorded per detected root.
565 typedef std::vector<FieldPoint*> RootPath;
566 typedef std::vector<RootPath> Errors;
568 CheckGCRootsVisitor() {}
570 Errors& gc_roots() { return gc_roots_; }
// Entry point: visits each field edge; returns true if any root was found.
// current_ tracks the field path while recursing into part objects.
572 bool ContainsGCRoots(RecordInfo* info) {
573 for (RecordInfo::Fields::iterator it = info->GetFields().begin();
574 it != info->GetFields().end();
576 current_.push_back(&it->second);
577 it->second.edge()->Accept(this);
580 return !gc_roots_.empty();
583 void VisitValue(Value* edge) override {
584 // TODO: what should we do to check unions?
585 if (edge->value()->record()->isUnion())
588 // If the value is a part object, then continue checking for roots.
589 for (Context::iterator it = context().begin();
590 it != context().end();
// Values reached through a collection are not part objects; stop there.
592 if (!(*it)->IsCollection())
595 ContainsGCRoots(edge->value());
// A Persistent handle is by definition a GC root.
598 void VisitPersistent(Persistent* edge) override {
599 gc_roots_.push_back(current_);
602 void AtCollection(Collection* edge) override {
604 gc_roots_.push_back(current_);
612 // This visitor checks that the fields of a class are "well formed".
613 // - OwnPtr, RefPtr and RawPtr must not point to a GC derived types.
614 // - Part objects must not be GC derived types.
615 // - An on-heap class must never contain GC roots.
616 // - Only stack-allocated types may point to stack-allocated types.
// NOTE(review): the Error enum referenced below (kMemberInUnmanaged,
// kPtrFromHeapToStack, etc.) is declared on lines elided from this listing.
617 class CheckFieldsVisitor : public RecursiveEdgeVisitor {
629 typedef std::vector<std::pair<FieldPoint*, Error> > Errors;
631 CheckFieldsVisitor(const BlinkGCPluginOptions& options)
632 : options_(options), current_(0), stack_allocated_host_(false) {}
634 Errors& invalid_fields() { return invalid_fields_; }
// Entry point: classifies the host, then visits every field edge.
// Returns true if any invalid field was recorded.
636 bool ContainsInvalidFields(RecordInfo* info) {
637 stack_allocated_host_ = info->IsStackAllocated();
// "Managed" hosts are those whose allocation is controlled (stack, GC
// heap, non-newable or placement-new-only) and so may hold Members.
638 managed_host_ = stack_allocated_host_ ||
639 info->IsGCAllocated() ||
640 info->IsNonNewable() ||
641 info->IsOnlyPlacementNewable();
642 for (RecordInfo::Fields::iterator it = info->GetFields().begin();
643 it != info->GetFields().end();
646 current_ = &it->second;
647 current_->edge()->Accept(this);
649 return !invalid_fields_.empty();
652 void AtMember(Member* edge) override {
655 // A member is allowed to appear in the context of a root.
656 for (Context::iterator it = context().begin();
657 it != context().end();
659 if ((*it)->Kind() == Edge::kRoot)
662 invalid_fields_.push_back(std::make_pair(current_, kMemberInUnmanaged));
665 void AtValue(Value* edge) override {
666 // TODO: what should we do to check unions?
667 if (edge->value()->record()->isUnion())
// A stack-allocated type embedded/pointed to from a non-stack host would
// outlive its frame.
670 if (!stack_allocated_host_ && edge->value()->IsStackAllocated()) {
671 invalid_fields_.push_back(std::make_pair(current_, kPtrFromHeapToStack));
676 edge->value()->IsGCDerived() &&
677 !edge->value()->IsGCMixin()) {
678 invalid_fields_.push_back(std::make_pair(current_, kGCDerivedPartObject));
// Remaining checks only concern smart/raw pointers to GC allocated types.
682 if (!Parent() || !edge->value()->IsGCAllocated())
685 // In transition mode, disallow OwnPtr<T>, RawPtr<T> to GC allocated T's,
686 // also disallow T* in stack-allocated types.
687 if (options_.enable_oilpan) {
688 if (Parent()->IsOwnPtr() ||
689 Parent()->IsRawPtrClass() ||
690 (stack_allocated_host_ && Parent()->IsRawPtr())) {
691 invalid_fields_.push_back(std::make_pair(
692 current_, InvalidSmartPtr(Parent())));
699 if (Parent()->IsRawPtr() || Parent()->IsRefPtr() || Parent()->IsOwnPtr()) {
700 invalid_fields_.push_back(std::make_pair(
701 current_, InvalidSmartPtr(Parent())));
// An on-heap collection must not be held through an OwnPtr.
706 void AtCollection(Collection* edge) override {
707 if (edge->on_heap() && Parent() && Parent()->IsOwnPtr())
708 invalid_fields_.push_back(std::make_pair(current_, kOwnPtrToGCManaged));
// Map a pointer edge kind to its error code. The IsRawPtr/IsRefPtr guards
// preceding each return are on lines elided from this listing.
712 Error InvalidSmartPtr(Edge* ptr) {
714 return kRawPtrToGCManaged;
716 return kRefPtrToGCManaged;
718 return kOwnPtrToGCManaged;
719 assert(false && "Unknown smart pointer kind");
722 const BlinkGCPluginOptions& options_;
723 FieldPoint* current_;
724 bool stack_allocated_host_;
726 Errors invalid_fields_;
729 // Main class containing checks for various invariants of the Blink
730 // garbage collection infrastructure.
731 class BlinkGCPluginConsumer : public ASTConsumer {
// The constructor fixes the built-in configuration (checked namespaces,
// ignored directories) and registers every custom diagnostic ID up front;
// getErrorLevel() presumably maps to Warning or Error depending on options
// — its definition is outside this listing.
733 BlinkGCPluginConsumer(CompilerInstance& instance,
734 const BlinkGCPluginOptions& options)
735 : instance_(instance),
736 diagnostic_(instance.getDiagnostics()),
740 // Only check structures in the blink and WebKit namespaces.
741 options_.checked_namespaces.insert("blink");
742 options_.checked_namespaces.insert("WebKit");
744 // Ignore GC implementation files.
745 options_.ignored_directories.push_back("/heap/");
747 // Register warning/error messages.
748 diag_class_must_left_mostly_derive_gc_ = diagnostic_.getCustomDiagID(
749 getErrorLevel(), kClassMustLeftMostlyDeriveGC);
750 diag_class_requires_trace_method_ =
751 diagnostic_.getCustomDiagID(getErrorLevel(), kClassRequiresTraceMethod);
752 diag_base_requires_tracing_ =
753 diagnostic_.getCustomDiagID(getErrorLevel(), kBaseRequiresTracing);
754 diag_fields_require_tracing_ =
755 diagnostic_.getCustomDiagID(getErrorLevel(), kFieldsRequireTracing);
756 diag_class_contains_invalid_fields_ = diagnostic_.getCustomDiagID(
757 getErrorLevel(), kClassContainsInvalidFields);
758 diag_class_contains_gc_root_ =
759 diagnostic_.getCustomDiagID(getErrorLevel(), kClassContainsGCRoot);
760 diag_class_requires_finalization_ = diagnostic_.getCustomDiagID(
761 getErrorLevel(), kClassRequiresFinalization);
762 diag_finalizer_accesses_finalized_field_ = diagnostic_.getCustomDiagID(
763 getErrorLevel(), kFinalizerAccessesFinalizedField);
764 diag_overridden_non_virtual_trace_ = diagnostic_.getCustomDiagID(
765 getErrorLevel(), kOverriddenNonVirtualTrace);
766 diag_missing_trace_dispatch_method_ = diagnostic_.getCustomDiagID(
767 getErrorLevel(), kMissingTraceDispatchMethod);
768 diag_missing_finalize_dispatch_method_ = diagnostic_.getCustomDiagID(
769 getErrorLevel(), kMissingFinalizeDispatchMethod);
770 diag_virtual_and_manual_dispatch_ =
771 diagnostic_.getCustomDiagID(getErrorLevel(), kVirtualAndManualDispatch);
772 diag_missing_trace_dispatch_ =
773 diagnostic_.getCustomDiagID(getErrorLevel(), kMissingTraceDispatch);
774 diag_missing_finalize_dispatch_ =
775 diagnostic_.getCustomDiagID(getErrorLevel(), kMissingFinalizeDispatch);
776 diag_derives_non_stack_allocated_ =
777 diagnostic_.getCustomDiagID(getErrorLevel(), kDerivesNonStackAllocated);
778 diag_class_overrides_new_ =
779 diagnostic_.getCustomDiagID(getErrorLevel(), kClassOverridesNew);
780 diag_class_declares_pure_virtual_trace_ = diagnostic_.getCustomDiagID(
781 getErrorLevel(), kClassDeclaresPureVirtualTrace);
782 diag_left_most_base_must_be_polymorphic_ = diagnostic_.getCustomDiagID(
783 getErrorLevel(), kLeftMostBaseMustBePolymorphic);
784 diag_base_class_must_declare_virtual_trace_ = diagnostic_.getCustomDiagID(
785 getErrorLevel(), kBaseClassMustDeclareVirtualTrace);
787 // Register note messages.
788 diag_base_requires_tracing_note_ = diagnostic_.getCustomDiagID(
789 DiagnosticsEngine::Note, kBaseRequiresTracingNote);
790 diag_field_requires_tracing_note_ = diagnostic_.getCustomDiagID(
791 DiagnosticsEngine::Note, kFieldRequiresTracingNote);
792 diag_raw_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID(
793 DiagnosticsEngine::Note, kRawPtrToGCManagedClassNote);
794 diag_ref_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID(
795 DiagnosticsEngine::Note, kRefPtrToGCManagedClassNote);
796 diag_own_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID(
797 DiagnosticsEngine::Note, kOwnPtrToGCManagedClassNote);
798 diag_stack_allocated_field_note_ = diagnostic_.getCustomDiagID(
799 DiagnosticsEngine::Note, kStackAllocatedFieldNote);
800 diag_member_in_unmanaged_class_note_ = diagnostic_.getCustomDiagID(
801 DiagnosticsEngine::Note, kMemberInUnmanagedClassNote);
802 diag_part_object_to_gc_derived_class_note_ = diagnostic_.getCustomDiagID(
803 DiagnosticsEngine::Note, kPartObjectToGCDerivedClassNote);
804 diag_part_object_contains_gc_root_note_ = diagnostic_.getCustomDiagID(
805 DiagnosticsEngine::Note, kPartObjectContainsGCRootNote);
806 diag_field_contains_gc_root_note_ = diagnostic_.getCustomDiagID(
807 DiagnosticsEngine::Note, kFieldContainsGCRootNote);
808 diag_finalized_field_note_ = diagnostic_.getCustomDiagID(
809 DiagnosticsEngine::Note, kFinalizedFieldNote);
810 diag_user_declared_destructor_note_ = diagnostic_.getCustomDiagID(
811 DiagnosticsEngine::Note, kUserDeclaredDestructorNote);
812 diag_user_declared_finalizer_note_ = diagnostic_.getCustomDiagID(
813 DiagnosticsEngine::Note, kUserDeclaredFinalizerNote);
814 diag_base_requires_finalization_note_ = diagnostic_.getCustomDiagID(
815 DiagnosticsEngine::Note, kBaseRequiresFinalizationNote);
816 diag_field_requires_finalization_note_ = diagnostic_.getCustomDiagID(
817 DiagnosticsEngine::Note, kFieldRequiresFinalizationNote);
818 diag_overridden_non_virtual_trace_note_ = diagnostic_.getCustomDiagID(
819 DiagnosticsEngine::Note, kOverriddenNonVirtualTraceNote);
820 diag_manual_dispatch_method_note_ = diagnostic_.getCustomDiagID(
821 DiagnosticsEngine::Note, kManualDispatchMethodNote);
// Per-TU entry point: collect records/trace methods, optionally open the
// graph-dump output file, then run the checks on everything collected.
824 void HandleTranslationUnit(ASTContext& context) override {
825 CollectVisitor visitor;
826 visitor.TraverseDecl(context.getTranslationUnitDecl());
828 if (options_.dump_graph) {
830 // TODO: Make createDefaultOutputFile or a shorter createOutputFile work.
831 json_ = JsonWriter::from(instance_.createOutputFile(
// Some createOutputFile arguments are on lines elided from this listing.
835 true, // RemoveFileOnSignal
836 instance_.getFrontendOpts().OutputFile, // BaseInput
837 "graph.json", // Extension
838 false, // UseTemporary
839 false, // CreateMissingDirectories
842 if (err.empty() && json_) {
848 << "Failed to create an output file for the object graph.\n";
// Check each collected record definition...
852 for (RecordVector::iterator it = visitor.record_decls().begin();
853 it != visitor.record_decls().end();
855 CheckRecord(cache_.Lookup(*it));
// ...and each collected trace-method definition.
858 for (MethodVector::iterator it = visitor.trace_decls().begin();
859 it != visitor.trace_decls().end();
861 CheckTracingMethod(*it);
871 // Main entry for checking a record declaration.
// Skips unions; for a primary class template, checks every specialization
// instead of the template itself (templates are checked via their
// instantiations/specializations).
872 void CheckRecord(RecordInfo* info) {
876 CXXRecordDecl* record = info->record();
878 // TODO: what should we do to check unions?
879 if (record->isUnion())
882 // If this is the primary template declaration, check its specializations.
883 if (record->isThisDeclarationADefinition() &&
884 record->getDescribedClassTemplate()) {
885 ClassTemplateDecl* tmpl = record->getDescribedClassTemplate();
886 for (ClassTemplateDecl::spec_iterator it = tmpl->spec_begin();
887 it != tmpl->spec_end();
889 CheckClass(cache_.Lookup(*it));
897 // Check a class-like object (eg, class, specialization, instantiation).
// Runs, in order: stack-allocation consistency, trace-method checks,
// field well-formedness, and (for GC derived classes) base/new-operator,
// GC-root and finalization checks.
898 void CheckClass(RecordInfo* info) {
902 // Check consistency of stack-allocated hierarchies.
903 if (info->IsStackAllocated()) {
904 for (RecordInfo::Bases::iterator it = info->GetBases().begin();
905 it != info->GetBases().end();
907 if (!it->second.info()->IsStackAllocated())
908 ReportDerivesNonStackAllocated(info, &it->second);
912 if (CXXMethodDecl* trace = info->GetTraceMethod()) {
// The pure-virtual guard for this report is on a line elided here.
914 ReportClassDeclaresPureVirtualTrace(info, trace);
915 if (info->record()->isPolymorphic())
916 CheckPolymorphicClass(info, trace);
917 } else if (info->RequiresTraceMethod()) {
918 ReportClassRequiresTraceMethod(info);
922 CheckFieldsVisitor visitor(options_);
923 if (visitor.ContainsInvalidFields(info))
924 ReportClassContainsInvalidFields(info, &visitor.invalid_fields());
927 if (info->IsGCDerived()) {
// Mixins are exempt from the left-most-base and new-operator rules.
929 if (!info->IsGCMixin()) {
930 CheckLeftMostDerived(info);
932 if (CXXMethodDecl* newop = info->DeclaresNewOperator())
933 ReportClassOverridesNew(info, newop);
937 CheckGCRootsVisitor visitor;
938 if (visitor.ContainsGCRoots(info))
939 ReportClassContainsGCRoots(info, &visitor.gc_roots());
942 if (info->NeedsFinalization())
943 CheckFinalization(info);
// Resolve the templated record behind a dependent template-specialization
// type (used while walking dependent base-class chains). The null-checks
// between these statements are on lines elided from this listing.
949 CXXRecordDecl* GetDependentTemplatedDecl(const Type& type) {
950 const TemplateSpecializationType* tmpl_type =
951 type.getAs<TemplateSpecializationType>();
955 TemplateDecl* tmpl_decl = tmpl_type->getTemplateName().getAsTemplateDecl();
959 return dyn_cast<CXXRecordDecl>(tmpl_decl->getTemplatedDecl());
962 // The GC infrastructure assumes that if the vtable of a polymorphic
963 // base-class is not initialized for a given object (ie, it is partially
964 // initialized) then the object does not need to be traced. Thus, we must
965 // ensure that any polymorphic class with a trace method does not have any
966 // tractable fields that are initialized before we are sure that the vtable
967 // and the trace method are both defined. There are two cases that need to
968 // hold to satisfy that assumption:
970 // 1. If trace is virtual, then it must be defined in the left-most base.
971 // This ensures that if the vtable is initialized and it contains a pointer to
974 // 2. If trace is non-virtual, then the trace method is defined and we must
975 // ensure that the left-most base defines a vtable. This ensures that the
976 // first thing to be initialized when constructing the object is the vtable
978 void CheckPolymorphicClass(RecordInfo* info, CXXMethodDecl* trace) {
// Walk down the left-most base chain until we hit a GC mixin base, a
// "safe" polymorphic base, a dependent/undefined base, or the bottom.
979 CXXRecordDecl* left_most = info->record();
980 CXXRecordDecl::base_class_iterator it = left_most->bases_begin();
981 CXXRecordDecl* left_most_base = 0;
982 while (it != left_most->bases_end()) {
983 left_most_base = it->getType()->getAsCXXRecordDecl();
984 if (!left_most_base && it->getType()->isDependentType())
985 left_most_base = GetDependentTemplatedDecl(*it->getType());
987 // TODO: Find a way to correctly check actual instantiations
988 // for dependent types. The escape below will be hit, eg, when
989 // we have a primary template with no definition and
990 // specializations for each case (such as SupplementBase) in
991 // which case we don't succeed in checking the required
993 if (!left_most_base || !left_most_base->hasDefinition())
996 StringRef name = left_most_base->getName();
997 // We know GCMixin base defines virtual trace.
998 if (Config::IsGCMixinBase(name))
1001 // Stop with the left-most prior to a safe polymorphic base (a safe base
1002 // is non-polymorphic and contains no fields that need tracing).
1003 if (Config::IsSafePolymorphicBase(name))
1006 left_most = left_most_base;
1007 it = left_most->bases_begin();
1010 if (RecordInfo* left_most_info = cache_.Lookup(left_most)) {
1012 // Check condition (1):
1013 if (trace->isVirtual()) {
// Shadows the parameter deliberately: this is the left-most base's trace.
1014 if (CXXMethodDecl* trace = left_most_info->GetTraceMethod()) {
1015 if (trace->isVirtual())
1018 ReportBaseClassMustDeclareVirtualTrace(info, left_most);
1022 // Check condition (2):
1023 if (DeclaresVirtualMethods(info->record()))
1025 if (left_most_base) {
1026 ++it; // Get the base next to the "safe polymorphic base"
1027 if (it != left_most->bases_end()) {
1028 if (CXXRecordDecl* next_base = it->getType()->getAsCXXRecordDecl()) {
1029 if (CXXRecordDecl* next_left_most = GetLeftMostBase(next_base)) {
1030 if (DeclaresVirtualMethods(next_left_most))
1032 ReportLeftMostBaseMustBePolymorphic(info, next_left_most);
1038 ReportLeftMostBaseMustBePolymorphic(info, left_most);
// Follow the left-most base chain to its root, resolving dependent bases
// through their primary template; the final return is on a line elided
// from this listing.
1042 CXXRecordDecl* GetLeftMostBase(CXXRecordDecl* left_most) {
1043 CXXRecordDecl::base_class_iterator it = left_most->bases_begin();
1044 while (it != left_most->bases_end()) {
1045 if (it->getType()->isDependentType())
1046 left_most = GetDependentTemplatedDecl(*it->getType());
1048 left_most = it->getType()->getAsCXXRecordDecl();
// Stop if the base cannot be resolved or has no visible definition.
1049 if (!left_most || !left_most->hasDefinition())
1051 it = left_most->bases_begin();
// Returns whether |decl| itself declares at least one non-pure virtual
// method; pure virtuals alone do not count.
// NOTE(review): the return statements are elided in this excerpt.
1056 bool DeclaresVirtualMethods(CXXRecordDecl* decl) {
1057 CXXRecordDecl::method_iterator it = decl->method_begin();
1058 for (; it != decl->method_end(); ++it)
1059 if (it->isVirtual() && !it->isPure())
// Checks that |info| derives its GC base in the left-most position,
// reporting kClassMustLeftMostlyDeriveGC otherwise.
1064 void CheckLeftMostDerived(RecordInfo* info) {
1065 CXXRecordDecl* left_most = info->record();
// Follow the first base specifier down to the root of the left-most path.
1066 CXXRecordDecl::base_class_iterator it = left_most->bases_begin();
1067 while (it != left_most->bases_end()) {
1068 left_most = it->getType()->getAsCXXRecordDecl();
1069 it = left_most->bases_begin();
// The root of the left-most path must be a known GC base class.
1071 if (!Config::IsGCBase(left_most->getName()))
1072 ReportClassMustLeftMostlyDeriveGC(info);
// Validates manual trace/finalize dispatch for |info|: dispatch methods
// must be declared at the dispatch base, must not coexist with a vtable,
// and (when the dispatch body is visible in this TU) must actually
// dispatch to this receiver class.
// NOTE(review): several early-return/brace lines are elided in this
// excerpt (gaps in the embedded numbering).
1075 void CheckDispatch(RecordInfo* info) {
1076 bool finalized = info->IsGCFinalized();
1077 CXXMethodDecl* trace_dispatch = info->GetTraceDispatchMethod();
1078 CXXMethodDecl* finalize_dispatch = info->GetFinalizeDispatchMethod();
// Nothing to check when the class uses no manual dispatch at all.
1079 if (!trace_dispatch && !finalize_dispatch)
// The base where dispatching is declared (either dispatch method works).
1082 CXXRecordDecl* base = trace_dispatch ? trace_dispatch->getParent()
1083 : finalize_dispatch->getParent();
1085 // Check that dispatch methods are defined at the base.
1086 if (base == info->record()) {
1087 if (!trace_dispatch)
1088 ReportMissingTraceDispatchMethod(info);
1089 if (finalized && !finalize_dispatch)
1090 ReportMissingFinalizeDispatchMethod(info);
// A finalize dispatcher on a non-finalized class implies the class
// should have been declared finalized.
1091 if (!finalized && finalize_dispatch) {
1092 ReportClassRequiresFinalization(info);
1093 NoteUserDeclaredFinalizer(finalize_dispatch);
1097 // Check that classes implementing manual dispatch do not have vtables.
1098 if (info->record()->isPolymorphic())
1099 ReportVirtualAndManualDispatch(
1100 info, trace_dispatch ? trace_dispatch : finalize_dispatch);
1102 // If this is a non-abstract class check that it is dispatched to.
1103 // TODO: Create a global variant of this local check. We can only check if
1104 // the dispatch body is known in this compilation unit.
1105 if (info->IsConsideredAbstract())
1108 const FunctionDecl* defn;
// Verify the trace dispatcher's body reaches this receiver class.
1110 if (trace_dispatch && trace_dispatch->isDefined(defn)) {
1111 CheckDispatchVisitor visitor(info);
1112 visitor.TraverseStmt(defn->getBody());
1113 if (!visitor.dispatched_to_receiver())
1114 ReportMissingTraceDispatch(defn, info);
// Same check for the finalize dispatcher of a finalized class.
1117 if (finalized && finalize_dispatch && finalize_dispatch->isDefined(defn)) {
1118 CheckDispatchVisitor visitor(info);
1119 visitor.TraverseStmt(defn->getBody());
1120 if (!visitor.dispatched_to_receiver())
1121 ReportMissingFinalizeDispatch(defn, info);
1125 // TODO: Should we collect destructors similar to trace methods?
// Checks finalization constraints: a finalized class's destructor must not
// touch potentially-finalized fields; a class that needs finalization but
// is not declared finalized gets an error plus notes on the causes
// (user destructor, bases/fields needing finalization).
1126 void CheckFinalization(RecordInfo* info) {
1127 CXXDestructorDecl* dtor = info->record()->getDestructor();
1129 // For finalized classes, check the finalization method if possible.
1130 if (info->IsGCFinalized()) {
1131 if (dtor && dtor->hasBody()) {
1132 CheckFinalizerVisitor visitor(&cache_);
1133 visitor.TraverseCXXMethodDecl(dtor);
1134 if (!visitor.finalized_fields().empty()) {
1135 ReportFinalizerAccessesFinalizedFields(
1136 dtor, &visitor.finalized_fields());
1142 // Don't require finalization of a mixin that has not yet been "mixed in".
1143 if (info->IsGCMixin())
1146 // Report the finalization error, and proceed to print possible causes for
1147 // the finalization requirement.
1148 ReportClassRequiresFinalization(info);
1150 if (dtor && dtor->isUserProvided())
1151 NoteUserDeclaredDestructor(dtor);
// Note every base that independently requires finalization.
1153 for (RecordInfo::Bases::iterator it = info->GetBases().begin();
1154 it != info->GetBases().end();
1156 if (it->second.info()->NeedsFinalization())
1157 NoteBaseRequiresFinalization(&it->second);
// Note every field that independently requires finalization.
1160 for (RecordInfo::Fields::iterator it = info->GetFields().begin();
1161 it != info->GetFields().end();
1163 if (it->second.edge()->NeedsFinalization())
1164 NoteField(&it->second, diag_field_requires_finalization_note_);
1168 // This is the main entry for tracing method definitions.
// Skips ignored records; for a primary class template, checks the trace
// method against each template instantiation instead of the pattern.
1169 void CheckTracingMethod(CXXMethodDecl* method) {
1170 RecordInfo* parent = cache_.Lookup(method->getParent());
1171 if (IsIgnored(parent))
1174 // Check templated tracing methods by checking the template instantiations.
1175 // Specialized templates are handled as ordinary classes.
1176 if (ClassTemplateDecl* tmpl =
1177 parent->record()->getDescribedClassTemplate()) {
1178 for (ClassTemplateDecl::spec_iterator it = tmpl->spec_begin();
1179 it != tmpl->spec_end();
1181 // Check trace using each template instantiation as the holder.
1182 if (IsTemplateInstantiation(*it))
1183 CheckTraceOrDispatchMethod(cache_.Lookup(*it), method);
// Non-templated case: check against the declaring class itself.
1188 CheckTraceOrDispatchMethod(parent, method);
1191 // Determine what type of tracing method this is (dispatch or trace).
// A traceAfterDispatch method, or a plain trace on a class without a
// trace dispatcher, is checked as an actual trace method.
1192 void CheckTraceOrDispatchMethod(RecordInfo* parent, CXXMethodDecl* method) {
1193 bool isTraceAfterDispatch;
1194 if (Config::IsTraceMethod(method, &isTraceAfterDispatch)) {
1195 if (isTraceAfterDispatch || !parent->GetTraceDispatchMethod()) {
1196 CheckTraceMethod(parent, method, isTraceAfterDispatch);
1198 // Dispatch methods are checked when we identify subclasses.
1202 // Check an actual trace method.
// Verifies that |trace| does not override an inherited non-virtual trace,
// then traverses its body (CheckTraceVisitor) and reports bases and
// fields that the traversal did not mark as properly traced.
1203 void CheckTraceMethod(RecordInfo* parent,
1204 CXXMethodDecl* trace,
1205 bool isTraceAfterDispatch) {
1206 // A trace method must not override any non-virtual trace methods.
1207 if (!isTraceAfterDispatch) {
1208 for (RecordInfo::Bases::iterator it = parent->GetBases().begin();
1209 it != parent->GetBases().end();
1211 RecordInfo* base = it->second.info();
1212 if (CXXMethodDecl* other = base->InheritsNonVirtualTrace())
1213 ReportOverriddenNonVirtualTrace(parent, trace, other);
// Walk the trace body; the visitor marks which bases/fields it traces.
1217 CheckTraceVisitor visitor(trace, parent);
1218 visitor.TraverseCXXMethodDecl(trace);
// Report each base the trace method failed to trace.
1220 for (RecordInfo::Bases::iterator it = parent->GetBases().begin();
1221 it != parent->GetBases().end();
1223 if (!it->second.IsProperlyTraced())
1224 ReportBaseRequiresTracing(parent, trace, it->first);
// Untraced fields are reported once, collectively.
1227 for (RecordInfo::Fields::iterator it = parent->GetFields().begin();
1228 it != parent->GetFields().end();
1230 if (!it->second.IsProperlyTraced()) {
1231 // Discontinue once an untraced-field error is found.
1232 ReportFieldsRequireTracing(parent, trace);
// Emits this class and its ownership edges (bases and fields) to the JSON
// points-to graph via json_. Uses a local RecursiveEdgeVisitor subclass to
// compute, for each field, the pointer kind and the liveness kind of each
// path from the field to a GC-managed value.
// NOTE(review): this excerpt elides several lines (gaps in the embedded
// numbering), including the json_ null check and parts of DumpEdge/loops.
1238 void DumpClass(RecordInfo* info) {
// Emit the node for the class itself: qualified name and source location.
1242 json_->OpenObject();
1243 json_->Write("name", info->record()->getQualifiedNameAsString());
1244 json_->Write("loc", GetLocString(info->record()->getLocStart()));
1245 json_->CloseObject();
// Local visitor that serializes one edge per call.
1247 class DumpEdgeVisitor : public RecursiveEdgeVisitor {
1249 DumpEdgeVisitor(JsonWriter* json) : json_(json) {}
// Writes one src->dst edge object with label, liveness kind, pointer
// kind, and location.
1250 void DumpEdge(RecordInfo* src,
1253 const Edge::LivenessKind& kind,
1254 const string& loc) {
1255 json_->OpenObject();
1256 json_->Write("src", src->record()->getQualifiedNameAsString());
1257 json_->Write("dst", dst->record()->getQualifiedNameAsString());
1258 json_->Write("lbl", lbl);
1259 json_->Write("kind", kind);
1260 json_->Write("loc", loc);
// Classify the innermost pointer wrapper of the edge being dumped.
1263 Parent()->IsRawPtr() ? "raw" :
1264 Parent()->IsRefPtr() ? "ref" :
1265 Parent()->IsOwnPtr() ? "own" :
1266 (Parent()->IsMember() ||
1267 Parent()->IsWeakMember()) ? "mem" :
1269 json_->CloseObject();
// Visits a field's edge to emit edges for every value it can reach.
1272 void DumpField(RecordInfo* src, FieldPoint* point, const string& loc) {
1276 point_->edge()->Accept(this);
1279 void AtValue(Value* e) override {
1280 // The liveness kind of a path from the point to this value
1281 // is given by the innermost place that is non-strong.
1282 Edge::LivenessKind kind = Edge::kStrong;
1283 if (Config::IsIgnoreCycleAnnotated(point_->field())) {
// Scan the visitation context inside-out for the first non-strong hop.
1286 for (Context::iterator it = context().begin();
1287 it != context().end();
1289 Edge::LivenessKind pointer_kind = (*it)->Kind();
1290 if (pointer_kind != Edge::kStrong) {
1291 kind = pointer_kind;
1297 src_, e->value(), point_->field()->getNameAsString(), kind, loc_);
1307 DumpEdgeVisitor visitor(json_);
// Dump one edge per base class.
1309 RecordInfo::Bases& bases = info->GetBases();
1310 for (RecordInfo::Bases::iterator it = bases.begin();
1313 visitor.DumpEdge(info,
1317 GetLocString(it->second.spec().getLocStart()));
// Dump the reachable edges of every field.
1320 RecordInfo::Fields& fields = info->GetFields();
1321 for (RecordInfo::Fields::iterator it = fields.begin();
1324 visitor.DumpField(info,
1326 GetLocString(it->second.field()->getLocStart()));
1330 // Adds either a warning or error, based on the current handling of -Werror.
// Diagnostics are emitted as hard errors only when -Werror is in effect.
1331 DiagnosticsEngine::Level getErrorLevel() {
1332 return diagnostic_.getWarningsAsErrors() ? DiagnosticsEngine::Error
1333 : DiagnosticsEngine::Warning;
// Formats |loc| as "file:line:column" using the presumed (user-visible)
// location. NOTE(review): the invalid-location return value and the
// loc_str declaration/return are elided in this excerpt.
1336 const string GetLocString(SourceLocation loc) {
1337 const SourceManager& source_manager = instance_.getSourceManager();
1338 PresumedLoc ploc = source_manager.getPresumedLoc(loc);
1339 if (ploc.isInvalid())
1342 llvm::raw_string_ostream OS(loc_str);
1343 OS << ploc.getFilename()
1344 << ":" << ploc.getLine()
1345 << ":" << ploc.getColumn();
// Returns whether |record| should be skipped entirely: outside the
// checked namespaces, an explicitly ignored class, or located in an
// ignored directory.
1349 bool IsIgnored(RecordInfo* record) {
1351 !InCheckedNamespace(record) ||
1352 IsIgnoredClass(record) ||
1353 InIgnoredDirectory(record);
// Returns whether the class name is exempt from checking, either by the
// SameSizeAs prefix convention or via the plugin's ignored-classes option.
1356 bool IsIgnoredClass(RecordInfo* info) {
1357 // Ignore any class prefixed by SameSizeAs. These are used in
1358 // Blink to verify class sizes and don't need checking.
1359 const string SameSizeAs = "SameSizeAs";
1360 if (info->name().compare(0, SameSizeAs.size(), SameSizeAs) == 0)
1362 return options_.ignored_classes.find(info->name()) !=
1363 options_.ignored_classes.end();
// Returns whether the record's source file path contains any of the
// configured ignored-directory substrings.
1366 bool InIgnoredDirectory(RecordInfo* info) {
1368 if (!GetFilename(info->record()->getLocStart(), &filename))
1369 return false; // TODO: should we ignore non-existing file locations?
// Substring match: any configured directory fragment ignores the file.
1370 std::vector<string>::iterator it = options_.ignored_directories.begin();
1371 for (; it != options_.ignored_directories.end(); ++it)
1372 if (filename.find(*it) != string::npos)
// Returns whether the record is declared (at any nesting level) inside
// one of the namespaces the plugin is configured to check.
1377 bool InCheckedNamespace(RecordInfo* info) {
// Walk enclosing decl contexts up to the translation unit.
1380 for (DeclContext* context = info->record()->getDeclContext();
1381 !context->isTranslationUnit();
1382 context = context->getParent()) {
1383 if (NamespaceDecl* decl = dyn_cast<NamespaceDecl>(context)) {
1384 if (options_.checked_namespaces.find(decl->getNameAsString()) !=
1385 options_.checked_namespaces.end()) {
// Resolves |loc| to its spelling file name into |*filename|. Returns
// false for invalid (not-in-source) locations.
1393 bool GetFilename(SourceLocation loc, string* filename) {
1394 const SourceManager& source_manager = instance_.getSourceManager();
// Use the spelling location so macro expansions map to the real file.
1395 SourceLocation spelling_location = source_manager.getSpellingLoc(loc);
1396 PresumedLoc ploc = source_manager.getPresumedLoc(spelling_location);
1397 if (ploc.isInvalid()) {
1398 // If we're in an invalid location, we're looking at things that aren't
1399 // actually stated in the source.
1402 *filename = ploc.getFilename();
// Emits kClassMustLeftMostlyDeriveGC at the class's declaration.
1406 void ReportClassMustLeftMostlyDeriveGC(RecordInfo* info) {
1407 SourceLocation loc = info->record()->getInnerLocStart();
1408 SourceManager& manager = instance_.getSourceManager();
1409 FullSourceLoc full_loc(loc, manager);
1410 diagnostic_.Report(full_loc, diag_class_must_left_mostly_derive_gc_)
// Emits kClassRequiresTraceMethod at the class, then notes each base
// and field whose tracing requirement caused it.
1414 void ReportClassRequiresTraceMethod(RecordInfo* info) {
1415 SourceLocation loc = info->record()->getInnerLocStart();
1416 SourceManager& manager = instance_.getSourceManager();
1417 FullSourceLoc full_loc(loc, manager);
1418 diagnostic_.Report(full_loc, diag_class_requires_trace_method_)
// Attach a note for every base that needs tracing.
1421 for (RecordInfo::Bases::iterator it = info->GetBases().begin();
1422 it != info->GetBases().end();
1424 if (it->second.NeedsTracing().IsNeeded())
1425 NoteBaseRequiresTracing(&it->second);
// Attach a note for every untraced field.
1428 for (RecordInfo::Fields::iterator it = info->GetFields().begin();
1429 it != info->GetFields().end();
1431 if (!it->second.IsProperlyTraced())
1432 NoteFieldRequiresTracing(info, it->first);
// Emits kBaseRequiresTracing at the trace method of |derived| for the
// untraced base |base|.
1436 void ReportBaseRequiresTracing(RecordInfo* derived,
1437 CXXMethodDecl* trace,
1438 CXXRecordDecl* base) {
1439 SourceLocation loc = trace->getLocStart();
1440 SourceManager& manager = instance_.getSourceManager();
1441 FullSourceLoc full_loc(loc, manager);
1442 diagnostic_.Report(full_loc, diag_base_requires_tracing_)
1443 << base << derived->record();
// Emits kFieldsRequireTracing at the trace method, with a note per
// untraced field.
1446 void ReportFieldsRequireTracing(RecordInfo* info, CXXMethodDecl* trace) {
1447 SourceLocation loc = trace->getLocStart();
1448 SourceManager& manager = instance_.getSourceManager();
1449 FullSourceLoc full_loc(loc, manager);
1450 diagnostic_.Report(full_loc, diag_fields_require_tracing_)
1452 for (RecordInfo::Fields::iterator it = info->GetFields().begin();
1453 it != info->GetFields().end();
1455 if (!it->second.IsProperlyTraced())
1456 NoteFieldRequiresTracing(info, it->first);
// Emits kClassContainsInvalidFields at the class, then attaches the
// note matching each field error's kind (raw/ref/own pointer to GC
// managed, Member in unmanaged class, heap-to-stack pointer, or
// part-object of a GC-derived class).
1460 void ReportClassContainsInvalidFields(RecordInfo* info,
1461 CheckFieldsVisitor::Errors* errors) {
1462 SourceLocation loc = info->record()->getLocStart();
1463 SourceManager& manager = instance_.getSourceManager();
1464 FullSourceLoc full_loc(loc, manager);
1465 diagnostic_.Report(full_loc, diag_class_contains_invalid_fields_)
1467 for (CheckFieldsVisitor::Errors::iterator it = errors->begin();
1468 it != errors->end();
// Map the error kind to the corresponding note diagnostic id.
1471 if (it->second == CheckFieldsVisitor::kRawPtrToGCManaged) {
1472 error = diag_raw_ptr_to_gc_managed_class_note_;
1473 } else if (it->second == CheckFieldsVisitor::kRefPtrToGCManaged) {
1474 error = diag_ref_ptr_to_gc_managed_class_note_;
1475 } else if (it->second == CheckFieldsVisitor::kOwnPtrToGCManaged) {
1476 error = diag_own_ptr_to_gc_managed_class_note_;
1477 } else if (it->second == CheckFieldsVisitor::kMemberInUnmanaged) {
1478 error = diag_member_in_unmanaged_class_note_;
1479 } else if (it->second == CheckFieldsVisitor::kPtrFromHeapToStack) {
1480 error = diag_stack_allocated_field_note_;
1481 } else if (it->second == CheckFieldsVisitor::kGCDerivedPartObject) {
1482 error = diag_part_object_to_gc_derived_class_note_;
// Unhandled enumerator means the visitor and this mapping diverged.
1484 assert(false && "Unknown field error");
1486 NoteField(it->first, error);
// Emits kClassContainsGCRoot once per error path: the head of each path
// is the offending field, intermediate hops get part-object notes, and
// the terminal field gets the contains-GC-root note.
1490 void ReportClassContainsGCRoots(RecordInfo* info,
1491 CheckGCRootsVisitor::Errors* errors) {
1492 SourceLocation loc = info->record()->getLocStart();
1493 SourceManager& manager = instance_.getSourceManager();
1494 FullSourceLoc full_loc(loc, manager);
1495 for (CheckGCRootsVisitor::Errors::iterator it = errors->begin();
1496 it != errors->end();
1498 CheckGCRootsVisitor::RootPath::iterator path = it->begin();
1499 FieldPoint* point = *path;
1500 diagnostic_.Report(full_loc, diag_class_contains_gc_root_)
1501 << info->record() << point->field();
// Walk the rest of the path, noting each containing part-object.
1502 while (++path != it->end()) {
1503 NotePartObjectContainsGCRoot(point);
1506 NoteFieldContainsGCRoot(point);
// Emits kFinalizerAccessesFinalizedField for each offending access site
// in the destructor, with a note at the accessed field's declaration.
1510 void ReportFinalizerAccessesFinalizedFields(
1511 CXXMethodDecl* dtor,
1512 CheckFinalizerVisitor::Errors* fields) {
1513 for (CheckFinalizerVisitor::Errors::iterator it = fields->begin();
1514 it != fields->end();
// Report at the access expression's location, not the field's.
1516 SourceLocation loc = it->first->getLocStart();
1517 SourceManager& manager = instance_.getSourceManager();
1518 FullSourceLoc full_loc(loc, manager);
1519 diagnostic_.Report(full_loc, diag_finalizer_accesses_finalized_field_)
1520 << dtor << it->second->field();
1521 NoteField(it->second, diag_finalized_field_note_);
// Emits kClassRequiresFinalization at the class's declaration.
1525 void ReportClassRequiresFinalization(RecordInfo* info) {
1526 SourceLocation loc = info->record()->getInnerLocStart();
1527 SourceManager& manager = instance_.getSourceManager();
1528 FullSourceLoc full_loc(loc, manager);
1529 diagnostic_.Report(full_loc, diag_class_requires_finalization_)
// Reports that |trace| overrides a non-virtual trace |overridden|, with
// a note at the overridden method's declaration.
1533 void ReportOverriddenNonVirtualTrace(RecordInfo* info,
1534 CXXMethodDecl* trace,
1535 CXXMethodDecl* overridden) {
1536 SourceLocation loc = trace->getLocStart();
1537 SourceManager& manager = instance_.getSourceManager();
1538 FullSourceLoc full_loc(loc, manager);
1539 diagnostic_.Report(full_loc, diag_overridden_non_virtual_trace_)
1540 << info->record() << overridden->getParent();
1541 NoteOverriddenNonVirtualTrace(overridden);
// Reports a missing trace dispatch method on the dispatch base class.
1544 void ReportMissingTraceDispatchMethod(RecordInfo* info) {
1545 ReportMissingDispatchMethod(info, diag_missing_trace_dispatch_method_);
// Reports a missing finalize dispatch method on the dispatch base class.
1548 void ReportMissingFinalizeDispatchMethod(RecordInfo* info) {
1549 ReportMissingDispatchMethod(info, diag_missing_finalize_dispatch_method_);
// Shared helper: emits the given missing-dispatch-method diagnostic at
// the class's declaration.
1552 void ReportMissingDispatchMethod(RecordInfo* info, unsigned error) {
1553 SourceLocation loc = info->record()->getInnerLocStart();
1554 SourceManager& manager = instance_.getSourceManager();
1555 FullSourceLoc full_loc(loc, manager);
1556 diagnostic_.Report(full_loc, error) << info->record();
// Reports a class that mixes a vtable with manual dispatch, noting the
// offending manual dispatch method.
1559 void ReportVirtualAndManualDispatch(RecordInfo* info,
1560 CXXMethodDecl* dispatch) {
1561 SourceLocation loc = info->record()->getInnerLocStart();
1562 SourceManager& manager = instance_.getSourceManager();
1563 FullSourceLoc full_loc(loc, manager);
1564 diagnostic_.Report(full_loc, diag_virtual_and_manual_dispatch_)
1566 NoteManualDispatchMethod(dispatch);
// Reports a trace dispatcher that never dispatches to |receiver|.
1569 void ReportMissingTraceDispatch(const FunctionDecl* dispatch,
1570 RecordInfo* receiver) {
1571 ReportMissingDispatch(dispatch, receiver, diag_missing_trace_dispatch_);
// Reports a finalize dispatcher that never dispatches to |receiver|.
1574 void ReportMissingFinalizeDispatch(const FunctionDecl* dispatch,
1575 RecordInfo* receiver) {
1576 ReportMissingDispatch(dispatch, receiver, diag_missing_finalize_dispatch_);
// Shared helper: emits the given diagnostic at the dispatcher's location.
1579 void ReportMissingDispatch(const FunctionDecl* dispatch,
1580 RecordInfo* receiver,
1582 SourceLocation loc = dispatch->getLocStart();
1583 SourceManager& manager = instance_.getSourceManager();
1584 FullSourceLoc full_loc(loc, manager);
1585 diagnostic_.Report(full_loc, error) << receiver->record();
// Reports a stack-allocated class deriving a non-stack-allocated base,
// at the base specifier's location.
1588 void ReportDerivesNonStackAllocated(RecordInfo* info, BasePoint* base) {
1589 SourceLocation loc = base->spec().getLocStart();
1590 SourceManager& manager = instance_.getSourceManager();
1591 FullSourceLoc full_loc(loc, manager);
1592 diagnostic_.Report(full_loc, diag_derives_non_stack_allocated_)
1593 << info->record() << base->info()->record();
// Reports a GC class overriding operator new, at the operator's location.
1596 void ReportClassOverridesNew(RecordInfo* info, CXXMethodDecl* newop) {
1597 SourceLocation loc = newop->getLocStart();
1598 SourceManager& manager = instance_.getSourceManager();
1599 FullSourceLoc full_loc(loc, manager);
1600 diagnostic_.Report(full_loc, diag_class_overrides_new_) << info->record();
// Reports a class declaring a pure-virtual trace method, at that
// method's location.
1603 void ReportClassDeclaresPureVirtualTrace(RecordInfo* info,
1604 CXXMethodDecl* trace) {
1605 SourceLocation loc = trace->getLocStart();
1606 SourceManager& manager = instance_.getSourceManager();
1607 FullSourceLoc full_loc(loc, manager);
1608 diagnostic_.Report(full_loc, diag_class_declares_pure_virtual_trace_)
// Reports that the left-most base of |derived| must be polymorphic, at
// the base class's declaration.
1612 void ReportLeftMostBaseMustBePolymorphic(RecordInfo* derived,
1613 CXXRecordDecl* base) {
1614 SourceLocation loc = base->getLocStart();
1615 SourceManager& manager = instance_.getSourceManager();
1616 FullSourceLoc full_loc(loc, manager);
1617 diagnostic_.Report(full_loc, diag_left_most_base_must_be_polymorphic_)
1618 << base << derived->record();
// Reports that base class |base| of |derived| must declare a virtual
// trace, at the base class's declaration.
1621 void ReportBaseClassMustDeclareVirtualTrace(RecordInfo* derived,
1622 CXXRecordDecl* base) {
1623 SourceLocation loc = base->getLocStart();
1624 SourceManager& manager = instance_.getSourceManager();
1625 FullSourceLoc full_loc(loc, manager);
1626 diagnostic_.Report(full_loc, diag_base_class_must_declare_virtual_trace_)
1627 << base << derived->record();
// Note pointing at a manually-declared dispatch method.
1630 void NoteManualDispatchMethod(CXXMethodDecl* dispatch) {
1631 SourceLocation loc = dispatch->getLocStart();
1632 SourceManager& manager = instance_.getSourceManager();
1633 FullSourceLoc full_loc(loc, manager);
1634 diagnostic_.Report(full_loc, diag_manual_dispatch_method_note_) << dispatch;
// Note pointing at a base specifier whose class requires tracing.
1637 void NoteBaseRequiresTracing(BasePoint* base) {
1638 SourceLocation loc = base->spec().getLocStart();
1639 SourceManager& manager = instance_.getSourceManager();
1640 FullSourceLoc full_loc(loc, manager);
1641 diagnostic_.Report(full_loc, diag_base_requires_tracing_note_)
1642 << base->info()->record();
// Note pointing at an untraced field's declaration.
1645 void NoteFieldRequiresTracing(RecordInfo* holder, FieldDecl* field) {
1646 NoteField(field, diag_field_requires_tracing_note_);
// Note pointing at a part-object field along a path to a GC root.
1649 void NotePartObjectContainsGCRoot(FieldPoint* point) {
1650 FieldDecl* field = point->field();
1651 SourceLocation loc = field->getLocStart();
1652 SourceManager& manager = instance_.getSourceManager();
1653 FullSourceLoc full_loc(loc, manager);
1654 diagnostic_.Report(full_loc, diag_part_object_contains_gc_root_note_)
1655 << field << field->getParent();
// Note pointing at the field that actually contains the GC root.
1658 void NoteFieldContainsGCRoot(FieldPoint* point) {
1659 NoteField(point, diag_field_contains_gc_root_note_);
// Note pointing at a user-declared destructor (a finalization cause).
1662 void NoteUserDeclaredDestructor(CXXMethodDecl* dtor) {
1663 SourceLocation loc = dtor->getLocStart();
1664 SourceManager& manager = instance_.getSourceManager();
1665 FullSourceLoc full_loc(loc, manager);
1666 diagnostic_.Report(full_loc, diag_user_declared_destructor_note_);
// Note pointing at a user-declared finalizer (finalize dispatch method).
1669 void NoteUserDeclaredFinalizer(CXXMethodDecl* dtor) {
1670 SourceLocation loc = dtor->getLocStart();
1671 SourceManager& manager = instance_.getSourceManager();
1672 FullSourceLoc full_loc(loc, manager);
1673 diagnostic_.Report(full_loc, diag_user_declared_finalizer_note_);
// Note pointing at a base specifier whose class needs finalization.
1676 void NoteBaseRequiresFinalization(BasePoint* base) {
1677 SourceLocation loc = base->spec().getLocStart();
1678 SourceManager& manager = instance_.getSourceManager();
1679 FullSourceLoc full_loc(loc, manager);
1680 diagnostic_.Report(full_loc, diag_base_requires_finalization_note_)
1681 << base->info()->record();
// Convenience overload: forwards a FieldPoint to the FieldDecl overload.
1684 void NoteField(FieldPoint* point, unsigned note) {
1685 NoteField(point->field(), note);
// Generic field note: emits the given note diagnostic at the field's
// declaration, streaming the field as the diagnostic argument.
1688 void NoteField(FieldDecl* field, unsigned note) {
1689 SourceLocation loc = field->getLocStart();
1690 SourceManager& manager = instance_.getSourceManager();
1691 FullSourceLoc full_loc(loc, manager);
1692 diagnostic_.Report(full_loc, note) << field;
// Note pointing at the non-virtual trace method being overridden.
1695 void NoteOverriddenNonVirtualTrace(CXXMethodDecl* overridden) {
1696 SourceLocation loc = overridden->getLocStart();
1697 SourceManager& manager = instance_.getSourceManager();
1698 FullSourceLoc full_loc(loc, manager);
1699 diagnostic_.Report(full_loc, diag_overridden_non_virtual_trace_note_)
// Custom diagnostic ids for the main (error-level) checks, registered
// with the DiagnosticsEngine.
1703 unsigned diag_class_must_left_mostly_derive_gc_;
1704 unsigned diag_class_requires_trace_method_;
1705 unsigned diag_base_requires_tracing_;
1706 unsigned diag_fields_require_tracing_;
1707 unsigned diag_class_contains_invalid_fields_;
1708 unsigned diag_class_contains_gc_root_;
1709 unsigned diag_class_requires_finalization_;
1710 unsigned diag_finalizer_accesses_finalized_field_;
1711 unsigned diag_overridden_non_virtual_trace_;
1712 unsigned diag_missing_trace_dispatch_method_;
1713 unsigned diag_missing_finalize_dispatch_method_;
1714 unsigned diag_virtual_and_manual_dispatch_;
1715 unsigned diag_missing_trace_dispatch_;
1716 unsigned diag_missing_finalize_dispatch_;
1717 unsigned diag_derives_non_stack_allocated_;
1718 unsigned diag_class_overrides_new_;
1719 unsigned diag_class_declares_pure_virtual_trace_;
1720 unsigned diag_left_most_base_must_be_polymorphic_;
1721 unsigned diag_base_class_must_declare_virtual_trace_;
// Diagnostic ids for the note-level follow-ups attached to the errors
// above (used by the Note* helpers).
1723 unsigned diag_base_requires_tracing_note_;
1724 unsigned diag_field_requires_tracing_note_;
1725 unsigned diag_raw_ptr_to_gc_managed_class_note_;
1726 unsigned diag_ref_ptr_to_gc_managed_class_note_;
1727 unsigned diag_own_ptr_to_gc_managed_class_note_;
1728 unsigned diag_stack_allocated_field_note_;
1729 unsigned diag_member_in_unmanaged_class_note_;
1730 unsigned diag_part_object_to_gc_derived_class_note_;
1731 unsigned diag_part_object_contains_gc_root_note_;
1732 unsigned diag_field_contains_gc_root_note_;
1733 unsigned diag_finalized_field_note_;
1734 unsigned diag_user_declared_destructor_note_;
1735 unsigned diag_user_declared_finalizer_note_;
1736 unsigned diag_base_requires_finalization_note_;
1737 unsigned diag_field_requires_finalization_note_;
1738 unsigned diag_overridden_non_virtual_trace_note_;
1739 unsigned diag_manual_dispatch_method_note_;
// Compiler state and plugin configuration used throughout the consumer.
1741 CompilerInstance& instance_;
1742 DiagnosticsEngine& diagnostic_;
1743 BlinkGCPluginOptions options_;
// Frontend plugin action: creates the GC-checking AST consumer and
// parses the plugin's command-line options.
// NOTE(review): several lines are elided in this excerpt (gaps in the
// embedded numbering), including access specifiers, some ParseArgs
// branches, and the closing of the class.
1748 class BlinkGCPluginAction : public PluginASTAction {
1750 BlinkGCPluginAction() {}
1753 // Overridden from PluginASTAction:
1754 virtual ASTConsumer* CreateASTConsumer(CompilerInstance& instance,
1755 llvm::StringRef ref) {
1756 return new BlinkGCPluginConsumer(instance, options_);
// Parses -plugin-arg-* options; unknown arguments are reported to stderr.
1759 virtual bool ParseArgs(const CompilerInstance& instance,
1760 const std::vector<string>& args) {
1763 for (size_t i = 0; i < args.size() && parsed; ++i) {
1764 if (args[i] == "enable-oilpan") {
1765 options_.enable_oilpan = true;
1766 } else if (args[i] == "dump-graph") {
1767 options_.dump_graph = true;
1770 llvm::errs() << "Unknown blink-gc-plugin argument: " << args[i] << "\n";
// Options forwarded to the consumer created above.
1778 BlinkGCPluginOptions options_;
// Registers the action with clang's plugin registry so it can be loaded
// via -plugin. NOTE(review): the plugin-name argument line is elided in
// this excerpt.
1783 static FrontendPluginRegistry::Add<BlinkGCPluginAction> X(
1785 "Check Blink GC invariants");