// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "RecordInfo.h"

#include <string>

#include "Config.h"
#include "clang/Sema/Sema.h"

using namespace clang;
using std::string;

RecordInfo::RecordInfo(CXXRecordDecl* record, RecordCache* cache)
    : cache_(cache),
      record_(record),
      name_(record->getName()),
      fields_need_tracing_(TracingStatus::Unknown()),
      bases_(0),
      fields_(0),
      is_stack_allocated_(kNotComputed),
      is_non_newable_(kNotComputed),
      is_only_placement_newable_(kNotComputed),
      does_need_finalization_(kNotComputed),
      has_gc_mixin_methods_(kNotComputed),
      is_declaring_local_trace_(kNotComputed),
      determined_trace_methods_(false),
      trace_method_(0),
      trace_dispatch_method_(0),
      finalize_dispatch_method_(0),
      is_gc_derived_(false),
      directly_derived_gc_base_(nullptr) {}

RecordInfo::~RecordInfo() {
  delete fields_;
  delete bases_;
}

// Get |count| number of template arguments. Returns false if there
// are fewer than |count| arguments or any of the arguments are not
// of a valid Type structure. If |count| is non-positive, all
// arguments are collected.
bool RecordInfo::GetTemplateArgs(size_t count, TemplateArgs* output_args) {
  ClassTemplateSpecializationDecl* tmpl =
      dyn_cast<ClassTemplateSpecializationDecl>(record_);
  if (!tmpl)
    return false;
  const TemplateArgumentList& args = tmpl->getTemplateArgs();
  if (args.size() < count)
    return false;
  if (count <= 0)
    count = args.size();
  for (unsigned i = 0; i < count; ++i) {
    TemplateArgument arg = args[i];
    if (arg.getKind() == TemplateArgument::Type && !arg.getAsType().isNull()) {
      output_args->push_back(arg.getAsType().getTypePtr());
    } else {
      return false;
    }
  }
  return true;
}
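
// Usage sketch (hedged; |info| is a hypothetical RecordInfo for a
// HeapHashMap<WeakMember<K>, Member<V>> specialization):
//
//   TemplateArgs args;
//   if (info->GetTemplateArgs(2, &args)) {
//     const Type* key_type = args[0];    // WeakMember<K>
//     const Type* value_type = args[1];  // Member<V>
//   }
//
// Passing 0 collects every argument, including defaulted traits/allocator
// parameters.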

// Test if a record is a HeapAllocated collection.
bool RecordInfo::IsHeapAllocatedCollection() {
  if (!Config::IsGCCollection(name_) && !Config::IsWTFCollection(name_))
    return false;

  TemplateArgs args;
  if (GetTemplateArgs(0, &args)) {
    for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
      if (CXXRecordDecl* decl = (*it)->getAsCXXRecordDecl())
        if (decl->getName() == kHeapAllocatorName)
          return true;
    }
  }

  return Config::IsGCCollection(name_);
}
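
// Illustration (hedged sketch; the field declarations are hypothetical):
//
//   HeapVector<Member<Node>> on_heap_;  // allocator argument is HeapAllocator
//   Vector<int> off_heap_;              // backed by WTF's default allocator
//
// Only the first is a heap-allocated collection; a WTF collection whose
// allocator argument is not HeapAllocator is rejected by the loop above.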

bool RecordInfo::HasOptionalFinalizer() {
  if (!IsHeapAllocatedCollection())
    return false;
  // Heap collections may have a finalizer but it is optional (i.e. may be
  // delayed until FinalizeGarbageCollectedObject() gets called), unless there
  // is an inline buffer. Vector and Deque can have an inline buffer.
  if (name_ != "Vector" && name_ != "Deque" && name_ != "HeapVector" &&
      name_ != "HeapDeque")
    return true;
  ClassTemplateSpecializationDecl* tmpl =
      dyn_cast<ClassTemplateSpecializationDecl>(record_);
  // These collections require template specialization so tmpl should always be
  // non-null for valid code.
  if (!tmpl)
    return false;
  const TemplateArgumentList& args = tmpl->getTemplateArgs();
  if (args.size() < 2)
    return true;
  TemplateArgument arg = args[1];
  // The second template argument must be void or 0 so there is no inline
  // buffer.
  return (arg.getKind() == TemplateArgument::Type &&
          arg.getAsType()->isVoidType()) ||
         (arg.getKind() == TemplateArgument::Integral &&
          arg.getAsIntegral().getExtValue() == 0);
}
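
// Inline-buffer example (hedged; hypothetical field declarations):
//
//   HeapVector<Member<Node>> no_inline_;   // finalizer is optional
//   HeapVector<Member<Node>, 4> inlined_;  // inline capacity of 4 => must be
//                                          // finalized eagerly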

// Test if a record is derived from a garbage collected base.
bool RecordInfo::IsGCDerived() {
  // If already computed, return the known result.
  if (gc_base_names_.size())
    return is_gc_derived_;

  if (!record_->hasDefinition())
    return false;

  // The base classes are not themselves considered garbage collected objects.
  if (Config::IsGCBase(name_))
    return false;

  // Walk the inheritance tree to find GC base classes.
  walkBases();
  return is_gc_derived_;
}
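
// Example of a GC-derived class (hedged sketch using Blink's GarbageCollected
// base; not part of this file):
//
//   class Node : public GarbageCollected<Node> {
//    public:
//     void Trace(Visitor* visitor) const;
//   };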

// Test if a record is directly derived from a garbage collected base.
bool RecordInfo::IsGCDirectlyDerived() {
  // If already computed, return the known result.
  if (directly_derived_gc_base_)
    return true;

  if (!record_->hasDefinition())
    return false;

  // The base classes are not themselves considered garbage collected objects.
  if (Config::IsGCBase(name_))
    return false;

  for (const auto& it : record()->bases()) {
    const CXXRecordDecl* base = it.getType()->getAsCXXRecordDecl();
    if (!base)
      continue;

    if (Config::IsGCSimpleBase(base->getName())) {
      directly_derived_gc_base_ = &it;
      break;
    }
  }

  return directly_derived_gc_base_;
}

CXXRecordDecl* RecordInfo::GetDependentTemplatedDecl(const Type& type) {
  const TemplateSpecializationType* tmpl_type =
      type.getAs<TemplateSpecializationType>();
  if (!tmpl_type)
    return 0;

  TemplateDecl* tmpl_decl = tmpl_type->getTemplateName().getAsTemplateDecl();
  if (!tmpl_decl)
    return 0;

  if (CXXRecordDecl* record_decl =
          dyn_cast_or_null<CXXRecordDecl>(tmpl_decl->getTemplatedDecl()))
    return record_decl;

  // Type is an alias.
  TypeAliasDecl* alias_decl =
      dyn_cast<TypeAliasDecl>(tmpl_decl->getTemplatedDecl());
  assert(alias_decl);
  const Type* alias_type = alias_decl->getUnderlyingType().getTypePtr();
  if (CXXRecordDecl* record_decl = alias_type->getAsCXXRecordDecl())
    return record_decl;
  return GetDependentTemplatedDecl(*alias_type);
}

void RecordInfo::walkBases() {
  // This traversal is akin to CXXRecordDecl::forallBases()'s,
  // but without stepping over dependent bases -- these might also
  // have a "GC base name", so are to be included and considered.
  SmallVector<const CXXRecordDecl*, 8> queue;

  const CXXRecordDecl* base_record = record();
  while (true) {
    for (const auto& it : base_record->bases()) {
      const RecordType* type = it.getType()->getAs<RecordType>();
      CXXRecordDecl* base;
      if (!type)
        base = GetDependentTemplatedDecl(*it.getType());
      else {
        base = cast_or_null<CXXRecordDecl>(type->getDecl()->getDefinition());
        if (base)
          queue.push_back(base);
      }
      if (!base)
        continue;

      llvm::StringRef name = base->getName();
      if (Config::IsGCBase(name)) {
        gc_base_names_.push_back(std::string(name));
        is_gc_derived_ = true;
      }
    }

    if (queue.empty())
      break;
    base_record = queue.pop_back_val();  // not actually a queue.
  }
}
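
// Dependent-base example that this traversal must not skip (hedged sketch):
//
//   template <typename T>
//   class Holder : public GarbageCollected<Holder<T>> {};
//
// Inside the template, GarbageCollected<Holder<T>> is a dependent base with no
// RecordType, but its template name still matches a GC base name.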

// A GC mixin is a class that inherits from a GC mixin base and has
// not yet been "mixed in" with another GC base class.
bool RecordInfo::IsGCMixin() {
  if (!IsGCDerived() || !gc_base_names_.size())
    return false;

  for (const auto& gc_base : gc_base_names_) {
    // If it is not a mixin base we are done.
    if (!Config::IsGCMixinBase(gc_base))
      return false;
  }

  // This is a mixin if all GC bases are mixins.
  return true;
}
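
// Mixin example (hedged sketch):
//
//   class Observer : public GarbageCollectedMixin {};               // a mixin
//   class Widget : public GarbageCollected<Widget>, public Observer {};
//
// Observer is a GC mixin; Widget is not, because one of its GC bases
// (GarbageCollected<Widget>) is not a mixin base.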

// Test if a record is allocated on the managed heap.
bool RecordInfo::IsGCAllocated() {
  return IsGCDerived() || IsHeapAllocatedCollection();
}

bool RecordInfo::HasDefinition() {
  return record_->hasDefinition();
}

RecordInfo* RecordCache::Lookup(CXXRecordDecl* record) {
  // Ignore classes annotated with the GC_PLUGIN_IGNORE macro.
  if (!record || Config::IsIgnoreAnnotated(record))
    return 0;
  Cache::iterator it = cache_.find(record);
  if (it != cache_.end())
    return &it->second;
  return &cache_.insert(std::make_pair(record, RecordInfo(record, this)))
              .first->second;
}

bool RecordInfo::HasTypeAlias(std::string marker_name) const {
  for (Decl* decl : record_->decls()) {
    TypeAliasDecl* alias = dyn_cast<TypeAliasDecl>(decl);
    if (!alias)
      continue;
    if (alias->getName() == marker_name)
      return true;
  }
  return false;
}

bool RecordInfo::IsStackAllocated() {
  if (is_stack_allocated_ == kNotComputed) {
    is_stack_allocated_ = kFalse;
    if (HasTypeAlias("IsStackAllocatedTypeMarker")) {
      is_stack_allocated_ = kTrue;
    } else {
      for (Bases::iterator it = GetBases().begin(); it != GetBases().end();
           ++it) {
        if (it->second.info()->IsStackAllocated()) {
          is_stack_allocated_ = kTrue;
          break;
        }
      }
    }
  }
  return is_stack_allocated_;
}
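
// Example (hedged; assumes Blink's STACK_ALLOCATED() macro is what introduces
// the IsStackAllocatedTypeMarker alias checked above):
//
//   class TreeWalker {
//     STACK_ALLOCATED();
//     // ...
//   };
//
// Classes deriving from a stack-allocated base inherit the property.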

bool RecordInfo::IsNonNewable() {
  if (is_non_newable_ == kNotComputed) {
    bool deleted = false;
    bool all_deleted = true;
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName) {
        deleted = it->isDeleted();
        all_deleted = all_deleted && deleted;
      }
    }
    is_non_newable_ = (deleted && all_deleted) ? kTrue : kFalse;
  }
  return is_non_newable_;
}

bool RecordInfo::IsOnlyPlacementNewable() {
  if (is_only_placement_newable_ == kNotComputed) {
    bool placement = false;
    bool new_deleted = false;
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName) {
        if (it->getNumParams() == 1) {
          new_deleted = it->isDeleted();
        } else if (it->getNumParams() == 2) {
          placement = !it->isDeleted();
        }
      }
    }
    is_only_placement_newable_ = (placement && new_deleted) ? kTrue : kFalse;
  }
  return is_only_placement_newable_;
}
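
// "Only placement newable" pattern (hedged sketch): regular operator new is
// deleted while placement new remains usable, matching the one-parameter and
// two-parameter checks above.
//
//   class Node {
//    public:
//     void* operator new(size_t) = delete;
//     void* operator new(size_t, void* location) { return location; }
//   };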

CXXMethodDecl* RecordInfo::DeclaresNewOperator() {
  for (CXXRecordDecl::method_iterator it = record_->method_begin();
       it != record_->method_end();
       ++it) {
    if (it->getNameAsString() == kNewOperatorName && it->getNumParams() == 1)
      return *it;
  }
  return 0;
}

// An object requires a tracing method if it has any fields that need tracing
// or if it inherits from multiple bases that need tracing.
bool RecordInfo::RequiresTraceMethod() {
  if (IsStackAllocated())
    return false;

  if (GetTraceMethod())
    return true;

  unsigned bases_with_trace = 0;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.NeedsTracing().IsNeeded())
      ++bases_with_trace;
  }
  // If a single base has a Trace method, this type can inherit the Trace
  // method from that base. If more than a single base has a Trace method,
  // this type needs its own Trace method which will delegate to each of
  // the bases' Trace methods.
  if (bases_with_trace > 1)
    return true;

  GetFields();
  return fields_need_tracing_.IsNeeded();
}
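
// Example (hedged sketch): with two traced bases, the derived class needs its
// own Trace that delegates to both.
//
//   class Derived : public A, public B {  // A and B both need tracing
//    public:
//     void Trace(Visitor* visitor) const {
//       A::Trace(visitor);
//       B::Trace(visitor);
//     }
//   };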

// Get the actual tracing method (i.e., can be traceAfterDispatch if there is a
// dispatch method).
CXXMethodDecl* RecordInfo::GetTraceMethod() {
  DetermineTracingMethods();
  return trace_method_;
}

// Get the static trace dispatch method.
CXXMethodDecl* RecordInfo::GetTraceDispatchMethod() {
  DetermineTracingMethods();
  return trace_dispatch_method_;
}

CXXMethodDecl* RecordInfo::GetFinalizeDispatchMethod() {
  DetermineTracingMethods();
  return finalize_dispatch_method_;
}

const CXXBaseSpecifier* RecordInfo::GetDirectGCBase() {
  if (!IsGCDirectlyDerived())
    return nullptr;
  return directly_derived_gc_base_;
}

RecordInfo::Bases& RecordInfo::GetBases() {
  if (!bases_)
    bases_ = CollectBases();
  return *bases_;
}

bool RecordInfo::InheritsTrace() {
  if (GetTraceMethod())
    return true;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.info()->InheritsTrace())
      return true;
  }
  return false;
}

CXXMethodDecl* RecordInfo::InheritsNonVirtualTrace() {
  if (CXXMethodDecl* trace = GetTraceMethod())
    return trace->isVirtual() ? 0 : trace;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (CXXMethodDecl* trace = it->second.info()->InheritsNonVirtualTrace())
      return trace;
  }
  return 0;
}

bool RecordInfo::DeclaresGCMixinMethods() {
  DetermineTracingMethods();
  return has_gc_mixin_methods_;
}

bool RecordInfo::DeclaresLocalTraceMethod() {
  if (is_declaring_local_trace_ != kNotComputed)
    return is_declaring_local_trace_;
  DetermineTracingMethods();
  is_declaring_local_trace_ = trace_method_ ? kTrue : kFalse;
  if (is_declaring_local_trace_) {
    for (auto it = record_->method_begin();
         it != record_->method_end(); ++it) {
      if (*it == trace_method_) {
        is_declaring_local_trace_ = kTrue;
        break;
      }
    }
  }
  return is_declaring_local_trace_;
}

// A (non-virtual) class is considered abstract in Blink if it has
// no public constructors and no create methods.
bool RecordInfo::IsConsideredAbstract() {
  for (CXXRecordDecl::ctor_iterator it = record_->ctor_begin();
       it != record_->ctor_end();
       ++it) {
    if (!it->isCopyOrMoveConstructor() && it->getAccess() == AS_public)
      return false;
  }
  for (CXXRecordDecl::method_iterator it = record_->method_begin();
       it != record_->method_end();
       ++it) {
    if (it->getNameAsString() == kCreateName)
      return false;
  }
  return true;
}
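
// Example of a class considered abstract here (hedged sketch): no public
// constructor and no static Create() factory, so the plugin assumes it is
// never instantiated directly.
//
//   class Shape {
//    protected:
//     Shape();
//   };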

RecordInfo::Bases* RecordInfo::CollectBases() {
  // Compute the collection locally to avoid inconsistent states.
  Bases* bases = new Bases;
  if (!record_->hasDefinition())
    return bases;
  for (CXXRecordDecl::base_class_iterator it = record_->bases_begin();
       it != record_->bases_end();
       ++it) {
    const CXXBaseSpecifier& spec = *it;
    RecordInfo* info = cache_->Lookup(spec.getType());
    if (!info)
      continue;
    CXXRecordDecl* base = info->record();
    TracingStatus status = info->InheritsTrace()
                               ? TracingStatus::Needed()
                               : TracingStatus::Unneeded();
    bases->push_back(std::make_pair(base, BasePoint(spec, info, status)));
  }
  return bases;
}

RecordInfo::Fields& RecordInfo::GetFields() {
  if (!fields_)
    fields_ = CollectFields();
  return *fields_;
}

RecordInfo::Fields* RecordInfo::CollectFields() {
  // Compute the collection locally to avoid inconsistent states.
  Fields* fields = new Fields;
  if (!record_->hasDefinition())
    return fields;
  TracingStatus fields_status = TracingStatus::Unneeded();
  for (RecordDecl::field_iterator it = record_->field_begin();
       it != record_->field_end();
       ++it) {
    FieldDecl* field = *it;
    // Ignore fields annotated with the GC_PLUGIN_IGNORE macro.
    if (Config::IsIgnoreAnnotated(field))
      continue;
    // Check if the unexpanded type should be recorded; needed
    // to track iterator aliases only.
    const Type* unexpandedType = field->getType().getSplitUnqualifiedType().Ty;
    Edge* edge = CreateEdgeFromOriginalType(unexpandedType);
    if (!edge)
      edge = CreateEdge(field->getType().getTypePtrOrNull());
    if (edge) {
      fields_status = fields_status.LUB(edge->NeedsTracing(Edge::kRecursive));
      fields->insert(std::make_pair(field, FieldPoint(field, edge)));
    }
  }
  fields_need_tracing_ = fields_status;
  return fields;
}

void RecordInfo::DetermineTracingMethods() {
  if (determined_trace_methods_)
    return;
  determined_trace_methods_ = true;
  if (Config::IsGCBase(name_))
    return;
  CXXMethodDecl* trace = nullptr;
  CXXMethodDecl* trace_after_dispatch = nullptr;
  bool has_adjust_and_mark = false;
  bool has_is_heap_object_alive = false;
  for (Decl* decl : record_->decls()) {
    CXXMethodDecl* method = dyn_cast<CXXMethodDecl>(decl);
    if (!method) {
      if (FunctionTemplateDecl* func_template =
              dyn_cast<FunctionTemplateDecl>(decl))
        method = dyn_cast<CXXMethodDecl>(func_template->getTemplatedDecl());
    }
    if (!method)
      continue;
    switch (Config::GetTraceMethodType(method)) {
      case Config::TRACE_METHOD:
        trace = method;
        break;
      case Config::TRACE_AFTER_DISPATCH_METHOD:
        trace_after_dispatch = method;
        break;
      case Config::NOT_TRACE_METHOD:
        if (method->getNameAsString() == kFinalizeName) {
          finalize_dispatch_method_ = method;
        } else if (method->getNameAsString() == kAdjustAndMarkName) {
          has_adjust_and_mark = true;
        } else if (method->getNameAsString() == kIsHeapObjectAliveName) {
          has_is_heap_object_alive = true;
        }
        break;
    }
  }
  // Record if the class defines the two GCMixin methods.
  has_gc_mixin_methods_ =
      has_adjust_and_mark && has_is_heap_object_alive ? kTrue : kFalse;
  if (trace_after_dispatch) {
    trace_method_ = trace_after_dispatch;
    trace_dispatch_method_ = trace;
  } else {
    // TODO: Can we never have a dispatch method called trace without the same
    // class defining a traceAfterDispatch method?
    trace_method_ = trace;
    trace_dispatch_method_ = nullptr;
  }
  if (trace_dispatch_method_ && finalize_dispatch_method_)
    return;
  // If this class does not define dispatching methods, inherit them.
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    // TODO: Does it make sense to inherit multiple dispatch methods?
    if (CXXMethodDecl* dispatch = it->second.info()->GetTraceDispatchMethod()) {
      assert(!trace_dispatch_method_ && "Multiple trace dispatching methods");
      trace_dispatch_method_ = dispatch;
    }
    if (CXXMethodDecl* dispatch =
            it->second.info()->GetFinalizeDispatchMethod()) {
      assert(!finalize_dispatch_method_ &&
             "Multiple finalize dispatching methods");
      finalize_dispatch_method_ = dispatch;
    }
  }
}

// TODO: Add classes with a finalize() method that specialize FinalizerTrait.
bool RecordInfo::NeedsFinalization() {
  if (does_need_finalization_ == kNotComputed) {
    if (HasOptionalFinalizer()) {
      does_need_finalization_ = kFalse;
      return does_need_finalization_;
    }

    // Rely on hasNonTrivialDestructor(), but if the only
    // identifiable reason for it being true is the presence
    // of a safely ignorable class as a direct base,
    // or we're processing such an 'ignorable' class, then it does
    // not need finalization.
    does_need_finalization_ =
        record_->hasNonTrivialDestructor() ? kTrue : kFalse;
    if (!does_need_finalization_)
      return does_need_finalization_;

    CXXDestructorDecl* dtor = record_->getDestructor();
    if (dtor && dtor->isUserProvided())
      return does_need_finalization_;
    for (Fields::iterator it = GetFields().begin();
         it != GetFields().end();
         ++it) {
      if (it->second.edge()->NeedsFinalization())
        return does_need_finalization_;
    }
    for (Bases::iterator it = GetBases().begin();
         it != GetBases().end();
         ++it) {
      if (it->second.info()->NeedsFinalization())
        return does_need_finalization_;
    }
    // Destructor was non-trivial due to bases with destructors that
    // can be safely ignored. Hence, no need for finalization.
    does_need_finalization_ = kFalse;
  }
  return does_need_finalization_;
}
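
// Example (hedged sketch; CloseHandle() is a hypothetical member): a GC class
// with a user-provided destructor needs finalization, while one whose
// destructor is non-trivial only because of a safely ignorable base does not.
//
//   class Resource : public GarbageCollected<Resource> {
//    public:
//     ~Resource() { CloseHandle(); }  // user-provided => needs finalization
//   };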

// A class needs tracing if:
// - it is allocated on the managed heap,
// - it has a Trace method (i.e. the plugin assumes such a method was added
//   for a reason),
// - it is derived from a class that needs tracing, or
// - it contains fields that need tracing.
TracingStatus RecordInfo::NeedsTracing(Edge::NeedsTracingOption option) {
  if (IsGCAllocated())
    return TracingStatus::Needed();

  if (IsStackAllocated())
    return TracingStatus::Unneeded();

  if (GetTraceMethod())
    return TracingStatus::Needed();

  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.info()->NeedsTracing(option).IsNeeded())
      return TracingStatus::Needed();
  }

  if (option == Edge::kRecursive)
    GetFields();

  return fields_need_tracing_;
}
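
// Illustration (hedged; hypothetical declarations): a plain struct holding
// Member fields needs tracing even though it is not itself GC allocated,
// whereas a STACK_ALLOCATED() class never does, which is why stack allocation
// is checked before the fields.
//
//   struct Entry {
//     Member<Node> node;
//     void Trace(Visitor* visitor) const { visitor->Trace(node); }
//   };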

static bool isInStdNamespace(clang::Sema& sema, NamespaceDecl* ns) {
  while (ns) {
    if (sema.getStdNamespace()->InEnclosingNamespaceSetOf(ns))
      return true;
    ns = dyn_cast<NamespaceDecl>(ns->getParent());
  }
  return false;
}

Edge* RecordInfo::CreateEdgeFromOriginalType(const Type* type) {
  if (!type)
    return nullptr;

  // Look for "typedef ... iterator;".
  if (!isa<ElaboratedType>(type))
    return nullptr;
  const ElaboratedType* elaboratedType = cast<ElaboratedType>(type);
  if (!isa<TypedefType>(elaboratedType->getNamedType()))
    return nullptr;
  const TypedefType* typedefType =
      cast<TypedefType>(elaboratedType->getNamedType());
  std::string typeName = typedefType->getDecl()->getNameAsString();
  if (!Config::IsIterator(typeName))
    return nullptr;
  RecordInfo* info =
      cache_->Lookup(elaboratedType->getQualifier()->getAsType());

  bool on_heap = false;
  // Silently handle unknown types; the on-heap collection types will
  // have to be in scope for the declaration to compile, though.
  if (info) {
    on_heap = Config::IsGCCollection(info->name());
  }
  return new Iterator(info, on_heap);
}
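
// Field pattern matched above (hedged sketch):
//
//   class Walker {
//     HeapVector<Member<Node>>::iterator position_;
//   };
//
// The written (unexpanded) field type is an ElaboratedType naming a typedef
// called "iterator"; the qualifier (HeapVector<...>) determines whether the
// iterator points into an on-heap collection.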

Edge* RecordInfo::CreateEdge(const Type* type) {
  if (!type) {
    return 0;
  }

  if (type->isPointerType() || type->isReferenceType()) {
    if (Edge* ptr = CreateEdge(type->getPointeeType().getTypePtrOrNull()))
      return new RawPtr(ptr, type->isReferenceType());
    return 0;
  }

  RecordInfo* info = cache_->Lookup(type);

  // If the type is neither a pointer nor a C++ record we ignore it.
  if (!info) {
    return 0;
  }

  TemplateArgs args;

  if (Config::IsRefOrWeakPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new RefPtr(
          ptr, Config::IsRefPtr(info->name()) ? Edge::kStrong : Edge::kWeak);
    return 0;
  }

  if (Config::IsUniquePtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    // Check that this is std::unique_ptr.
    NamespaceDecl* ns =
        dyn_cast<NamespaceDecl>(info->record()->getDeclContext());
    clang::Sema& sema = cache_->instance().getSema();
    if (!isInStdNamespace(sema, ns))
      return 0;
    if (Edge* ptr = CreateEdge(args[0]))
      return new UniquePtr(ptr);
    return 0;
  }

  // Find the top-level namespace.
  NamespaceDecl* ns = dyn_cast<NamespaceDecl>(info->record()->getDeclContext());
  if (ns) {
    while (NamespaceDecl* outer_ns =
               dyn_cast<NamespaceDecl>(ns->getDeclContext())) {
      ns = outer_ns;
    }
  }
  auto ns_name = ns ? ns->getName() : "";

  if (Config::IsMember(info->name(), ns_name, info, &args)) {
    if (Edge* ptr = CreateEdge(args[0])) {
      return new Member(ptr);
    }
    return 0;
  }

  if (Config::IsWeakMember(info->name(), ns_name, info, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new WeakMember(ptr);
    return 0;
  }

  bool is_persistent = Config::IsPersistent(info->name(), ns_name, info, &args);
  if (is_persistent ||
      Config::IsCrossThreadPersistent(info->name(), ns_name, info, &args)) {
    if (Edge* ptr = CreateEdge(args[0])) {
      if (is_persistent)
        return new Persistent(ptr);
      else
        return new CrossThreadPersistent(ptr);
    }
    return 0;
  }

  if (Config::IsGCCollection(info->name()) ||
      Config::IsWTFCollection(info->name())) {
    bool on_heap = info->IsHeapAllocatedCollection();
    size_t count = Config::CollectionDimension(info->name());
    if (!info->GetTemplateArgs(count, &args))
      return 0;
    Collection* edge = new Collection(info, on_heap);
    for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
      if (Edge* member = CreateEdge(*it)) {
        edge->members().push_back(member);
      }
      // TODO: Handle the case where we fail to create an edge (e.g., if the
      // argument is a primitive type or just not fully known yet).
    }
    return edge;
  }

  if (Config::IsTraceWrapperV8Reference(info->name(), ns_name, info, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new TraceWrapperV8Reference(ptr);
    return 0;
  }

  return new Value(info);
}
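
// Edge-construction examples (hedged; the field declarations are
// hypothetical):
//
//   Member<Node> a_;               // Member(Value(Node))
//   WeakMember<Node> b_;           // WeakMember(Value(Node))
//   Node* c_;                      // RawPtr(Value(Node))
//   std::unique_ptr<Node> d_;      // UniquePtr(Value(Node))
//   HeapVector<Member<Node>> e_;   // Collection{ Member(Value(Node)) }
//   int count_;                    // no edge; not a pointer or C++ record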