node_id_traversal.cpp 6.3 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. #include "toolchain/check/node_id_traversal.h"
  5. #include "toolchain/check/handle.h"
  6. namespace Carbon::Check {
  7. NodeIdTraversal::NodeIdTraversal(Context& context,
  8. llvm::raw_ostream* vlog_stream)
  9. : context_(context),
  10. next_deferred_definition_(&context.parse_tree()),
  11. worklist_(vlog_stream) {
  12. auto range = context.parse_tree().postorder();
  13. chunks_.push_back({.it = range.begin(),
  14. .end = range.end(),
  15. .next_definition = Parse::DeferredDefinitionIndex::None});
  16. }
// Returns the next parse node to check, or `std::nullopt` once the whole tree
// -- including all deferred definitions -- has been traversed. Deferred
// definitions are skipped on first encounter and revisited via the worklist.
auto NodeIdTraversal::Next() -> std::optional<Parse::NodeId> {
  while (true) {
    // If we're checking deferred definitions, find the next definition we
    // should check, restore its suspended state, and add a corresponding
    // `Chunk` to the top of the chunk list.
    if (chunks_.back().checking_deferred_definitions) {
      // Dispatch to the PerformTask overload matching the popped task's
      // alternative type.
      std::visit(
          [&](auto&& task) { PerformTask(std::forward<decltype(task)>(task)); },
          worklist_.Pop());
      continue;
    }

    // If we're not checking deferred definitions, produce the next parse node
    // for this chunk. If we've run out of parse nodes, we're done with this
    // chunk of the parse tree.
    if (chunks_.back().it == chunks_.back().end) {
      auto old_chunk = chunks_.pop_back_val();

      // If we're out of chunks, then we're done entirely.
      if (chunks_.empty()) {
        worklist_.VerifyEmpty();
        return std::nullopt;
      }

      // Resume tracking deferred definitions from where the outer chunk left
      // off when this chunk was pushed.
      next_deferred_definition_.SkipTo(old_chunk.next_definition);
      continue;
    }

    auto node_id = *chunks_.back().it;

    // If we've reached the start of a deferred definition, skip to the end of
    // it, and track that we need to check it later.
    if (node_id == next_deferred_definition_.start_id()) {
      const auto& definition_info =
          context_.parse_tree().deferred_definitions().Get(
              next_deferred_definition_.index());
      worklist_.SuspendFunctionAndPush(context_,
                                       next_deferred_definition_.index(),
                                       definition_info.start_id);

      // Continue type-checking the parse tree after the end of the definition.
      chunks_.back().it =
          Parse::Tree::PostorderIterator(definition_info.definition_id) + 1;
      next_deferred_definition_.SkipTo(definition_info.next_definition_index);
      continue;
    }

    ++chunks_.back().it;
    return node_id;
  }
}
  61. // Determines whether this node kind is the start of a deferred definition
  62. // scope.
  63. static auto IsStartOfDeferredDefinitionScope(Parse::NodeKind kind) -> bool {
  64. switch (kind) {
  65. case Parse::NodeKind::ClassDefinitionStart:
  66. case Parse::NodeKind::ImplDefinitionStart:
  67. case Parse::NodeKind::InterfaceDefinitionStart:
  68. case Parse::NodeKind::NamedConstraintDefinitionStart:
  69. // TODO: Mixins.
  70. return true;
  71. default:
  72. return false;
  73. }
  74. }
  75. // Determines whether this node kind is the end of a deferred definition scope.
  76. static auto IsEndOfDeferredDefinitionScope(Parse::NodeKind kind) -> bool {
  77. switch (kind) {
  78. case Parse::NodeKind::ClassDefinition:
  79. case Parse::NodeKind::ImplDefinition:
  80. case Parse::NodeKind::InterfaceDefinition:
  81. case Parse::NodeKind::NamedConstraintDefinition:
  82. // TODO: Mixins.
  83. return true;
  84. default:
  85. return false;
  86. }
  87. }
  88. // TODO: Investigate factoring out `IsStartOfDeferredDefinitionScope` and
  89. // `IsEndOfDeferredDefinitionScope` in order to make `NodeIdTraversal`
  90. // reusable.
  91. auto NodeIdTraversal::Handle(Parse::NodeKind parse_kind) -> void {
  92. // When we reach the start of a deferred definition scope, add a task to the
  93. // worklist to check future skipped definitions in the new context.
  94. if (IsStartOfDeferredDefinitionScope(parse_kind)) {
  95. worklist_.PushEnterDeferredDefinitionScope(context_);
  96. }
  97. // When we reach the end of a deferred definition scope, add a task to the
  98. // worklist to leave the scope. If this is not a nested scope, start
  99. // checking the deferred definitions now.
  100. if (IsEndOfDeferredDefinitionScope(parse_kind)) {
  101. chunks_.back().checking_deferred_definitions =
  102. worklist_.SuspendFinishedScopeAndPush(context_);
  103. }
  104. }
  105. auto NodeIdTraversal::PerformTask(
  106. DeferredDefinitionWorklist::EnterDeferredDefinitionScope&& enter) -> void {
  107. CARBON_CHECK(enter.suspended_name,
  108. "Entering a scope with no suspension information.");
  109. context_.decl_name_stack().Restore(std::move(*enter.suspended_name));
  110. }
  111. auto NodeIdTraversal::PerformTask(
  112. DeferredDefinitionWorklist::LeaveDeferredDefinitionScope&& leave) -> void {
  113. if (!leave.in_deferred_definition_scope) {
  114. // We're done with checking deferred definitions.
  115. chunks_.back().checking_deferred_definitions = false;
  116. }
  117. context_.decl_name_stack().PopScope();
  118. }
// Resumes type-checking of a previously skipped function definition: restores
// its suspended function state and pushes a chunk covering its parse nodes.
auto NodeIdTraversal::PerformTask(
    DeferredDefinitionWorklist::CheckSkippedDefinition&& parse_definition)
    -> void {
  auto& [definition_index, suspended_fn] = parse_definition;
  const auto& definition_info =
      context_.parse_tree().deferred_definitions().Get(definition_index);
  HandleFunctionDefinitionResume(context_, definition_info.start_id,
                                 std::move(suspended_fn));
  // Traverse the definition's nodes, skipping its start node (already handled
  // by the resume above, hence `begin() + 1`).
  auto range = Parse::Tree::PostorderIterator::MakeRange(
      definition_info.start_id, definition_info.definition_id);
  // Record the current next-definition index so the enclosing traversal can
  // resume from it when this chunk is popped. This must be captured before the
  // `SkipTo` below changes it.
  chunks_.push_back({.it = range.begin() + 1,
                     .end = range.end(),
                     .next_definition = next_deferred_definition_.index()});
  // Definitions nested within this one follow it immediately in the deferred
  // definition list, so the next candidate is simply the following index.
  ++definition_index.index;
  next_deferred_definition_.SkipTo(definition_index);
}
  135. NodeIdTraversal::NextDeferredDefinitionCache::NextDeferredDefinitionCache(
  136. const Parse::Tree* tree)
  137. : tree_(tree) {
  138. SkipTo(Parse::DeferredDefinitionIndex(0));
  139. }
  140. // Set the specified deferred definition index as being the next one that
  141. // will be encountered.
  142. auto NodeIdTraversal::NextDeferredDefinitionCache::SkipTo(
  143. Parse::DeferredDefinitionIndex next_index) -> void {
  144. index_ = next_index;
  145. if (static_cast<size_t>(index_.index) ==
  146. tree_->deferred_definitions().size()) {
  147. start_id_ = Parse::NodeId::None;
  148. } else {
  149. start_id_ = tree_->deferred_definitions().Get(index_).start_id;
  150. }
  151. }
  152. } // namespace Carbon::Check