node_id_traversal.cpp 8.0 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. #include "toolchain/check/node_id_traversal.h"
  5. #include <optional>
  6. #include <utility>
  7. #include <variant>
  8. #include "toolchain/check/deferred_definition_worklist.h"
  9. #include "toolchain/check/handle.h"
  10. #include "toolchain/check/thunk.h"
  11. namespace Carbon::Check {
  12. NodeIdTraversal::NodeIdTraversal(Context* context,
  13. llvm::raw_ostream* vlog_stream)
  14. : context_(context),
  15. next_deferred_definition_(&context->parse_tree()),
  16. worklist_(vlog_stream) {
  17. auto range = context->parse_tree().postorder();
  18. chunks_.push_back({.it = range.begin(),
  19. .end = range.end(),
  20. .next_definition = Parse::DeferredDefinitionIndex::None});
  21. }
// Produces the next parse node to check, or `std::nullopt` once the whole
// tree — including all deferred definitions replayed from the worklist — has
// been visited.
//
// The traversal maintains a stack of `Chunk`s, each a contiguous range of
// postorder parse nodes. Deferred definitions are skipped when first
// encountered and their state is suspended onto the worklist; performing a
// worklist task later pushes a new chunk that replays the definition's nodes.
auto NodeIdTraversal::Next() -> std::optional<Parse::NodeId> {
  while (true) {
    // If we're checking deferred definitions, find the next definition we
    // should check, restore its suspended state, and add a corresponding
    // `Chunk` to the top of the chunk list.
    if (chunks_.back().checking_deferred_definitions) {
      if (chunks_.back().next_worklist_index < worklist_.size()) {
        // Dispatch on the task's alternative type; note the index is advanced
        // before the task runs, since PerformTask may push further tasks.
        std::visit([&](auto& task) { PerformTask(std::move(task)); },
                   worklist_[chunks_.back().next_worklist_index++]);
        continue;
      }
      // Worklist is empty: discard the worklist items associated with this
      // chunk, and leave the scope.
      worklist_.truncate(chunks_.back().first_worklist_index);
      // We reach here when
      // `DeferredDefinitionScope::SuspendFinishedScopeAndPush` returns
      // `NonNestedWithWork`. In this case it's our responsibility to pop the
      // scope left behind by the `Handle*Definition` function for the
      // non-nested definition.
      context_->decl_name_stack().PopScope();
      chunks_.back().checking_deferred_definitions = false;
    }
    // If we're not checking deferred definitions, produce the next parse node
    // for this chunk. If we've run out of parse nodes, we're done with this
    // chunk of the parse tree.
    if (chunks_.back().it == chunks_.back().end) {
      auto old_chunk = chunks_.pop_back_val();
      // If we're out of chunks, then we're done entirely.
      if (chunks_.empty()) {
        worklist_.VerifyEmpty();
        return std::nullopt;
      }
      // Resume tracking deferred definitions from where the enclosing chunk
      // left off before this chunk was pushed.
      next_deferred_definition_.SkipTo(old_chunk.next_definition);
      continue;
    }
    auto node_id = *chunks_.back().it;
    // If we've reached the start of a deferred definition, skip to the end of
    // it, and track that we need to check it later.
    if (node_id == next_deferred_definition_.start_id()) {
      const auto& definition_info =
          context_->parse_tree().deferred_definitions().Get(
              next_deferred_definition_.index());
      worklist_.SuspendFunctionAndPush(*context_,
                                       next_deferred_definition_.index(),
                                       definition_info.start_id);
      // Continue type-checking the parse tree after the end of the definition.
      chunks_.back().it =
          Parse::Tree::PostorderIterator(definition_info.definition_id) + 1;
      next_deferred_definition_.SkipTo(definition_info.next_definition_index);
      continue;
    }
    ++chunks_.back().it;
    return node_id;
  }
}
  77. // Determines whether this node kind is the start of a deferred definition
  78. // scope.
  79. static auto IsStartOfDeferredDefinitionScope(Parse::NodeKind kind) -> bool {
  80. switch (kind) {
  81. case Parse::NodeKind::ClassDefinitionStart:
  82. case Parse::NodeKind::ImplDefinitionStart:
  83. case Parse::NodeKind::InterfaceDefinitionStart:
  84. case Parse::NodeKind::NamedConstraintDefinitionStart:
  85. // TODO: Mixins.
  86. return true;
  87. default:
  88. return false;
  89. }
  90. }
  91. // Determines whether this node kind is the end of a deferred definition scope.
  92. static auto IsEndOfDeferredDefinitionScope(Parse::NodeKind kind) -> bool {
  93. switch (kind) {
  94. case Parse::NodeKind::ClassDefinition:
  95. case Parse::NodeKind::ImplDefinition:
  96. case Parse::NodeKind::InterfaceDefinition:
  97. case Parse::NodeKind::NamedConstraintDefinition:
  98. // TODO: Mixins.
  99. return true;
  100. default:
  101. return false;
  102. }
  103. }
  104. // TODO: Investigate factoring out `IsStartOfDeferredDefinitionScope` and
  105. // `IsEndOfDeferredDefinitionScope` in order to make `NodeIdTraversal`
  106. // reusable.
  107. auto NodeIdTraversal::Handle(Parse::NodeKind parse_kind) -> void {
  108. // When we reach the start of a deferred definition scope, add a task to the
  109. // worklist to check future skipped definitions in the new context.
  110. if (IsStartOfDeferredDefinitionScope(parse_kind)) {
  111. if (worklist_.PushEnterDeferredDefinitionScope(*context_)) {
  112. // Track that we're within a new non-nested deferred definition scope.
  113. context_->deferred_definition_scope_stack().Push();
  114. }
  115. }
  116. // When we reach the end of a deferred definition scope, add a task to the
  117. // worklist to leave the scope. If this is not a nested scope, start
  118. // checking the deferred definitions now.
  119. if (IsEndOfDeferredDefinitionScope(parse_kind)) {
  120. auto scope_kind = worklist_.SuspendFinishedScopeAndPush(*context_);
  121. // At the end of a non-nested scope, define any pending thunks and clean up
  122. // the stack.
  123. if (scope_kind != DeferredDefinitionWorklist::FinishedScopeKind::Nested) {
  124. for (auto& thunk :
  125. context_->deferred_definition_scope_stack().PeekPendingThunks()) {
  126. BuildThunkDefinition(*context_, std::move(thunk));
  127. }
  128. context_->deferred_definition_scope_stack().Pop();
  129. }
  130. // If we have function definitions in this scope, process them next.
  131. if (scope_kind ==
  132. DeferredDefinitionWorklist::FinishedScopeKind::NonNestedWithWork) {
  133. chunks_.back().checking_deferred_definitions = true;
  134. chunks_.back().next_worklist_index = chunks_.back().first_worklist_index;
  135. }
  136. }
  137. }
  138. auto NodeIdTraversal::PerformTask(
  139. DeferredDefinitionWorklist::EnterNestedDeferredDefinitionScope&& enter)
  140. -> void {
  141. CARBON_CHECK(enter.suspended_name,
  142. "Entering a scope with no suspension information.");
  143. context_->decl_name_stack().Restore(std::move(*enter.suspended_name));
  144. }
  145. auto NodeIdTraversal::PerformTask(
  146. DeferredDefinitionWorklist::LeaveNestedDeferredDefinitionScope&& /*leave*/)
  147. -> void {
  148. context_->decl_name_stack().PopScope();
  149. }
// Resumes type-checking of a function definition that was skipped during the
// initial traversal: restores the suspended function's state, then pushes a
// chunk covering the definition's parse nodes so that `Next` replays them.
auto NodeIdTraversal::PerformTask(
    DeferredDefinitionWorklist::CheckSkippedDefinition&& parse_definition)
    -> void {
  // `definition_index` binds by reference into the task and is mutated below.
  auto& [definition_index, suspended_fn] = parse_definition;
  const auto& definition_info =
      context_->parse_tree().deferred_definitions().Get(definition_index);
  // Reinstate the suspended function's checking state before producing any of
  // its parse nodes.
  HandleFunctionDefinitionResume(*context_, definition_info.start_id,
                                 std::move(suspended_fn));
  // Replay the definition's node range. `begin() + 1` skips the range's first
  // node (`start_id`) — presumably covered by the resume call above; confirm
  // against `HandleFunctionDefinitionResume`. `next_definition` records where
  // the enclosing chunk's deferred-definition scan resumes when this chunk is
  // exhausted.
  auto range = Parse::Tree::PostorderIterator::MakeRange(
      definition_info.start_id, definition_info.definition_id);
  chunks_.push_back({.it = range.begin() + 1,
                     .end = range.end(),
                     .next_definition = next_deferred_definition_.index(),
                     .checking_deferred_definitions = false,
                     .first_worklist_index = worklist_.size(),
                     .next_worklist_index = worklist_.size()});
  // Advance to the deferred definition after this one; definitions nested
  // within this one are found from there while replaying the chunk.
  ++definition_index.index;
  next_deferred_definition_.SkipTo(definition_index);
}
  169. NodeIdTraversal::NextDeferredDefinitionCache::NextDeferredDefinitionCache(
  170. const Parse::Tree* tree)
  171. : tree_(tree) {
  172. SkipTo(Parse::DeferredDefinitionIndex(0));
  173. }
  174. // Set the specified deferred definition index as being the next one that
  175. // will be encountered.
  176. auto NodeIdTraversal::NextDeferredDefinitionCache::SkipTo(
  177. Parse::DeferredDefinitionIndex next_index) -> void {
  178. index_ = next_index;
  179. if (static_cast<size_t>(index_.index) ==
  180. tree_->deferred_definitions().size()) {
  181. start_id_ = Parse::NodeId::None;
  182. } else {
  183. start_id_ = tree_->deferred_definitions().Get(index_).start_id;
  184. }
  185. }
  186. } // namespace Carbon::Check