node_id_traversal.cpp 6.4 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. #include "toolchain/check/node_id_traversal.h"
  5. #include <optional>
  6. #include <utility>
  7. #include <variant>
  8. #include "toolchain/check/handle.h"
  9. namespace Carbon::Check {
  10. NodeIdTraversal::NodeIdTraversal(Context* context,
  11. llvm::raw_ostream* vlog_stream)
  12. : context_(context),
  13. next_deferred_definition_(&context->parse_tree()),
  14. worklist_(vlog_stream) {
  15. auto range = context->parse_tree().postorder();
  16. chunks_.push_back({.it = range.begin(),
  17. .end = range.end(),
  18. .next_definition = Parse::DeferredDefinitionIndex::None});
  19. }
// Produces the next parse node to type-check, or `std::nullopt` once the
// whole tree (including all deferred definitions) has been traversed.
// Deferred definitions are skipped on first encounter and re-entered later
// as separate chunks pushed by `PerformTask`.
auto NodeIdTraversal::Next() -> std::optional<Parse::NodeId> {
  while (true) {
    // If we're checking deferred definitions, find the next definition we
    // should check, restore its suspended state, and add a corresponding
    // `Chunk` to the top of the chunk list.
    if (chunks_.back().checking_deferred_definitions) {
      // Dispatch on the task variant; each alternative has its own
      // `PerformTask` overload.
      std::visit(
          [&](auto&& task) { PerformTask(std::forward<decltype(task)>(task)); },
          worklist_.Pop());
      continue;
    }
    // If we're not checking deferred definitions, produce the next parse node
    // for this chunk. If we've run out of parse nodes, we're done with this
    // chunk of the parse tree.
    if (chunks_.back().it == chunks_.back().end) {
      auto old_chunk = chunks_.pop_back_val();
      // If we're out of chunks, then we're done entirely.
      if (chunks_.empty()) {
        worklist_.VerifyEmpty();
        return std::nullopt;
      }
      // Resume tracking the deferred definition that follows the chunk we
      // just finished.
      next_deferred_definition_.SkipTo(old_chunk.next_definition);
      continue;
    }
    auto node_id = *chunks_.back().it;
    // If we've reached the start of a deferred definition, skip to the end of
    // it, and track that we need to check it later.
    if (node_id == next_deferred_definition_.start_id()) {
      const auto& definition_info =
          context_->parse_tree().deferred_definitions().Get(
              next_deferred_definition_.index());
      // Suspend the function's checking state so it can be restored when the
      // definition is checked later.
      worklist_.SuspendFunctionAndPush(*context_,
                                       next_deferred_definition_.index(),
                                       definition_info.start_id);
      // Continue type-checking the parse tree after the end of the definition.
      chunks_.back().it =
          Parse::Tree::PostorderIterator(definition_info.definition_id) + 1;
      next_deferred_definition_.SkipTo(definition_info.next_definition_index);
      continue;
    }
    ++chunks_.back().it;
    return node_id;
  }
}
  64. // Determines whether this node kind is the start of a deferred definition
  65. // scope.
  66. static auto IsStartOfDeferredDefinitionScope(Parse::NodeKind kind) -> bool {
  67. switch (kind) {
  68. case Parse::NodeKind::ClassDefinitionStart:
  69. case Parse::NodeKind::ImplDefinitionStart:
  70. case Parse::NodeKind::InterfaceDefinitionStart:
  71. case Parse::NodeKind::NamedConstraintDefinitionStart:
  72. // TODO: Mixins.
  73. return true;
  74. default:
  75. return false;
  76. }
  77. }
  78. // Determines whether this node kind is the end of a deferred definition scope.
  79. static auto IsEndOfDeferredDefinitionScope(Parse::NodeKind kind) -> bool {
  80. switch (kind) {
  81. case Parse::NodeKind::ClassDefinition:
  82. case Parse::NodeKind::ImplDefinition:
  83. case Parse::NodeKind::InterfaceDefinition:
  84. case Parse::NodeKind::NamedConstraintDefinition:
  85. // TODO: Mixins.
  86. return true;
  87. default:
  88. return false;
  89. }
  90. }
  91. // TODO: Investigate factoring out `IsStartOfDeferredDefinitionScope` and
  92. // `IsEndOfDeferredDefinitionScope` in order to make `NodeIdTraversal`
  93. // reusable.
  94. auto NodeIdTraversal::Handle(Parse::NodeKind parse_kind) -> void {
  95. // When we reach the start of a deferred definition scope, add a task to the
  96. // worklist to check future skipped definitions in the new context.
  97. if (IsStartOfDeferredDefinitionScope(parse_kind)) {
  98. worklist_.PushEnterDeferredDefinitionScope(*context_);
  99. }
  100. // When we reach the end of a deferred definition scope, add a task to the
  101. // worklist to leave the scope. If this is not a nested scope, start
  102. // checking the deferred definitions now.
  103. if (IsEndOfDeferredDefinitionScope(parse_kind)) {
  104. chunks_.back().checking_deferred_definitions =
  105. worklist_.SuspendFinishedScopeAndPush(*context_);
  106. }
  107. }
  108. auto NodeIdTraversal::PerformTask(
  109. DeferredDefinitionWorklist::EnterDeferredDefinitionScope&& enter) -> void {
  110. CARBON_CHECK(enter.suspended_name,
  111. "Entering a scope with no suspension information.");
  112. context_->decl_name_stack().Restore(std::move(*enter.suspended_name));
  113. }
  114. auto NodeIdTraversal::PerformTask(
  115. DeferredDefinitionWorklist::LeaveDeferredDefinitionScope&& leave) -> void {
  116. if (!leave.in_deferred_definition_scope) {
  117. // We're done with checking deferred definitions.
  118. chunks_.back().checking_deferred_definitions = false;
  119. }
  120. context_->decl_name_stack().PopScope();
  121. }
// Checks a previously skipped deferred definition: resumes its suspended
// function state and pushes a chunk covering the definition's parse nodes.
auto NodeIdTraversal::PerformTask(
    DeferredDefinitionWorklist::CheckSkippedDefinition&& parse_definition)
    -> void {
  // Note: `definition_index` binds by reference and is mutated below.
  auto& [definition_index, suspended_fn] = parse_definition;
  const auto& definition_info =
      context_->parse_tree().deferred_definitions().Get(definition_index);
  // Restore the checking state that was suspended when this definition was
  // first skipped.
  HandleFunctionDefinitionResume(*context_, definition_info.start_id,
                                 std::move(suspended_fn));
  // Traverse the definition's nodes as a new chunk. `begin() + 1` skips the
  // node at `start_id` itself; the resume call above covers that node.
  // The current `next_deferred_definition_.index()` is saved so tracking can
  // resume from it when this chunk finishes (see `Next`).
  auto range = Parse::Tree::PostorderIterator::MakeRange(
      definition_info.start_id, definition_info.definition_id);
  chunks_.push_back({.it = range.begin() + 1,
                     .end = range.end(),
                     .next_definition = next_deferred_definition_.index()});
  // Watch for the deferred definition immediately after this one; any
  // definitions nested inside this one are encountered during the new
  // chunk's traversal.
  ++definition_index.index;
  next_deferred_definition_.SkipTo(definition_index);
}
  138. NodeIdTraversal::NextDeferredDefinitionCache::NextDeferredDefinitionCache(
  139. const Parse::Tree* tree)
  140. : tree_(tree) {
  141. SkipTo(Parse::DeferredDefinitionIndex(0));
  142. }
  143. // Set the specified deferred definition index as being the next one that
  144. // will be encountered.
  145. auto NodeIdTraversal::NextDeferredDefinitionCache::SkipTo(
  146. Parse::DeferredDefinitionIndex next_index) -> void {
  147. index_ = next_index;
  148. if (static_cast<size_t>(index_.index) ==
  149. tree_->deferred_definitions().size()) {
  150. start_id_ = Parse::NodeId::None;
  151. } else {
  152. start_id_ = tree_->deferred_definitions().Get(index_).start_id;
  153. }
  154. }
  155. } // namespace Carbon::Check