  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#include "toolchain/check/context.h"

#include <cstddef>
#include <cstdint>
#include <initializer_list>
#include <limits>
#include <optional>
#include <string>
#include <utility>

#include "common/check.h"
#include "common/vlog.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Sequence.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/ErrorHandling.h"
#include "toolchain/check/declaration_name_stack.h"
#include "toolchain/check/inst_block_stack.h"
#include "toolchain/lex/tokenized_buffer.h"
#include "toolchain/parse/node_kind.h"
#include "toolchain/sem_ir/file.h"
#include "toolchain/sem_ir/inst.h"
#include "toolchain/sem_ir/inst_kind.h"
  17. namespace Carbon::Check {
// Builds a checking context over `tokens` and `parse_tree`, writing results
// into `sem_ir`. Diagnostics are reported through `emitter`; `vlog_stream` may
// be null to disable verbose logging.
Context::Context(const Lex::TokenizedBuffer& tokens, DiagnosticEmitter& emitter,
                 const Parse::Tree& parse_tree, SemIR::File& sem_ir,
                 llvm::raw_ostream* vlog_stream)
    : tokens_(&tokens),
      emitter_(&emitter),
      parse_tree_(&parse_tree),
      sem_ir_(&sem_ir),
      vlog_stream_(vlog_stream),
      node_stack_(parse_tree, vlog_stream),
      inst_block_stack_("inst_block_stack_", sem_ir, vlog_stream),
      params_or_args_stack_("params_or_args_stack_", sem_ir, vlog_stream),
      args_type_info_stack_("args_type_info_stack_", sem_ir, vlog_stream),
      decl_name_stack_(this) {
  // Inserts the "Error" and "Type" types as "used types" so that
  // canonicalization can skip them. We don't emit either for lowering.
  canonical_types_.insert({SemIR::InstId::BuiltinError, SemIR::TypeId::Error});
  canonical_types_.insert(
      {SemIR::InstId::BuiltinTypeType, SemIR::TypeId::TypeType});
}
// Emits a "Semantics TODO" diagnostic labeled with `label` for a construct
// that checking does not yet handle. Always returns false so callers can use
// the result directly as a failure indicator.
auto Context::TODO(Parse::Node parse_node, std::string label) -> bool {
  CARBON_DIAGNOSTIC(SemanticsTodo, Error, "Semantics TODO: `{0}`.",
                    std::string);
  emitter_->Emit(parse_node, SemanticsTodo, std::move(label));
  return false;
}
// Verifies that all stacks and lookup tables were unwound back to empty by the
// time checking completed; a non-empty structure here indicates a missing
// cleanup on some code path.
auto Context::VerifyOnFinish() -> void {
  // Information in all the various context objects should be cleaned up as
  // various pieces of context go out of scope. At this point, nothing should
  // remain.
  // node_stack_ will still contain top-level entities.
  CARBON_CHECK(name_lookup_.empty()) << name_lookup_.size();
  CARBON_CHECK(scope_stack_.empty()) << scope_stack_.size();
  CARBON_CHECK(inst_block_stack_.empty()) << inst_block_stack_.size();
  CARBON_CHECK(params_or_args_stack_.empty()) << params_or_args_stack_.size();
}
  53. auto Context::AddInst(SemIR::Inst inst) -> SemIR::InstId {
  54. auto inst_id = inst_block_stack_.AddInst(inst);
  55. CARBON_VLOG() << "AddInst: " << inst << "\n";
  56. return inst_id;
  57. }
  58. auto Context::AddConstantInst(SemIR::Inst inst) -> SemIR::InstId {
  59. auto inst_id = insts().AddInNoBlock(inst);
  60. constants().Add(inst_id);
  61. CARBON_VLOG() << "AddConstantInst: " << inst << "\n";
  62. return inst_id;
  63. }
  64. auto Context::AddInstAndPush(Parse::Node parse_node, SemIR::Inst inst) -> void {
  65. auto inst_id = AddInst(inst);
  66. node_stack_.Push(parse_node, inst_id);
  67. }
// Emits a duplicate-name error at `parse_node`, with a note pointing at the
// previous declaration's instruction `prev_def_id`.
auto Context::DiagnoseDuplicateName(Parse::Node parse_node,
                                    SemIR::InstId prev_def_id) -> void {
  CARBON_DIAGNOSTIC(NameDeclDuplicate, Error,
                    "Duplicate name being declared in the same scope.");
  CARBON_DIAGNOSTIC(NameDeclPrevious, Note,
                    "Name is previously declared here.");
  auto prev_def = insts().Get(prev_def_id);
  emitter_->Build(parse_node, NameDeclDuplicate)
      .Note(prev_def.parse_node(), NameDeclPrevious)
      .Emit();
}
// Emits a name-not-found error for `name_id` at `parse_node`.
auto Context::DiagnoseNameNotFound(Parse::Node parse_node,
                                   SemIR::NameId name_id) -> void {
  CARBON_DIAGNOSTIC(NameNotFound, Error, "Name `{0}` not found.",
                    llvm::StringRef);
  emitter_->Emit(parse_node, NameNotFound, names().GetFormatted(name_id));
}
// Attaches a note to `builder` explaining why `class_id` is incomplete: either
// we are still inside the class's own definition, or the class has only been
// forward declared so far. Requires that the class is in fact incomplete.
auto Context::NoteIncompleteClass(SemIR::ClassId class_id,
                                  DiagnosticBuilder& builder) -> void {
  CARBON_DIAGNOSTIC(ClassForwardDeclaredHere, Note,
                    "Class was forward declared here.");
  CARBON_DIAGNOSTIC(ClassIncompleteWithinDefinition, Note,
                    "Class is incomplete within its definition.");
  const auto& class_info = classes().Get(class_id);
  CARBON_CHECK(!class_info.is_defined()) << "Class is not incomplete";
  // A valid definition_id means we're between `{` and `}` of the definition.
  if (class_info.definition_id.is_valid()) {
    builder.Note(insts().Get(class_info.definition_id).parse_node(),
                 ClassIncompleteWithinDefinition);
  } else {
    builder.Note(insts().Get(class_info.decl_id).parse_node(),
                 ClassForwardDeclaredHere);
  }
}
  101. auto Context::AddNameToLookup(Parse::Node name_node, SemIR::NameId name_id,
  102. SemIR::InstId target_id) -> void {
  103. if (current_scope().names.insert(name_id).second) {
  104. // TODO: Reject if we previously performed a failed lookup for this name in
  105. // this scope or a scope nested within it.
  106. auto& lexical_results = name_lookup_[name_id];
  107. CARBON_CHECK(lexical_results.empty() ||
  108. lexical_results.back().scope_index < current_scope_index())
  109. << "Failed to clean up after scope nested within the current scope";
  110. lexical_results.push_back(
  111. {.node_id = target_id, .scope_index = current_scope_index()});
  112. } else {
  113. DiagnoseDuplicateName(name_node, name_lookup_[name_id].back().node_id);
  114. }
  115. }
  116. auto Context::LookupNameInDecl(Parse::Node parse_node, SemIR::NameId name_id,
  117. SemIR::NameScopeId scope_id) -> SemIR::InstId {
  118. if (scope_id == SemIR::NameScopeId::Invalid) {
  119. // Look for a name in the current scope only. There are two cases where the
  120. // name would be in an outer scope:
  121. //
  122. // - The name is the sole component of the declared name:
  123. //
  124. // class A;
  125. // fn F() {
  126. // class A;
  127. // }
  128. //
  129. // In this case, the inner A is not the same class as the outer A, so
  130. // lookup should not find the outer A.
  131. //
  132. // - The name is a qualifier of some larger declared name:
  133. //
  134. // class A { class B; }
  135. // fn F() {
  136. // class A.B {}
  137. // }
  138. //
  139. // In this case, we're not in the correct scope to define a member of
  140. // class A, so we should reject, and we achieve this by not finding the
  141. // name A from the outer scope.
  142. if (auto name_it = name_lookup_.find(name_id);
  143. name_it != name_lookup_.end()) {
  144. CARBON_CHECK(!name_it->second.empty())
  145. << "Should have been erased: " << names().GetFormatted(name_id);
  146. auto result = name_it->second.back();
  147. if (result.scope_index == current_scope_index()) {
  148. return result.node_id;
  149. }
  150. }
  151. return SemIR::InstId::Invalid;
  152. } else {
  153. // TODO: Once we support `extend`, do not look into `extend`ed scopes here,
  154. // following the same logic as above.
  155. return LookupQualifiedName(parse_node, name_id, scope_id,
  156. /*required=*/false);
  157. }
  158. }
// Performs an unqualified name lookup for `name_id`, merging results from
// lexical scopes with results from enclosing non-lexical scopes (namespaces,
// classes), innermost first. Diagnoses and returns the error builtin when
// nothing is found.
auto Context::LookupUnqualifiedName(Parse::Node parse_node,
                                    SemIR::NameId name_id) -> SemIR::InstId {
  // TODO: Check for shadowed lookup results.
  // Find the results from enclosing lexical scopes. These will be combined with
  // results from non-lexical scopes such as namespaces and classes.
  llvm::ArrayRef<LexicalLookupResult> lexical_results;
  if (auto name_it = name_lookup_.find(name_id);
      name_it != name_lookup_.end()) {
    lexical_results = name_it->second;
    CARBON_CHECK(!lexical_results.empty())
        << "Should have been erased: " << names().GetFormatted(name_id);
  }
  // Walk the non-lexical scopes and perform lookups into each of them.
  for (auto [index, name_scope_id] : llvm::reverse(non_lexical_scope_stack_)) {
    // If the innermost lexical result is within this non-lexical scope, then
    // it shadows all further non-lexical results and we're done.
    if (!lexical_results.empty() &&
        lexical_results.back().scope_index > index) {
      return lexical_results.back().node_id;
    }
    auto non_lexical_result =
        LookupQualifiedName(parse_node, name_id, name_scope_id,
                            /*required=*/false);
    if (non_lexical_result.is_valid()) {
      return non_lexical_result;
    }
  }
  // No non-lexical scope had a result; fall back to the innermost lexical one.
  if (!lexical_results.empty()) {
    return lexical_results.back().node_id;
  }
  // We didn't find anything at all.
  DiagnoseNameNotFound(parse_node, name_id);
  return SemIR::InstId::BuiltinError;
}
  193. auto Context::LookupQualifiedName(Parse::Node parse_node, SemIR::NameId name_id,
  194. SemIR::NameScopeId scope_id, bool required)
  195. -> SemIR::InstId {
  196. CARBON_CHECK(scope_id.is_valid()) << "No scope to perform lookup into";
  197. const auto& scope = name_scopes().Get(scope_id);
  198. auto it = scope.find(name_id);
  199. if (it == scope.end()) {
  200. // TODO: Also perform lookups into `extend`ed scopes.
  201. if (required) {
  202. DiagnoseNameNotFound(parse_node, name_id);
  203. return SemIR::InstId::BuiltinError;
  204. }
  205. return SemIR::InstId::Invalid;
  206. }
  207. return it->second;
  208. }
  209. auto Context::PushScope(SemIR::InstId scope_inst_id,
  210. SemIR::NameScopeId scope_id) -> void {
  211. scope_stack_.push_back({.index = next_scope_index_,
  212. .scope_inst_id = scope_inst_id,
  213. .scope_id = scope_id});
  214. if (scope_id.is_valid()) {
  215. non_lexical_scope_stack_.push_back({next_scope_index_, scope_id});
  216. }
  217. // TODO: Handle this case more gracefully.
  218. CARBON_CHECK(next_scope_index_.index != std::numeric_limits<int32_t>::max())
  219. << "Ran out of scopes";
  220. ++next_scope_index_.index;
  221. }
  222. auto Context::PopScope() -> void {
  223. auto scope = scope_stack_.pop_back_val();
  224. for (const auto& str_id : scope.names) {
  225. auto it = name_lookup_.find(str_id);
  226. CARBON_CHECK(it->second.back().scope_index == scope.index)
  227. << "Inconsistent scope index for name " << names().GetFormatted(str_id);
  228. if (it->second.size() == 1) {
  229. // Erase names that no longer resolve.
  230. name_lookup_.erase(it);
  231. } else {
  232. it->second.pop_back();
  233. }
  234. }
  235. if (scope.scope_id.is_valid()) {
  236. CARBON_CHECK(non_lexical_scope_stack_.back().first == scope.index);
  237. non_lexical_scope_stack_.pop_back();
  238. }
  239. }
// Pops scopes until the scope with `index` becomes the current scope.
// `index` must refer to a scope enclosing (or equal to) the current one.
auto Context::PopToScope(ScopeIndex index) -> void {
  while (current_scope_index() > index) {
    PopScope();
  }
  CARBON_CHECK(current_scope_index() == index)
      << "Scope index " << index << " does not enclose the current scope "
      << current_scope_index();
}
  248. auto Context::FollowNameReferences(SemIR::InstId inst_id) -> SemIR::InstId {
  249. while (auto name_ref = insts().Get(inst_id).TryAs<SemIR::NameReference>()) {
  250. inst_id = name_ref->value_id;
  251. }
  252. return inst_id;
  253. }
  254. auto Context::GetConstantValue(SemIR::InstId inst_id) -> SemIR::InstId {
  255. // TODO: The constant value of an instruction should be computed as we build
  256. // the instruction, or at least cached once computed.
  257. while (true) {
  258. auto inst = insts().Get(inst_id);
  259. switch (inst.kind()) {
  260. case SemIR::NameReference::Kind:
  261. inst_id = inst.As<SemIR::NameReference>().value_id;
  262. break;
  263. case SemIR::BindName::Kind:
  264. inst_id = inst.As<SemIR::BindName>().value_id;
  265. break;
  266. case SemIR::Field::Kind:
  267. case SemIR::FunctionDecl::Kind:
  268. return inst_id;
  269. default:
  270. // TODO: Handle the remaining cases.
  271. return SemIR::InstId::Invalid;
  272. }
  273. }
  274. }
// Creates a new instruction block dominated by the current block and emits a
// `BranchNode` terminator (built from `args`) targeting it. Returns the new
// block's ID, or `Unreachable` (emitting nothing) when the current position
// is unreachable.
template <typename BranchNode, typename... Args>
static auto AddDominatedBlockAndBranchImpl(Context& context,
                                           Parse::Node parse_node, Args... args)
    -> SemIR::InstBlockId {
  if (!context.inst_block_stack().is_current_block_reachable()) {
    return SemIR::InstBlockId::Unreachable;
  }
  auto block_id = context.inst_blocks().AddDefaultValue();
  context.AddInst(BranchNode{parse_node, block_id, args...});
  return block_id;
}
// Creates a dominated block reached by an unconditional branch.
auto Context::AddDominatedBlockAndBranch(Parse::Node parse_node)
    -> SemIR::InstBlockId {
  return AddDominatedBlockAndBranchImpl<SemIR::Branch>(*this, parse_node);
}
// Creates a dominated block reached by a branch carrying `arg_id`.
auto Context::AddDominatedBlockAndBranchWithArg(Parse::Node parse_node,
                                                SemIR::InstId arg_id)
    -> SemIR::InstBlockId {
  return AddDominatedBlockAndBranchImpl<SemIR::BranchWithArg>(*this, parse_node,
                                                              arg_id);
}
// Creates a dominated block reached when `cond_id` is true.
auto Context::AddDominatedBlockAndBranchIf(Parse::Node parse_node,
                                           SemIR::InstId cond_id)
    -> SemIR::InstBlockId {
  return AddDominatedBlockAndBranchImpl<SemIR::BranchIf>(*this, parse_node,
                                                         cond_id);
}
  302. auto Context::AddConvergenceBlockAndPush(Parse::Node parse_node, int num_blocks)
  303. -> void {
  304. CARBON_CHECK(num_blocks >= 2) << "no convergence";
  305. SemIR::InstBlockId new_block_id = SemIR::InstBlockId::Unreachable;
  306. for ([[maybe_unused]] auto _ : llvm::seq(num_blocks)) {
  307. if (inst_block_stack().is_current_block_reachable()) {
  308. if (new_block_id == SemIR::InstBlockId::Unreachable) {
  309. new_block_id = inst_blocks().AddDefaultValue();
  310. }
  311. AddInst(SemIR::Branch{parse_node, new_block_id});
  312. }
  313. inst_block_stack().Pop();
  314. }
  315. inst_block_stack().Push(new_block_id);
  316. }
// Like AddConvergenceBlockAndPush, but each reachable predecessor branches
// with its corresponding value from `block_args` (one argument per popped
// block, innermost first). Pushes the convergence block and returns a
// `BlockArg` instruction holding the converged value.
auto Context::AddConvergenceBlockWithArgAndPush(
    Parse::Node parse_node, std::initializer_list<SemIR::InstId> block_args)
    -> SemIR::InstId {
  CARBON_CHECK(block_args.size() >= 2) << "no convergence";
  SemIR::InstBlockId new_block_id = SemIR::InstBlockId::Unreachable;
  for (auto arg_id : block_args) {
    if (inst_block_stack().is_current_block_reachable()) {
      // Allocate the convergence block lazily, on the first reachable
      // predecessor.
      if (new_block_id == SemIR::InstBlockId::Unreachable) {
        new_block_id = inst_blocks().AddDefaultValue();
      }
      AddInst(SemIR::BranchWithArg{parse_node, new_block_id, arg_id});
    }
    inst_block_stack().Pop();
  }
  inst_block_stack().Push(new_block_id);
  // Acquire the result value. All branch arguments are expected to share the
  // first argument's type.
  SemIR::TypeId result_type_id = insts().Get(*block_args.begin()).type_id();
  return AddInst(SemIR::BlockArg{parse_node, result_type_id, new_block_id});
}
  336. // Add the current code block to the enclosing function.
  337. auto Context::AddCurrentCodeBlockToFunction(Parse::Node parse_node) -> void {
  338. CARBON_CHECK(!inst_block_stack().empty()) << "no current code block";
  339. if (return_scope_stack().empty()) {
  340. CARBON_CHECK(parse_node.is_valid())
  341. << "No current function, but parse_node not provided";
  342. TODO(parse_node,
  343. "Control flow expressions are currently only supported inside "
  344. "functions.");
  345. return;
  346. }
  347. if (!inst_block_stack().is_current_block_reachable()) {
  348. // Don't include unreachable blocks in the function.
  349. return;
  350. }
  351. auto function_id =
  352. insts()
  353. .GetAs<SemIR::FunctionDecl>(return_scope_stack().back())
  354. .function_id;
  355. functions()
  356. .Get(function_id)
  357. .body_block_ids.push_back(inst_block_stack().PeekOrAdd());
  358. }
  359. auto Context::is_current_position_reachable() -> bool {
  360. if (!inst_block_stack().is_current_block_reachable()) {
  361. return false;
  362. }
  363. // Our current position is at the end of a reachable block. That position is
  364. // reachable unless the previous instruction is a terminator instruction.
  365. auto block_contents = inst_block_stack().PeekCurrentBlockContents();
  366. if (block_contents.empty()) {
  367. return true;
  368. }
  369. const auto& last_inst = insts().Get(block_contents.back());
  370. return last_inst.kind().terminator_kind() !=
  371. SemIR::TerminatorKind::Terminator;
  372. }
// Begins accumulating a new parameter or argument list.
auto Context::ParamOrArgStart() -> void { params_or_args_stack_.Push(); }
// Saves the expression preceding a `,` into the current list.
auto Context::ParamOrArgComma() -> void {
  ParamOrArgSave(node_stack_.PopExpr());
}
// Saves any trailing expression (one not followed by a `,`) into the current
// list. A trailing expression is detected by checking whether the node on top
// of the stack is still the list's introducer node (`start_kind`). Does not
// pop the list itself.
auto Context::ParamOrArgEndNoPop(Parse::NodeKind start_kind) -> void {
  if (parse_tree_->node_kind(node_stack_.PeekParseNode()) != start_kind) {
    ParamOrArgSave(node_stack_.PopExpr());
  }
}
// Pops and returns the accumulated parameter or argument list.
auto Context::ParamOrArgPop() -> SemIR::InstBlockId {
  return params_or_args_stack_.Pop();
}
// Finishes the current list: saves any trailing expression, then pops and
// returns the list.
auto Context::ParamOrArgEnd(Parse::NodeKind start_kind) -> SemIR::InstBlockId {
  ParamOrArgEndNoPop(start_kind);
  return ParamOrArgPop();
}
  389. namespace {
  390. // Worklist-based type completion mechanism.
  391. //
  392. // When attempting to complete a type, we may find other types that also need to
  393. // be completed: types nested within that type, and the value representation of
  394. // the type. In order to complete a type without recursing arbitrarily deeply,
  395. // we use a worklist of tasks:
  396. //
  397. // - An `AddNestedIncompleteTypes` step adds a task for all incomplete types
  398. // nested within a type to the work list.
  399. // - A `BuildValueRepresentation` step computes the value representation for a
  400. // type, once all of its nested types are complete, and marks the type as
  401. // complete.
  402. class TypeCompleter {
  403. public:
  // `diagnoser` builds the diagnostic to attach notes to when a type cannot
  // be completed; when not provided, failure to complete is silent.
  TypeCompleter(
      Context& context,
      std::optional<llvm::function_ref<auto()->Context::DiagnosticBuilder>>
          diagnoser)
      : context_(context), diagnoser_(diagnoser) {}
  // Attempts to complete the given type. Returns true if it is now complete,
  // false if it could not be completed.
  auto Complete(SemIR::TypeId type_id) -> bool {
    Push(type_id);
    // Drain the work list; a failing step aborts completion.
    while (!work_list_.empty()) {
      if (!ProcessStep()) {
        return false;
      }
    }
    return true;
  }
  420. private:
  // Adds `type_id` to the work list, if it's not already complete. New items
  // start in the `AddNestedIncompleteTypes` phase.
  auto Push(SemIR::TypeId type_id) -> void {
    if (!context_.sem_ir().IsTypeComplete(type_id)) {
      work_list_.push_back({type_id, Phase::AddNestedIncompleteTypes});
    }
  }
  // Runs the next step for the item on top of the work list. Returns false if
  // the item's type cannot be completed.
  auto ProcessStep() -> bool {
    auto [type_id, phase] = work_list_.back();
    // We might have enqueued the same type more than once. Just skip the
    // type if it's already complete.
    if (context_.sem_ir().IsTypeComplete(type_id)) {
      work_list_.pop_back();
      return true;
    }
    auto inst_id = context_.sem_ir().GetTypeAllowBuiltinTypes(type_id);
    auto inst = context_.insts().Get(inst_id);
    auto old_work_list_size = work_list_.size();
    switch (phase) {
      case Phase::AddNestedIncompleteTypes:
        if (!AddNestedIncompleteTypes(inst)) {
          return false;
        }
        CARBON_CHECK(work_list_.size() >= old_work_list_size)
            << "AddNestedIncompleteTypes should not remove work items";
        // Advance this item's phase; it stays on the list beneath the nested
        // types just pushed, so they complete first.
        work_list_[old_work_list_size - 1].phase =
            Phase::BuildValueRepresentation;
        break;
      case Phase::BuildValueRepresentation: {
        auto value_rep = BuildValueRepresentation(type_id, inst);
        context_.sem_ir().CompleteType(type_id, value_rep);
        CARBON_CHECK(old_work_list_size == work_list_.size())
            << "BuildValueRepresentation should not change work items";
        work_list_.pop_back();
        // Also complete the value representation type, if necessary. This
        // should never fail: the value representation shouldn't require any
        // additional nested types to be complete.
        if (!context_.sem_ir().IsTypeComplete(value_rep.type_id)) {
          work_list_.push_back(
              {value_rep.type_id, Phase::BuildValueRepresentation});
        }
        // For a pointer representation, the pointee also needs to be complete.
        if (value_rep.kind == SemIR::ValueRepresentation::Pointer) {
          auto pointee_type_id =
              context_.sem_ir().GetPointeeType(value_rep.type_id);
          if (!context_.sem_ir().IsTypeComplete(pointee_type_id)) {
            work_list_.push_back(
                {pointee_type_id, Phase::BuildValueRepresentation});
          }
        }
        break;
      }
    }
    return true;
  }
  // Adds any types nested within `type_inst` that need to be complete for
  // `type_inst` to be complete to our work list. Returns false (after
  // optionally diagnosing) when completion is impossible, e.g. for a class
  // that is not yet defined.
  auto AddNestedIncompleteTypes(SemIR::Inst type_inst) -> bool {
    switch (type_inst.kind()) {
      case SemIR::ArrayType::Kind:
        Push(type_inst.As<SemIR::ArrayType>().element_type_id);
        break;
      case SemIR::StructType::Kind:
        for (auto field_id : context_.inst_blocks().Get(
                 type_inst.As<SemIR::StructType>().fields_id)) {
          Push(context_.insts()
                   .GetAs<SemIR::StructTypeField>(field_id)
                   .field_type_id);
        }
        break;
      case SemIR::TupleType::Kind:
        for (auto element_type_id : context_.type_blocks().Get(
                 type_inst.As<SemIR::TupleType>().elements_id)) {
          Push(element_type_id);
        }
        break;
      case SemIR::ClassType::Kind: {
        auto class_type = type_inst.As<SemIR::ClassType>();
        auto& class_info = context_.classes().Get(class_type.class_id);
        // An undefined class cannot be completed; report why if we can.
        if (!class_info.is_defined()) {
          if (diagnoser_) {
            auto builder = (*diagnoser_)();
            context_.NoteIncompleteClass(class_type.class_id, builder);
            builder.Emit();
          }
          return false;
        }
        Push(class_info.object_representation_id);
        break;
      }
      case SemIR::ConstType::Kind:
        Push(type_inst.As<SemIR::ConstType>().inner_id);
        break;
      default:
        // Other kinds have no nested types to complete.
        break;
    }
    return true;
  }
  // Makes an empty value representation, which is used for types that have no
  // state, such as empty structs and tuples. The representation's type is the
  // canonical empty tuple.
  auto MakeEmptyRepresentation(Parse::Node parse_node) const
      -> SemIR::ValueRepresentation {
    return {.kind = SemIR::ValueRepresentation::None,
            .type_id = context_.CanonicalizeTupleType(parse_node, {})};
  }
  // Makes a value representation that uses pass-by-copy, copying the given
  // type.
  auto MakeCopyRepresentation(
      SemIR::TypeId rep_id,
      SemIR::ValueRepresentation::AggregateKind aggregate_kind =
          SemIR::ValueRepresentation::NotAggregate) const
      -> SemIR::ValueRepresentation {
    return {.kind = SemIR::ValueRepresentation::Copy,
            .aggregate_kind = aggregate_kind,
            .type_id = rep_id};
  }
  // Makes a value representation that uses pass-by-address with the given
  // pointee type.
  auto MakePointerRepresentation(
      Parse::Node parse_node, SemIR::TypeId pointee_id,
      SemIR::ValueRepresentation::AggregateKind aggregate_kind =
          SemIR::ValueRepresentation::NotAggregate) const
      -> SemIR::ValueRepresentation {
    // TODO: Should we add `const` qualification to `pointee_id`?
    return {.kind = SemIR::ValueRepresentation::Pointer,
            .aggregate_kind = aggregate_kind,
            .type_id = context_.GetPointerType(parse_node, pointee_id)};
  }
  549. // Gets the value representation of a nested type, which should already be
  550. // complete.
  551. auto GetNestedValueRepresentation(SemIR::TypeId nested_type_id) const {
  552. CARBON_CHECK(context_.sem_ir().IsTypeComplete(nested_type_id))
  553. << "Nested type should already be complete";
  554. auto value_rep = context_.sem_ir().GetValueRepresentation(nested_type_id);
  555. CARBON_CHECK(value_rep.kind != SemIR::ValueRepresentation::Unknown)
  556. << "Complete type should have a value representation";
  557. return value_rep;
  558. };
  // Builds the value representation for a type whose canonical instruction is
  // a cross-reference into another file. Currently only builtin types can be
  // cross-referenced here.
  auto BuildCrossReferenceValueRepresentation(SemIR::TypeId type_id,
                                              SemIR::CrossReference xref) const
      -> SemIR::ValueRepresentation {
    auto xref_inst = context_.sem_ir()
                         .GetCrossReferenceIR(xref.ir_id)
                         .insts()
                         .Get(xref.inst_id);
    // The canonical description of a type should only have cross-references
    // for entities owned by another File, such as builtins, which are owned
    // by the prelude, and named entities like classes and interfaces, which
    // we don't support yet.
    CARBON_CHECK(xref_inst.kind() == SemIR::Builtin::Kind)
        << "TODO: Handle other kinds of inst cross-references";
    // clang warns on unhandled enum values; clang-tidy is incorrect here.
    // NOLINTNEXTLINE(bugprone-switch-missing-default-case)
    switch (xref_inst.As<SemIR::Builtin>().builtin_kind) {
      case SemIR::BuiltinKind::TypeType:
      case SemIR::BuiltinKind::Error:
      case SemIR::BuiltinKind::Invalid:
      case SemIR::BuiltinKind::BoolType:
      case SemIR::BuiltinKind::IntegerType:
      case SemIR::BuiltinKind::FloatingPointType:
      case SemIR::BuiltinKind::NamespaceType:
      case SemIR::BuiltinKind::FunctionType:
      case SemIR::BuiltinKind::BoundMethodType:
        return MakeCopyRepresentation(type_id);
      case SemIR::BuiltinKind::StringType:
        // TODO: Decide on string value semantics. This should probably be a
        // custom value representation carrying a pointer and size or
        // similar.
        return MakePointerRepresentation(Parse::Node::Invalid, type_id);
    }
    llvm_unreachable("All builtin kinds were handled above");
  }
  593. auto BuildStructOrTupleValueRepresentation(Parse::Node parse_node,
  594. std::size_t num_elements,
  595. SemIR::TypeId elementwise_rep,
  596. bool same_as_object_rep) const
  597. -> SemIR::ValueRepresentation {
  598. SemIR::ValueRepresentation::AggregateKind aggregate_kind =
  599. same_as_object_rep ? SemIR::ValueRepresentation::ValueAndObjectAggregate
  600. : SemIR::ValueRepresentation::ValueAggregate;
  601. if (num_elements == 1) {
  602. // The value representation for a struct or tuple with a single element
  603. // is a struct or tuple containing the value representation of the
  604. // element.
  605. // TODO: Consider doing the same whenever `elementwise_rep` is
  606. // sufficiently small.
  607. return MakeCopyRepresentation(elementwise_rep, aggregate_kind);
  608. }
  609. // For a struct or tuple with multiple fields, we use a pointer
  610. // to the elementwise value representation.
  611. return MakePointerRepresentation(parse_node, elementwise_rep,
  612. aggregate_kind);
  613. }
  // Builds the value representation for the struct type `struct_type` (whose
  // type ID is `type_id`): an elementwise struct of the fields' value
  // representations, shared with the object representation when every field's
  // value representation matches its object type.
  auto BuildStructTypeValueRepresentation(SemIR::TypeId type_id,
                                          SemIR::StructType struct_type) const
      -> SemIR::ValueRepresentation {
    // TODO: Share more code with tuples.
    auto fields = context_.inst_blocks().Get(struct_type.fields_id);
    if (fields.empty()) {
      return MakeEmptyRepresentation(struct_type.parse_node);
    }
    // Find the value representation for each field, and construct a struct
    // of value representations.
    llvm::SmallVector<SemIR::InstId> value_rep_fields;
    value_rep_fields.reserve(fields.size());
    bool same_as_object_rep = true;
    for (auto field_id : fields) {
      auto field = context_.insts().GetAs<SemIR::StructTypeField>(field_id);
      auto field_value_rep = GetNestedValueRepresentation(field.field_type_id);
      // A differing field representation forces a distinct value-rep struct
      // with a rewritten field instruction.
      if (field_value_rep.type_id != field.field_type_id) {
        same_as_object_rep = false;
        field.field_type_id = field_value_rep.type_id;
        field_id = context_.AddConstantInst(field);
      }
      value_rep_fields.push_back(field_id);
    }
    auto value_rep = same_as_object_rep
                         ? type_id
                         : context_.CanonicalizeStructType(
                               struct_type.parse_node,
                               context_.inst_blocks().Add(value_rep_fields));
    return BuildStructOrTupleValueRepresentation(
        struct_type.parse_node, fields.size(), value_rep, same_as_object_rep);
  }
  // Builds the value representation for the tuple type `tuple_type` (whose
  // type ID is `type_id`): an elementwise tuple of the elements' value
  // representations, shared with the object representation when every
  // element's value representation matches its object type.
  auto BuildTupleTypeValueRepresentation(SemIR::TypeId type_id,
                                         SemIR::TupleType tuple_type) const
      -> SemIR::ValueRepresentation {
    // TODO: Share more code with structs.
    auto elements = context_.type_blocks().Get(tuple_type.elements_id);
    if (elements.empty()) {
      return MakeEmptyRepresentation(tuple_type.parse_node);
    }
    // Find the value representation for each element, and construct a tuple
    // of value representations.
    llvm::SmallVector<SemIR::TypeId> value_rep_elements;
    value_rep_elements.reserve(elements.size());
    bool same_as_object_rep = true;
    for (auto element_type_id : elements) {
      auto element_value_rep = GetNestedValueRepresentation(element_type_id);
      if (element_value_rep.type_id != element_type_id) {
        same_as_object_rep = false;
      }
      value_rep_elements.push_back(element_value_rep.type_id);
    }
    auto value_rep = same_as_object_rep
                         ? type_id
                         : context_.CanonicalizeTupleType(tuple_type.parse_node,
                                                          value_rep_elements);
    return BuildStructOrTupleValueRepresentation(
        tuple_type.parse_node, elements.size(), value_rep, same_as_object_rep);
  }
  // Builds and returns the value representation for the given type. All nested
  // types, as found by AddNestedIncompleteTypes, are known to be complete.
  auto BuildValueRepresentation(SemIR::TypeId type_id, SemIR::Inst inst) const
      -> SemIR::ValueRepresentation {
    // TODO: This can emit new SemIR instructions. Consider emitting them into a
    // dedicated file-scope instruction block where possible, or somewhere else
    // that better reflects the definition of the type, rather than wherever the
    // type happens to first be required to be complete.
    // clang warns on unhandled enum values; clang-tidy is incorrect here.
    // NOLINTNEXTLINE(bugprone-switch-missing-default-case)
    switch (inst.kind()) {
      // None of these instruction kinds can define a type; reaching one here
      // means the type refers to a non-type instruction.
      case SemIR::AddressOf::Kind:
      case SemIR::ArrayIndex::Kind:
      case SemIR::ArrayInit::Kind:
      case SemIR::Assign::Kind:
      case SemIR::BinaryOperatorAdd::Kind:
      case SemIR::BindName::Kind:
      case SemIR::BindValue::Kind:
      case SemIR::BlockArg::Kind:
      case SemIR::BoolLiteral::Kind:
      case SemIR::BoundMethod::Kind:
      case SemIR::Branch::Kind:
      case SemIR::BranchIf::Kind:
      case SemIR::BranchWithArg::Kind:
      case SemIR::Call::Kind:
      case SemIR::ClassDecl::Kind:
      case SemIR::ClassFieldAccess::Kind:
      case SemIR::ClassInit::Kind:
      case SemIR::Converted::Kind:
      case SemIR::Dereference::Kind:
      case SemIR::Field::Kind:
      case SemIR::FunctionDecl::Kind:
      case SemIR::InitializeFrom::Kind:
      case SemIR::IntegerLiteral::Kind:
      case SemIR::NameReference::Kind:
      case SemIR::Namespace::Kind:
      case SemIR::NoOp::Kind:
      case SemIR::Parameter::Kind:
      case SemIR::RealLiteral::Kind:
      case SemIR::Return::Kind:
      case SemIR::ReturnExpr::Kind:
      case SemIR::SelfParameter::Kind:
      case SemIR::SpliceBlock::Kind:
      case SemIR::StringLiteral::Kind:
      case SemIR::StructAccess::Kind:
      case SemIR::StructTypeField::Kind:
      case SemIR::StructLiteral::Kind:
      case SemIR::StructInit::Kind:
      case SemIR::StructValue::Kind:
      case SemIR::Temporary::Kind:
      case SemIR::TemporaryStorage::Kind:
      case SemIR::TupleAccess::Kind:
      case SemIR::TupleIndex::Kind:
      case SemIR::TupleLiteral::Kind:
      case SemIR::TupleInit::Kind:
      case SemIR::TupleValue::Kind:
      case SemIR::UnaryOperatorNot::Kind:
      case SemIR::ValueAsReference::Kind:
      case SemIR::ValueOfInitializer::Kind:
      case SemIR::VarStorage::Kind:
        CARBON_FATAL() << "Type refers to non-type inst " << inst;
      case SemIR::CrossReference::Kind:
        // Imported types get a representation derived from their target.
        return BuildCrossReferenceValueRepresentation(
            type_id, inst.As<SemIR::CrossReference>());
      case SemIR::ArrayType::Kind: {
        // For arrays, it's convenient to always use a pointer representation,
        // even when the array has zero or one element, in order to support
        // indexing.
        return MakePointerRepresentation(
            inst.parse_node(), type_id,
            SemIR::ValueRepresentation::ObjectAggregate);
      }
      case SemIR::StructType::Kind:
        return BuildStructTypeValueRepresentation(type_id,
                                                  inst.As<SemIR::StructType>());
      case SemIR::TupleType::Kind:
        return BuildTupleTypeValueRepresentation(type_id,
                                                 inst.As<SemIR::TupleType>());
      case SemIR::ClassType::Kind:
        // The value representation for a class is a pointer to the object
        // representation.
        // TODO: Support customized value representations for classes.
        // TODO: Pick a better value representation when possible.
        return MakePointerRepresentation(
            inst.parse_node(),
            context_.classes()
                .Get(inst.As<SemIR::ClassType>().class_id)
                .object_representation_id,
            SemIR::ValueRepresentation::ObjectAggregate);
      case SemIR::Builtin::Kind:
        CARBON_FATAL() << "Builtins should be named as cross-references";
      case SemIR::PointerType::Kind:
      case SemIR::UnboundFieldType::Kind:
        // Small scalar-like types are passed by copy.
        return MakeCopyRepresentation(type_id);
      case SemIR::ConstType::Kind:
        // The value representation of `const T` is the same as that of `T`.
        // Objects are not modifiable through their value representations.
        return GetNestedValueRepresentation(
            inst.As<SemIR::ConstType>().inner_id);
    }
  }
  // The stage of processing for an entry on the work list.
  enum class Phase : int8_t {
    // The next step is to add nested types to the list of types to complete.
    AddNestedIncompleteTypes,
    // The next step is to build the value representation for the type.
    BuildValueRepresentation,
  };

  // A type queued for completion, along with how far it has progressed.
  struct WorkItem {
    SemIR::TypeId type_id;
    Phase phase;
  };

  // The checking context owning the IR being completed.
  Context& context_;
  // Types awaiting completion, processed as a stack so that nested types are
  // completed before the types that contain them.
  llvm::SmallVector<WorkItem> work_list_;
  // If present, produces a diagnostic when a type cannot be completed.
  // NOTE(review): presumably absent for speculative completion attempts that
  // should stay silent — confirm against callers of TryToCompleteType.
  std::optional<llvm::function_ref<auto()->Context::DiagnosticBuilder>>
      diagnoser_;
  787. };
  788. } // namespace
  789. auto Context::TryToCompleteType(
  790. SemIR::TypeId type_id,
  791. std::optional<llvm::function_ref<auto()->DiagnosticBuilder>> diagnoser)
  792. -> bool {
  793. return TypeCompleter(*this, diagnoser).Complete(type_id);
  794. }
// Canonicalizes a type, deduplicating it through a folding set keyed on the
// instruction kind plus a kind-specific profile. `profile_type` appends the
// kind-specific key (returning false if the type cannot be profiled), and
// `make_inst` is invoked only when the type is genuinely new.
auto Context::CanonicalizeTypeImpl(
    SemIR::InstKind kind,
    llvm::function_ref<bool(llvm::FoldingSetNodeID& canonical_id)> profile_type,
    llvm::function_ref<SemIR::InstId()> make_inst) -> SemIR::TypeId {
  llvm::FoldingSetNodeID canonical_id;
  kind.Profile(canonical_id);
  if (!profile_type(canonical_id)) {
    // Unprofilable types are treated as errors; see ProfileType for details.
    return SemIR::TypeId::Error;
  }
  void* insert_pos;
  auto* node =
      canonical_type_nodes_.FindNodeOrInsertPos(canonical_id, insert_pos);
  if (node != nullptr) {
    // An identical type already exists; reuse it.
    return node->type_id();
  }
  // New type: create its instruction and register the canonical mapping.
  auto inst_id = make_inst();
  auto type_id = types().Add({.inst_id = inst_id});
  CARBON_CHECK(canonical_types_.insert({inst_id, type_id}).second);
  type_node_storage_.push_back(
      std::make_unique<TypeNode>(canonical_id, type_id));
  // In a debug build, check that our insertion position is still valid. It
  // could have been invalidated by a misbehaving `make_inst`.
  CARBON_DCHECK([&] {
    void* check_insert_pos;
    auto* check_node = canonical_type_nodes_.FindNodeOrInsertPos(
        canonical_id, check_insert_pos);
    return !check_node && insert_pos == check_insert_pos;
  }()) << "Type was created recursively during canonicalization";
  canonical_type_nodes_.InsertNode(type_node_storage_.back().get(), insert_pos);
  return type_id;
}
  826. // Compute a fingerprint for a tuple type, for use as a key in a folding set.
  827. static auto ProfileTupleType(llvm::ArrayRef<SemIR::TypeId> type_ids,
  828. llvm::FoldingSetNodeID& canonical_id) -> void {
  829. for (auto type_id : type_ids) {
  830. canonical_id.AddInteger(type_id.index);
  831. }
  832. }
// Compute a fingerprint for a type, for use as a key in a folding set. Returns
// false if not supported, which is presently the case for compile-time
// expressions.
// TODO: Once support is more complete, in particular ensuring that various
// valid compile-time expressions are supported, it may be desirable to switch
// the default to a CARBON_FATAL error.
//
// The fields profiled for each kind define the notion of type identity, so
// two types compare equal exactly when every profiled integer matches.
static auto ProfileType(Context& semantics_context, SemIR::Inst inst,
                        llvm::FoldingSetNodeID& canonical_id) -> bool {
  switch (inst.kind()) {
    case SemIR::ArrayType::Kind: {
      // Arrays are identified by their evaluated bound and element type.
      auto array_type = inst.As<SemIR::ArrayType>();
      canonical_id.AddInteger(
          semantics_context.sem_ir().GetArrayBoundValue(array_type.bound_id));
      canonical_id.AddInteger(array_type.element_type_id.index);
      break;
    }
    case SemIR::Builtin::Kind:
      canonical_id.AddInteger(inst.As<SemIR::Builtin>().builtin_kind.AsInt());
      break;
    case SemIR::ClassType::Kind:
      canonical_id.AddInteger(inst.As<SemIR::ClassType>().class_id.index);
      break;
    case SemIR::CrossReference::Kind: {
      // TODO: Cross-references should be canonicalized by looking at their
      // target rather than treating them as new unique types.
      auto xref = inst.As<SemIR::CrossReference>();
      canonical_id.AddInteger(xref.ir_id.index);
      canonical_id.AddInteger(xref.inst_id.index);
      break;
    }
    case SemIR::ConstType::Kind:
      // Strip redundant qualification so `const const T` profiles the same as
      // `const T`.
      canonical_id.AddInteger(
          semantics_context
              .GetUnqualifiedType(inst.As<SemIR::ConstType>().inner_id)
              .index);
      break;
    case SemIR::PointerType::Kind:
      canonical_id.AddInteger(inst.As<SemIR::PointerType>().pointee_id.index);
      break;
    case SemIR::StructType::Kind: {
      // Structs are identified by their ordered (name, type) field pairs.
      auto fields = semantics_context.inst_blocks().Get(
          inst.As<SemIR::StructType>().fields_id);
      for (const auto& field_id : fields) {
        auto field =
            semantics_context.insts().GetAs<SemIR::StructTypeField>(field_id);
        canonical_id.AddInteger(field.name_id.index);
        canonical_id.AddInteger(field.field_type_id.index);
      }
      break;
    }
    case SemIR::TupleType::Kind:
      ProfileTupleType(semantics_context.type_blocks().Get(
                           inst.As<SemIR::TupleType>().elements_id),
                       canonical_id);
      break;
    case SemIR::UnboundFieldType::Kind: {
      auto unbound_field_type = inst.As<SemIR::UnboundFieldType>();
      canonical_id.AddInteger(unbound_field_type.class_type_id.index);
      canonical_id.AddInteger(unbound_field_type.field_type_id.index);
      break;
    }
    default: {
      // Right now, this is only expected to occur in calls from
      // ExprAsType. Diagnostics are issued there.
      return false;
    }
  }
  return true;
}
  902. auto Context::CanonicalizeTypeAndAddInstIfNew(SemIR::Inst inst)
  903. -> SemIR::TypeId {
  904. auto profile_node = [&](llvm::FoldingSetNodeID& canonical_id) {
  905. return ProfileType(*this, inst, canonical_id);
  906. };
  907. auto make_inst = [&] { return AddConstantInst(inst); };
  908. return CanonicalizeTypeImpl(inst.kind(), profile_node, make_inst);
  909. }
  910. auto Context::CanonicalizeType(SemIR::InstId inst_id) -> SemIR::TypeId {
  911. while (auto converted = insts().Get(inst_id).TryAs<SemIR::Converted>()) {
  912. inst_id = converted->result_id;
  913. }
  914. inst_id = FollowNameReferences(inst_id);
  915. auto it = canonical_types_.find(inst_id);
  916. if (it != canonical_types_.end()) {
  917. return it->second;
  918. }
  919. auto inst = insts().Get(inst_id);
  920. auto profile_node = [&](llvm::FoldingSetNodeID& canonical_id) {
  921. return ProfileType(*this, inst, canonical_id);
  922. };
  923. auto make_inst = [&] { return inst_id; };
  924. return CanonicalizeTypeImpl(inst.kind(), profile_node, make_inst);
  925. }
  926. auto Context::CanonicalizeStructType(Parse::Node parse_node,
  927. SemIR::InstBlockId refs_id)
  928. -> SemIR::TypeId {
  929. return CanonicalizeTypeAndAddInstIfNew(
  930. SemIR::StructType{parse_node, SemIR::TypeId::TypeType, refs_id});
  931. }
  932. auto Context::CanonicalizeTupleType(Parse::Node parse_node,
  933. llvm::ArrayRef<SemIR::TypeId> type_ids)
  934. -> SemIR::TypeId {
  935. // Defer allocating a SemIR::TypeBlockId until we know this is a new type.
  936. auto profile_tuple = [&](llvm::FoldingSetNodeID& canonical_id) {
  937. ProfileTupleType(type_ids, canonical_id);
  938. return true;
  939. };
  940. auto make_tuple_inst = [&] {
  941. return AddConstantInst(SemIR::TupleType{parse_node, SemIR::TypeId::TypeType,
  942. type_blocks().Add(type_ids)});
  943. };
  944. return CanonicalizeTypeImpl(SemIR::TupleType::Kind, profile_tuple,
  945. make_tuple_inst);
  946. }
  947. auto Context::GetBuiltinType(SemIR::BuiltinKind kind) -> SemIR::TypeId {
  948. CARBON_CHECK(kind != SemIR::BuiltinKind::Invalid);
  949. auto type_id = CanonicalizeType(SemIR::InstId::ForBuiltin(kind));
  950. // To keep client code simpler, complete builtin types before returning them.
  951. bool complete = TryToCompleteType(type_id);
  952. CARBON_CHECK(complete) << "Failed to complete builtin type";
  953. return type_id;
  954. }
  955. auto Context::GetPointerType(Parse::Node parse_node,
  956. SemIR::TypeId pointee_type_id) -> SemIR::TypeId {
  957. return CanonicalizeTypeAndAddInstIfNew(
  958. SemIR::PointerType{parse_node, SemIR::TypeId::TypeType, pointee_type_id});
  959. }
  960. auto Context::GetUnqualifiedType(SemIR::TypeId type_id) -> SemIR::TypeId {
  961. SemIR::Inst type_inst =
  962. insts().Get(sem_ir_->GetTypeAllowBuiltinTypes(type_id));
  963. if (auto const_type = type_inst.TryAs<SemIR::ConstType>()) {
  964. return const_type->inner_id;
  965. }
  966. return type_id;
  967. }
  968. auto Context::PrintForStackDump(llvm::raw_ostream& output) const -> void {
  969. node_stack_.PrintForStackDump(output);
  970. inst_block_stack_.PrintForStackDump(output);
  971. params_or_args_stack_.PrintForStackDump(output);
  972. args_type_info_stack_.PrintForStackDump(output);
  973. }
  974. } // namespace Carbon::Check