semantics_context.cpp 32 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. #include "toolchain/semantics/semantics_context.h"
  5. #include <utility>
  6. #include "common/check.h"
  7. #include "common/vlog.h"
  8. #include "llvm/ADT/STLExtras.h"
  9. #include "toolchain/diagnostics/diagnostic_kind.h"
  10. #include "toolchain/lexer/tokenized_buffer.h"
  11. #include "toolchain/parser/parse_node_kind.h"
  12. #include "toolchain/semantics/semantics_declaration_name_stack.h"
  13. #include "toolchain/semantics/semantics_ir.h"
  14. #include "toolchain/semantics/semantics_node.h"
  15. #include "toolchain/semantics/semantics_node_block_stack.h"
  16. #include "toolchain/semantics/semantics_node_kind.h"
  17. namespace Carbon::Check {
// Initializes the checking context. All referenced objects (tokens, emitter,
// parse tree, IR, and optional log stream) are borrowed and must outlive
// this context.
Context::Context(const TokenizedBuffer& tokens,
                 DiagnosticEmitter<Parse::Node>& emitter,
                 const Parse::Tree& parse_tree, SemIR::File& semantics_ir,
                 llvm::raw_ostream* vlog_stream)
    : tokens_(&tokens),
      emitter_(&emitter),
      parse_tree_(&parse_tree),
      semantics_ir_(&semantics_ir),
      vlog_stream_(vlog_stream),
      node_stack_(parse_tree, vlog_stream),
      node_block_stack_("node_block_stack_", semantics_ir, vlog_stream),
      params_or_args_stack_("params_or_args_stack_", semantics_ir, vlog_stream),
      args_type_info_stack_("args_type_info_stack_", semantics_ir, vlog_stream),
      declaration_name_stack_(this) {
  // Inserts the "Error" and "Type" types as "used types" so that
  // canonicalization can skip them. We don't emit either for lowering.
  canonical_types_.insert({SemIR::NodeId::BuiltinError, SemIR::TypeId::Error});
  canonical_types_.insert(
      {SemIR::NodeId::BuiltinTypeType, SemIR::TypeId::TypeType});
}
// Emits a placeholder diagnostic for semantics checking that hasn't been
// implemented yet. Always returns false so handlers can `return` it directly.
auto Context::TODO(Parse::Node parse_node, std::string label) -> bool {
  CARBON_DIAGNOSTIC(SemanticsTodo, Error, "Semantics TODO: `{0}`.",
                    std::string);
  emitter_->Emit(parse_node, SemanticsTodo, std::move(label));
  return false;
}
// Verifies that per-scope state was fully unwound by the time checking
// finished. Fails a CHECK (with the leftover count) if any stack still has
// entries.
auto Context::VerifyOnFinish() -> void {
  // Information in all the various context objects should be cleaned up as
  // various pieces of context go out of scope. At this point, nothing should
  // remain.
  // node_stack_ will still contain top-level entities.
  CARBON_CHECK(name_lookup_.empty()) << name_lookup_.size();
  CARBON_CHECK(scope_stack_.empty()) << scope_stack_.size();
  CARBON_CHECK(node_block_stack_.empty()) << node_block_stack_.size();
  CARBON_CHECK(params_or_args_stack_.empty()) << params_or_args_stack_.size();
}
  54. auto Context::AddNode(SemIR::Node node) -> SemIR::NodeId {
  55. return AddNodeToBlock(node_block_stack_.PeekForAdd(), node);
  56. }
  57. auto Context::AddNodeToBlock(SemIR::NodeBlockId block, SemIR::Node node)
  58. -> SemIR::NodeId {
  59. CARBON_VLOG() << "AddNode " << block << ": " << node << "\n";
  60. return semantics_ir_->AddNode(block, node);
  61. }
  62. auto Context::AddNodeAndPush(Parse::Node parse_node, SemIR::Node node) -> void {
  63. auto node_id = AddNode(node);
  64. node_stack_.Push(parse_node, node_id);
  65. }
// Diagnoses a name declared twice in the same scope, attaching a note that
// points at the previous declaration's parse node.
auto Context::DiagnoseDuplicateName(Parse::Node parse_node,
                                    SemIR::NodeId prev_def_id) -> void {
  CARBON_DIAGNOSTIC(NameDeclarationDuplicate, Error,
                    "Duplicate name being declared in the same scope.");
  CARBON_DIAGNOSTIC(NameDeclarationPrevious, Note,
                    "Name is previously declared here.");
  auto prev_def = semantics_ir_->GetNode(prev_def_id);
  emitter_->Build(parse_node, NameDeclarationDuplicate)
      .Note(prev_def.parse_node(), NameDeclarationPrevious)
      .Emit();
}
  77. auto Context::DiagnoseNameNotFound(Parse::Node parse_node,
  78. SemIR::StringId name_id) -> void {
  79. CARBON_DIAGNOSTIC(NameNotFound, Error, "Name `{0}` not found.",
  80. llvm::StringRef);
  81. emitter_->Emit(parse_node, NameNotFound, semantics_ir_->GetString(name_id));
  82. }
  83. auto Context::AddNameToLookup(Parse::Node name_node, SemIR::StringId name_id,
  84. SemIR::NodeId target_id) -> void {
  85. if (current_scope().names.insert(name_id).second) {
  86. name_lookup_[name_id].push_back(target_id);
  87. } else {
  88. DiagnoseDuplicateName(name_node, name_lookup_[name_id].back());
  89. }
  90. }
  91. auto Context::LookupName(Parse::Node parse_node, SemIR::StringId name_id,
  92. SemIR::NameScopeId scope_id, bool print_diagnostics)
  93. -> SemIR::NodeId {
  94. if (scope_id == SemIR::NameScopeId::Invalid) {
  95. auto it = name_lookup_.find(name_id);
  96. if (it == name_lookup_.end()) {
  97. if (print_diagnostics) {
  98. DiagnoseNameNotFound(parse_node, name_id);
  99. }
  100. return SemIR::NodeId::BuiltinError;
  101. }
  102. CARBON_CHECK(!it->second.empty())
  103. << "Should have been erased: " << semantics_ir_->GetString(name_id);
  104. // TODO: Check for ambiguous lookups.
  105. return it->second.back();
  106. } else {
  107. const auto& scope = semantics_ir_->GetNameScope(scope_id);
  108. auto it = scope.find(name_id);
  109. if (it == scope.end()) {
  110. if (print_diagnostics) {
  111. DiagnoseNameNotFound(parse_node, name_id);
  112. }
  113. return SemIR::NodeId::BuiltinError;
  114. }
  115. return it->second;
  116. }
  117. }
// Enters a new lexical scope for name lookup.
auto Context::PushScope() -> void { scope_stack_.push_back({}); }
  119. auto Context::PopScope() -> void {
  120. auto scope = scope_stack_.pop_back_val();
  121. for (const auto& str_id : scope.names) {
  122. auto it = name_lookup_.find(str_id);
  123. if (it->second.size() == 1) {
  124. // Erase names that no longer resolve.
  125. name_lookup_.erase(it);
  126. } else {
  127. it->second.pop_back();
  128. }
  129. }
  130. }
// Shared implementation for the AddDominatedBlockAndBranch* entry points:
// allocates a new block dominated by the current position and appends a
// `BranchNode` targeting it (forwarding any extra args to the branch's Make).
// If the current position is unreachable, nothing is created and the
// Unreachable sentinel is returned instead.
template <typename BranchNode, typename... Args>
static auto AddDominatedBlockAndBranchImpl(Context& context,
                                           Parse::Node parse_node, Args... args)
    -> SemIR::NodeBlockId {
  if (!context.node_block_stack().is_current_block_reachable()) {
    return SemIR::NodeBlockId::Unreachable;
  }
  auto block_id = context.semantics_ir().AddNodeBlock();
  context.AddNode(BranchNode::Make(parse_node, block_id, args...));
  return block_id;
}
// Adds a new block dominated by the current position, with an unconditional
// branch to it from the current block.
auto Context::AddDominatedBlockAndBranch(Parse::Node parse_node)
    -> SemIR::NodeBlockId {
  return AddDominatedBlockAndBranchImpl<SemIR::Node::Branch>(*this, parse_node);
}
// Adds a new block dominated by the current position, with a branch that
// carries `arg_id` as a block argument.
auto Context::AddDominatedBlockAndBranchWithArg(Parse::Node parse_node,
                                                SemIR::NodeId arg_id)
    -> SemIR::NodeBlockId {
  return AddDominatedBlockAndBranchImpl<SemIR::Node::BranchWithArg>(
      *this, parse_node, arg_id);
}
// Adds a new block dominated by the current position, with a conditional
// branch to it taken when `cond_id` is true.
auto Context::AddDominatedBlockAndBranchIf(Parse::Node parse_node,
                                           SemIR::NodeId cond_id)
    -> SemIR::NodeBlockId {
  return AddDominatedBlockAndBranchImpl<SemIR::Node::BranchIf>(
      *this, parse_node, cond_id);
}
// Creates a block into which the given blocks all converge, appending an
// unconditional branch to it from each reachable predecessor, and pushes it
// as the current block. If every predecessor is unreachable, no block is
// allocated and the Unreachable sentinel is pushed instead.
auto Context::AddConvergenceBlockAndPush(
    Parse::Node parse_node, std::initializer_list<SemIR::NodeBlockId> blocks)
    -> void {
  CARBON_CHECK(blocks.size() >= 2) << "no convergence";
  SemIR::NodeBlockId new_block_id = SemIR::NodeBlockId::Unreachable;
  for (SemIR::NodeBlockId block_id : blocks) {
    if (block_id != SemIR::NodeBlockId::Unreachable) {
      // Lazily allocate the converged block on the first reachable
      // predecessor.
      if (new_block_id == SemIR::NodeBlockId::Unreachable) {
        new_block_id = semantics_ir().AddNodeBlock();
      }
      AddNodeToBlock(block_id,
                     SemIR::Node::Branch::Make(parse_node, new_block_id));
    }
  }
  node_block_stack().Push(new_block_id);
}
// Like AddConvergenceBlockAndPush, but each predecessor passes a value into
// the converged block via BranchWithArg. Pushes the new block and returns a
// BlockArg node representing the converged value. The result's type is taken
// from the first entry's argument.
auto Context::AddConvergenceBlockWithArgAndPush(
    Parse::Node parse_node,
    std::initializer_list<std::pair<SemIR::NodeBlockId, SemIR::NodeId>>
        blocks_and_args) -> SemIR::NodeId {
  CARBON_CHECK(blocks_and_args.size() >= 2) << "no convergence";
  SemIR::NodeBlockId new_block_id = SemIR::NodeBlockId::Unreachable;
  for (auto [block_id, arg_id] : blocks_and_args) {
    if (block_id != SemIR::NodeBlockId::Unreachable) {
      // Lazily allocate the converged block on the first reachable
      // predecessor.
      if (new_block_id == SemIR::NodeBlockId::Unreachable) {
        new_block_id = semantics_ir().AddNodeBlock();
      }
      AddNodeToBlock(block_id, SemIR::Node::BranchWithArg::Make(
                                   parse_node, new_block_id, arg_id));
    }
  }
  node_block_stack().Push(new_block_id);
  // Acquire the result value.
  SemIR::TypeId result_type_id =
      semantics_ir().GetNode(blocks_and_args.begin()->second).type_id();
  return AddNode(
      SemIR::Node::BlockArg::Make(parse_node, result_type_id, new_block_id));
}
// Add the current code block to the enclosing function.
auto Context::AddCurrentCodeBlockToFunction() -> void {
  CARBON_CHECK(!node_block_stack().empty()) << "no current code block";
  CARBON_CHECK(!return_scope_stack().empty()) << "no current function";
  if (!node_block_stack().is_current_block_reachable()) {
    // Don't include unreachable blocks in the function.
    return;
  }
  // The back of the return scope stack is the node of the innermost enclosing
  // function's declaration; resolve it to the function itself.
  auto function_id = semantics_ir()
                         .GetNode(return_scope_stack().back())
                         .GetAsFunctionDeclaration();
  semantics_ir()
      .GetFunction(function_id)
      .body_block_ids.push_back(node_block_stack().PeekForAdd());
}
// Returns whether code at the current position would be reachable.
auto Context::is_current_position_reachable() -> bool {
  switch (auto block_id = node_block_stack().Peek(); block_id.index) {
    case SemIR::NodeBlockId::Unreachable.index: {
      return false;
    }
    case SemIR::NodeBlockId::Invalid.index: {
      // NOTE(review): Invalid appears to mean the current block hasn't been
      // allocated yet; it's treated as reachable — confirm against
      // node_block_stack's lazy-allocation behavior.
      return true;
    }
    default: {
      // Our current position is at the end of a real block. That position is
      // reachable unless the previous instruction is a terminator instruction.
      const auto& block_contents = semantics_ir().GetNodeBlock(block_id);
      if (block_contents.empty()) {
        return true;
      }
      const auto& last_node = semantics_ir().GetNode(block_contents.back());
      return last_node.kind().terminator_kind() !=
             SemIR::TerminatorKind::Terminator;
    }
  }
}
// Initializes `target_id` from `value_id`, implicitly converting the value
// to the target's type first. Returns the node that performs (or whose
// result is used for) the initialization.
auto Context::Initialize(Parse::Node parse_node, SemIR::NodeId target_id,
                         SemIR::NodeId value_id) -> SemIR::NodeId {
  // Implicitly convert the value to the type of the target.
  auto type_id = semantics_ir().GetNode(target_id).type_id();
  auto expr_id = ImplicitAsRequired(parse_node, value_id, type_id);
  SemIR::Node expr = semantics_ir().GetNode(expr_id);

  // Perform initialization now that we have an expression of the right type.
  switch (SemIR::GetExpressionCategory(semantics_ir(), expr_id)) {
    case SemIR::ExpressionCategory::NotExpression:
      CARBON_FATAL() << "Converting non-expression node " << expr
                     << " to initializing expression";

    case SemIR::ExpressionCategory::DurableReference:
    case SemIR::ExpressionCategory::EphemeralReference:
      // The design uses a custom "copy initialization" process here. We model
      // that as value binding followed by direct initialization.
      //
      // TODO: Determine whether this is observably different from the design,
      // and change either the toolchain or the design so they match.
      return AddNode(SemIR::Node::BindValue::Make(expr.parse_node(),
                                                  expr.type_id(), expr_id));

    case SemIR::ExpressionCategory::Value:
      // TODO: For class types, use an interface to determine how to perform
      // this operation.
      return expr_id;

    case SemIR::ExpressionCategory::Initializing:
      // An initializing expression can initialize the target in place.
      MarkInitializerFor(expr_id, target_id);
      return expr_id;
  }
}
// Converts the given expression to a value expression of the same type:
// initializing expressions are materialized into a temporary first, then
// (like references) value-bound.
auto Context::ConvertToValueExpression(SemIR::NodeId expr_id) -> SemIR::NodeId {
  switch (SemIR::GetExpressionCategory(semantics_ir(), expr_id)) {
    case SemIR::ExpressionCategory::NotExpression:
      CARBON_FATAL() << "Converting non-expression node "
                     << semantics_ir().GetNode(expr_id)
                     << " to value expression";

    case SemIR::ExpressionCategory::Initializing:
      // Commit to using a temporary for this initializing expression.
      // TODO: Don't create a temporary if the initializing representation is
      // already a value representation.
      expr_id = FinalizeTemporary(expr_id, /*discarded=*/false);
      [[fallthrough]];

    case SemIR::ExpressionCategory::DurableReference:
    case SemIR::ExpressionCategory::EphemeralReference: {
      // TODO: Support types with custom value representations.
      SemIR::Node expr = semantics_ir().GetNode(expr_id);
      return AddNode(SemIR::Node::BindValue::Make(expr.parse_node(),
                                                  expr.type_id(), expr_id));
    }

    case SemIR::ExpressionCategory::Value:
      return expr_id;
  }
}
// Materializes a temporary to hold the result of the given initializing
// expression and returns a Temporary node referring to it. When `discarded`
// is true, may instead return Invalid rather than inventing a temporary
// that would immediately be thrown away.
auto Context::FinalizeTemporary(SemIR::NodeId init_id, bool discarded)
    -> SemIR::NodeId {
  // TODO: See if we can refactor this with MarkInitializerFor once recursion
  // through struct and tuple values is properly handled.
  auto orig_init_id = init_id;
  // Walk through stub references to find the node that actually performs the
  // initialization.
  while (true) {
    SemIR::Node init = semantics_ir().GetNode(init_id);
    CARBON_CHECK(SemIR::GetExpressionCategory(semantics_ir(), init_id) ==
                 SemIR::ExpressionCategory::Initializing)
        << "Can only materialize initializing expressions, found " << init;
    switch (init.kind()) {
      default:
        CARBON_FATAL() << "Initialization from unexpected node " << init;

      case SemIR::NodeKind::StructValue:
      case SemIR::NodeKind::TupleValue:
        CARBON_FATAL() << init << " is not modeled as initializing yet";

      case SemIR::NodeKind::StubReference: {
        init_id = init.GetAsStubReference();
        continue;
      }

      case SemIR::NodeKind::Call: {
        auto [refs_id, callee_id] = init.GetAsCall();
        if (semantics_ir().GetFunction(callee_id).return_slot_id.is_valid()) {
          // The return slot should have a materialized temporary in it.
          auto temporary_id = semantics_ir().GetNodeBlock(refs_id).back();
          CARBON_CHECK(semantics_ir().GetNode(temporary_id).kind() ==
                       SemIR::NodeKind::TemporaryStorage)
              << "Return slot for function call does not contain a temporary; "
              << "initialized multiple times? Have "
              << semantics_ir().GetNode(temporary_id);
          return AddNode(SemIR::Node::Temporary::Make(
              init.parse_node(), init.type_id(), temporary_id, orig_init_id));
        }
        if (discarded) {
          // Don't invent a temporary that we're going to discard.
          return SemIR::NodeId::Invalid;
        }
        // The function has no return slot, but we want to produce a temporary
        // object. Materialize one now.
        // TODO: Consider using an invalid ID to mean that we immediately
        // materialize and initialize a temporary, rather than two separate
        // nodes.
        auto temporary_id = AddNode(SemIR::Node::TemporaryStorage::Make(
            init.parse_node(), init.type_id()));
        return AddNode(SemIR::Node::Temporary::Make(
            init.parse_node(), init.type_id(), temporary_id, init_id));
      }
    }
  }
}
// Points the given initializing expression at `target_id`, so that it
// initializes the target directly instead of a tentatively-created
// temporary.
auto Context::MarkInitializerFor(SemIR::NodeId init_id, SemIR::NodeId target_id)
    -> void {
  // Walk through stub references to find the node that actually performs the
  // initialization.
  while (true) {
    SemIR::Node init = semantics_ir().GetNode(init_id);
    CARBON_CHECK(SemIR::GetExpressionCategory(semantics_ir(), init_id) ==
                 SemIR::ExpressionCategory::Initializing)
        << "initialization from non-initializing node " << init;
    switch (init.kind()) {
      default:
        CARBON_FATAL() << "Initialization from unexpected node " << init;

      case SemIR::NodeKind::StructValue:
      case SemIR::NodeKind::TupleValue:
        CARBON_FATAL() << init << " is not modeled as initializing yet";

      case SemIR::NodeKind::StubReference:
        init_id = init.GetAsStubReference();
        continue;

      case SemIR::NodeKind::Call: {
        // If the callee has a return slot, point it at our target.
        auto [refs_id, callee_id] = init.GetAsCall();
        if (semantics_ir().GetFunction(callee_id).return_slot_id.is_valid()) {
          // Replace the return slot with our given target, and remove the
          // tentatively-created temporary.
          auto temporary_id = std::exchange(
              semantics_ir().GetNodeBlock(refs_id).back(), target_id);
          auto temporary = semantics_ir().GetNode(temporary_id);
          CARBON_CHECK(temporary.kind() == SemIR::NodeKind::TemporaryStorage)
              << "Return slot for function call does not contain a temporary; "
              << "initialized multiple times? Have " << temporary;
          // Neutralize the now-unused temporary storage node.
          semantics_ir().ReplaceNode(
              temporary_id, SemIR::Node::NoOp::Make(temporary.parse_node()));
        }
        return;
      }
    }
  }
}
  370. auto Context::HandleDiscardedExpression(SemIR::NodeId expr_id) -> void {
  371. // If we discard an initializing expression, materialize it first.
  372. if (SemIR::GetExpressionCategory(semantics_ir(), expr_id) ==
  373. SemIR::ExpressionCategory::Initializing) {
  374. FinalizeTemporary(expr_id, /*discarded=*/true);
  375. }
  376. // TODO: This will eventually need to do some "do not discard" analysis.
  377. (void)expr_id;
  378. }
// Checks implicit conversion of each argument in `arg_refs_id` to the type
// of the corresponding parameter in `param_refs_id`. Two modes:
// - `diagnostic == nullptr`: conversions are performed, rewriting the
//   argument block in place.
// - `diagnostic != nullptr`: no IR is changed; failures are attached to the
//   diagnostic as notes (used when reporting why a callee can't be used).
// Returns false on count or type mismatch.
auto Context::ImplicitAsForArgs(
    SemIR::NodeBlockId arg_refs_id, Parse::Node param_parse_node,
    SemIR::NodeBlockId param_refs_id,
    DiagnosticEmitter<Parse::Node>::DiagnosticBuilder* diagnostic) -> bool {
  // If both arguments and parameters are empty, return quickly. Otherwise,
  // we'll fetch both so that errors are consistent.
  if (arg_refs_id == SemIR::NodeBlockId::Empty &&
      param_refs_id == SemIR::NodeBlockId::Empty) {
    return true;
  }

  auto& arg_refs = semantics_ir_->GetNodeBlock(arg_refs_id);
  const auto& param_refs = semantics_ir_->GetNodeBlock(param_refs_id);

  // If sizes mismatch, fail early.
  if (arg_refs.size() != param_refs.size()) {
    CARBON_CHECK(diagnostic != nullptr) << "Should have validated first";
    CARBON_DIAGNOSTIC(CallArgCountMismatch, Note,
                      "Function cannot be used: Received {0} argument(s), but "
                      "require {1} argument(s).",
                      int, int);
    diagnostic->Note(param_parse_node, CallArgCountMismatch, arg_refs.size(),
                     param_refs.size());
    return false;
  }

  // Check type conversions per-element.
  // TODO: arg_ir_id is passed so that implicit conversions can be inserted.
  // It's currently not supported, but will be needed.
  for (auto [i, value_id, param_ref] : llvm::enumerate(arg_refs, param_refs)) {
    auto as_type_id = semantics_ir_->GetNode(param_ref).type_id();
    // Only request an output value (in-place conversion) when not diagnosing.
    if (ImplicitAsImpl(value_id, as_type_id,
                       diagnostic == nullptr ? &value_id : nullptr) ==
        ImplicitAsKind::Incompatible) {
      CARBON_CHECK(diagnostic != nullptr) << "Should have validated first";
      CARBON_DIAGNOSTIC(CallArgTypeMismatch, Note,
                        "Function cannot be used: Cannot implicitly convert "
                        "argument {0} from `{1}` to `{2}`.",
                        size_t, std::string, std::string);
      diagnostic->Note(param_parse_node, CallArgTypeMismatch, i,
                       semantics_ir_->StringifyType(
                           semantics_ir_->GetNode(value_id).type_id()),
                       semantics_ir_->StringifyType(as_type_id));
      return false;
    }

    // TODO: Convert to the proper expression category. For now, we assume
    // parameters are all `let` bindings.
    if (!diagnostic) {
      // TODO: Insert the conversion in the proper place in the node block.
      arg_refs[i] = ConvertToValueExpression(value_id);
    }
  }
  return true;
}
// Converts `value_id` to `as_type_id`, diagnosing on failure. On failure,
// ImplicitAsImpl will have set the output to BuiltinError, so checking can
// continue with an error value.
auto Context::ImplicitAsRequired(Parse::Node parse_node, SemIR::NodeId value_id,
                                 SemIR::TypeId as_type_id) -> SemIR::NodeId {
  SemIR::NodeId output_value_id = value_id;
  if (ImplicitAsImpl(value_id, as_type_id, &output_value_id) ==
      ImplicitAsKind::Incompatible) {
    // Only error when the system is trying to use the result.
    CARBON_DIAGNOSTIC(ImplicitAsConversionFailure, Error,
                      "Cannot implicitly convert from `{0}` to `{1}`.",
                      std::string, std::string);
    emitter_
        ->Build(parse_node, ImplicitAsConversionFailure,
                semantics_ir_->StringifyType(
                    semantics_ir_->GetNode(value_id).type_id()),
                semantics_ir_->StringifyType(as_type_id))
        .Emit();
  }
  return output_value_id;
}
// Core of implicit "as" conversion. Determines whether `value_id` can
// implicitly convert to `as_type_id`; when `output_value_id` is non-null,
// writes the (possibly converted) value into it. Returns Identical when no
// change is needed, Compatible when a conversion applies, and Incompatible
// otherwise (writing BuiltinError into the output).
auto Context::ImplicitAsImpl(SemIR::NodeId value_id, SemIR::TypeId as_type_id,
                             SemIR::NodeId* output_value_id) -> ImplicitAsKind {
  // Start by making sure both sides are valid. If any part is invalid, the
  // result is invalid and we shouldn't error.
  if (value_id == SemIR::NodeId::BuiltinError) {
    // If the value is invalid, we can't do much, but do "succeed".
    return ImplicitAsKind::Identical;
  }
  auto value = semantics_ir_->GetNode(value_id);
  auto value_type_id = value.type_id();
  if (value_type_id == SemIR::TypeId::Error) {
    // Although the source type is invalid, this still changes the value.
    if (output_value_id != nullptr) {
      *output_value_id = SemIR::NodeId::BuiltinError;
    }
    return ImplicitAsKind::Compatible;
  }
  if (as_type_id == SemIR::TypeId::Error) {
    // Although the target type is invalid, this still changes the value.
    if (output_value_id != nullptr) {
      *output_value_id = SemIR::NodeId::BuiltinError;
    }
    return ImplicitAsKind::Compatible;
  }

  if (value_type_id == as_type_id) {
    // Type doesn't need to change.
    return ImplicitAsKind::Identical;
  }

  // Tuple-to-array conversion: a tuple whose elements all have the array's
  // element type, with matching arity, converts to the array type.
  auto as_type = semantics_ir_->GetTypeAllowBuiltinTypes(as_type_id);
  auto as_type_node = semantics_ir_->GetNode(as_type);
  if (as_type_node.kind() == SemIR::NodeKind::ArrayType) {
    auto [bound_node_id, element_type_id] = as_type_node.GetAsArrayType();
    // To resolve lambda issue.
    auto element_type = element_type_id;
    auto value_type_node = semantics_ir_->GetNode(
        semantics_ir_->GetTypeAllowBuiltinTypes(value_type_id));
    if (value_type_node.kind() == SemIR::NodeKind::TupleType) {
      auto tuple_type_block_id = value_type_node.GetAsTupleType();
      const auto& type_block = semantics_ir_->GetTypeBlock(tuple_type_block_id);
      if (type_block.size() ==
              semantics_ir_->GetArrayBoundValue(bound_node_id) &&
          std::all_of(type_block.begin(), type_block.end(),
                      [&](auto type) { return type == element_type; })) {
        if (output_value_id != nullptr) {
          // TODO: We should convert an initializing expression of tuple type
          // to an initializing expression of array type.
          value_id = ConvertToValueExpression(value_id);
          *output_value_id = AddNode(SemIR::Node::ArrayValue::Make(
              value.parse_node(), as_type_id, value_id));
        }
        return ImplicitAsKind::Compatible;
      }
    }
  }

  if (as_type_id == SemIR::TypeId::TypeType) {
    // A tuple of types converts to the corresponding tuple type.
    if (value.kind() == SemIR::NodeKind::TupleValue) {
      auto tuple_block_id = value.GetAsTupleValue();
      llvm::SmallVector<SemIR::TypeId> type_ids;
      // If it is empty tuple type, we don't fetch anything.
      if (tuple_block_id != SemIR::NodeBlockId::Empty) {
        const auto& tuple_block = semantics_ir_->GetNodeBlock(tuple_block_id);
        for (auto tuple_node_id : tuple_block) {
          // TODO: Eventually ExpressionAsType will insert implicit cast
          // instructions. When that happens, this will need to verify the full
          // tuple conversion will work before calling it.
          type_ids.push_back(
              ExpressionAsType(value.parse_node(), tuple_node_id));
        }
      }
      auto tuple_type_id =
          CanonicalizeTupleType(value.parse_node(), std::move(type_ids));
      if (output_value_id != nullptr) {
        *output_value_id =
            semantics_ir_->GetTypeAllowBuiltinTypes(tuple_type_id);
      }
      return ImplicitAsKind::Compatible;
    }
    // When converting `{}` to a type, the result is `{} as Type`.
    if (value.kind() == SemIR::NodeKind::StructValue &&
        value.GetAsStructValue() == SemIR::NodeBlockId::Empty) {
      if (output_value_id != nullptr) {
        *output_value_id = semantics_ir_->GetType(value_type_id);
      }
      return ImplicitAsKind::Compatible;
    }
  }

  // TODO: Handle ImplicitAs for compatible structs and tuples.
  if (output_value_id != nullptr) {
    *output_value_id = SemIR::NodeId::BuiltinError;
  }
  return ImplicitAsKind::Incompatible;
}
// Starts tracking a new parameter or argument list.
auto Context::ParamOrArgStart() -> void { params_or_args_stack_.Push(); }
// Handles a comma in a parameter or argument list by saving the entry that
// precedes it.
auto Context::ParamOrArgComma(bool for_args) -> void {
  ParamOrArgSave(for_args);
}
  544. auto Context::ParamOrArgEnd(bool for_args, Parse::NodeKind start_kind)
  545. -> SemIR::NodeBlockId {
  546. if (parse_tree_->node_kind(node_stack_.PeekParseNode()) != start_kind) {
  547. ParamOrArgSave(for_args);
  548. }
  549. return params_or_args_stack_.Pop();
  550. }
// Pops the expression on top of the node stack and appends it to the
// params/args block currently being built.
auto Context::ParamOrArgSave(bool for_args) -> void {
  auto [entry_parse_node, entry_node_id] =
      node_stack_.PopExpressionWithParseNode();
  if (for_args) {
    // For an argument, we add a stub reference to the expression on the top of
    // the stack. There may not be anything on the IR prior to this.
    entry_node_id = AddNode(SemIR::Node::StubReference::Make(
        entry_parse_node, semantics_ir_->GetNode(entry_node_id).type_id(),
        entry_node_id));
  }

  // Save the param or arg ID.
  auto& params_or_args =
      semantics_ir_->GetNodeBlock(params_or_args_stack_.PeekForAdd());
  params_or_args.push_back(entry_node_id);
}
// Shared implementation of type canonicalization. `profile_type` folds the
// type's identity into a FoldingSetNodeID; `make_node` produces the node for
// the type, and is only invoked when no canonical equivalent already exists.
auto Context::CanonicalizeTypeImpl(
    SemIR::NodeKind kind,
    llvm::function_ref<void(llvm::FoldingSetNodeID& canonical_id)> profile_type,
    llvm::function_ref<SemIR::NodeId()> make_node) -> SemIR::TypeId {
  llvm::FoldingSetNodeID canonical_id;
  kind.Profile(canonical_id);
  profile_type(canonical_id);

  void* insert_pos;
  auto* node =
      canonical_type_nodes_.FindNodeOrInsertPos(canonical_id, insert_pos);
  if (node != nullptr) {
    // An equivalent type already exists; reuse it.
    return node->type_id();
  }

  auto node_id = make_node();
  auto type_id = semantics_ir_->AddType(node_id);
  CARBON_CHECK(canonical_types_.insert({node_id, type_id}).second);
  // TypeNode storage is owned separately because the folding set holds raw
  // pointers.
  type_node_storage_.push_back(
      std::make_unique<TypeNode>(canonical_id, type_id));

  // In a debug build, check that our insertion position is still valid. It
  // could have been invalidated by a misbehaving `make_node`.
  CARBON_DCHECK([&] {
    void* check_insert_pos;
    auto* check_node = canonical_type_nodes_.FindNodeOrInsertPos(
        canonical_id, check_insert_pos);
    return !check_node && insert_pos == check_insert_pos;
  }()) << "Type was created recursively during canonicalization";

  canonical_type_nodes_.InsertNode(type_node_storage_.back().get(), insert_pos);
  return type_id;
}
  595. // Compute a fingerprint for a tuple type, for use as a key in a folding set.
  596. static auto ProfileTupleType(const llvm::SmallVector<SemIR::TypeId>& type_ids,
  597. llvm::FoldingSetNodeID& canonical_id) -> void {
  598. for (const auto& type_id : type_ids) {
  599. canonical_id.AddInteger(type_id.index);
  600. }
  601. }
// Compute a fingerprint for a type, for use as a key in a folding set. Each
// type node kind folds in exactly the data that determines its identity.
static auto ProfileType(Context& semantics_context, SemIR::Node node,
                        llvm::FoldingSetNodeID& canonical_id) -> void {
  switch (node.kind()) {
    case SemIR::NodeKind::ArrayType: {
      // Identity is (bound value, element type).
      auto [bound_id, element_type_id] = node.GetAsArrayType();
      canonical_id.AddInteger(
          semantics_context.semantics_ir().GetArrayBoundValue(bound_id));
      canonical_id.AddInteger(element_type_id.index);
      break;
    }
    case SemIR::NodeKind::Builtin:
      canonical_id.AddInteger(node.GetAsBuiltin().AsInt());
      break;
    case SemIR::NodeKind::CrossReference: {
      // TODO: Cross-references should be canonicalized by looking at their
      // target rather than treating them as new unique types.
      auto [xref_id, node_id] = node.GetAsCrossReference();
      canonical_id.AddInteger(xref_id.index);
      canonical_id.AddInteger(node_id.index);
      break;
    }
    case SemIR::NodeKind::ConstType:
      // `const (const T)` profiles the same as `const T`.
      canonical_id.AddInteger(
          semantics_context.GetUnqualifiedType(node.GetAsConstType()).index);
      break;
    case SemIR::NodeKind::PointerType:
      canonical_id.AddInteger(node.GetAsPointerType().index);
      break;
    case SemIR::NodeKind::StructType: {
      // Identity is the ordered (name, type) pairs of the fields.
      auto refs =
          semantics_context.semantics_ir().GetNodeBlock(node.GetAsStructType());
      for (const auto& ref_id : refs) {
        auto ref = semantics_context.semantics_ir().GetNode(ref_id);
        auto [name_id, type_id] = ref.GetAsStructTypeField();
        canonical_id.AddInteger(name_id.index);
        canonical_id.AddInteger(type_id.index);
      }
      break;
    }
    case SemIR::NodeKind::StubReference: {
      // We rely on stub references not referring to each other to ensure we
      // only recurse once here.
      auto inner =
          semantics_context.semantics_ir().GetNode(node.GetAsStubReference());
      CARBON_CHECK(inner.kind() != SemIR::NodeKind::StubReference)
          << "A stub reference should never refer to another stub reference.";
      ProfileType(semantics_context, inner, canonical_id);
      break;
    }
    case SemIR::NodeKind::TupleType:
      ProfileTupleType(
          semantics_context.semantics_ir().GetTypeBlock(node.GetAsTupleType()),
          canonical_id);
      break;
    default:
      CARBON_FATAL() << "Unexpected type node " << node;
  }
}
  661. auto Context::CanonicalizeTypeAndAddNodeIfNew(SemIR::Node node)
  662. -> SemIR::TypeId {
  663. auto profile_node = [&](llvm::FoldingSetNodeID& canonical_id) {
  664. ProfileType(*this, node, canonical_id);
  665. };
  666. auto make_node = [&] { return AddNode(node); };
  667. return CanonicalizeTypeImpl(node.kind(), profile_node, make_node);
  668. }
  669. auto Context::CanonicalizeType(SemIR::NodeId node_id) -> SemIR::TypeId {
  670. auto it = canonical_types_.find(node_id);
  671. if (it != canonical_types_.end()) {
  672. return it->second;
  673. }
  674. auto node = semantics_ir_->GetNode(node_id);
  675. auto profile_node = [&](llvm::FoldingSetNodeID& canonical_id) {
  676. ProfileType(*this, node, canonical_id);
  677. };
  678. auto make_node = [&] { return node_id; };
  679. return CanonicalizeTypeImpl(node.kind(), profile_node, make_node);
  680. }
  681. auto Context::CanonicalizeStructType(Parse::Node parse_node,
  682. SemIR::NodeBlockId refs_id)
  683. -> SemIR::TypeId {
  684. return CanonicalizeTypeAndAddNodeIfNew(SemIR::Node::StructType::Make(
  685. parse_node, SemIR::TypeId::TypeType, refs_id));
  686. }
// Canonicalizes a tuple type with the given element types. Takes the element
// list by rvalue because it's moved into a newly allocated type block when
// (and only when) the type is new.
auto Context::CanonicalizeTupleType(Parse::Node parse_node,
                                    llvm::SmallVector<SemIR::TypeId>&& type_ids)
    -> SemIR::TypeId {
  // Defer allocating a SemIR::TypeBlockId until we know this is a new type.
  auto profile_tuple = [&](llvm::FoldingSetNodeID& canonical_id) {
    ProfileTupleType(type_ids, canonical_id);
  };
  auto make_tuple_node = [&] {
    auto type_block_id = semantics_ir_->AddTypeBlock();
    auto& type_block = semantics_ir_->GetTypeBlock(type_block_id);
    type_block = std::move(type_ids);
    return AddNode(SemIR::Node::TupleType::Make(
        parse_node, SemIR::TypeId::TypeType, type_block_id));
  };
  return CanonicalizeTypeImpl(SemIR::NodeKind::TupleType, profile_tuple,
                              make_tuple_node);
}
  704. auto Context::GetPointerType(Parse::Node parse_node,
  705. SemIR::TypeId pointee_type_id) -> SemIR::TypeId {
  706. return CanonicalizeTypeAndAddNodeIfNew(SemIR::Node::PointerType::Make(
  707. parse_node, SemIR::TypeId::TypeType, pointee_type_id));
  708. }
  709. auto Context::GetUnqualifiedType(SemIR::TypeId type_id) -> SemIR::TypeId {
  710. SemIR::Node type_node =
  711. semantics_ir_->GetNode(semantics_ir_->GetTypeAllowBuiltinTypes(type_id));
  712. if (type_node.kind() == SemIR::NodeKind::ConstType) {
  713. return type_node.GetAsConstType();
  714. }
  715. return type_id;
  716. }
// Prints the context's stacks for debugging, e.g. as part of a crash dump
// when a stack-discipline error is detected.
auto Context::PrintForStackDump(llvm::raw_ostream& output) const -> void {
  node_stack_.PrintForStackDump(output);
  node_block_stack_.PrintForStackDump(output);
  params_or_args_stack_.PrintForStackDump(output);
  args_type_info_stack_.PrintForStackDump(output);
}
  723. } // namespace Carbon::Check