pattern_match.cpp 15 KB

  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#include "toolchain/check/pattern_match.h"

#include <functional>
#include <utility>
#include <vector>

#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "toolchain/base/kind_switch.h"
#include "toolchain/check/context.h"
#include "toolchain/check/convert.h"
  12. namespace Carbon::Check {
  13. // Returns a best-effort name for the given ParamPattern, suitable for use in
  14. // IR pretty-printing.
  15. // TODO: Resolve overlap with SemIR::Function::ParamPatternInfo::GetNameId
  16. template <typename ParamPattern>
  17. static auto GetPrettyName(Context& context, ParamPattern param_pattern)
  18. -> SemIR::NameId {
  19. if (context.insts().Is<SemIR::ReturnSlotPattern>(
  20. param_pattern.subpattern_id)) {
  21. return SemIR::NameId::ReturnSlot;
  22. }
  23. if (auto binding_pattern = context.insts().TryGetAs<SemIR::AnyBindingPattern>(
  24. param_pattern.subpattern_id)) {
  25. return context.entity_names().Get(binding_pattern->entity_name_id).name_id;
  26. }
  27. return SemIR::NameId::Invalid;
  28. }
namespace {
// Selects between the different kinds of pattern matching.
enum class MatchKind : uint8_t {
  // Caller pattern matching occurs on the caller side of a function call, and
  // is responsible for matching the argument expression against the portion
  // of the pattern above the ParamPattern insts.
  Caller,

  // Callee pattern matching occurs in the function decl block, and is
  // responsible for matching the function's calling-convention parameters
  // against the portion of the pattern below the ParamPattern insts.
  Callee,

  // TODO: Add enumerator for non-function-call pattern match.
};

// The collected state of a pattern-matching operation.
class MatchContext {
 public:
  // A single deferred unit of pattern matching: one pattern inst paired with
  // the scrutinee value it will be matched against.
  struct WorkItem {
    SemIR::InstId pattern_id;
    // Invalid when processing the callee side.
    SemIR::InstId scrutinee_id;
  };

  // Constructs a MatchContext. If `callee_specific_id` is valid, this pattern
  // match operation is part of implementing the signature of the given
  // specific.
  explicit MatchContext(MatchKind kind, SemIR::SpecificId callee_specific_id =
                                            SemIR::SpecificId::Invalid)
      : next_index_(0), kind_(kind), callee_specific_id_(callee_specific_id) {}

  // Adds a work item to the stack.
  auto AddWork(WorkItem work_item) -> void { stack_.push_back(work_item); }

  // Processes all work items on the stack. When performing caller pattern
  // matching, returns an inst block with one inst reference for each
  // calling-convention argument. When performing callee pattern matching,
  // returns an inst block with references to all the emitted BindName insts.
  auto DoWork(Context& context) -> SemIR::InstBlockId;

 private:
  // Allocates the next unallocated RuntimeParamIndex, starting from 0.
  auto NextRuntimeIndex() -> SemIR::RuntimeParamIndex {
    auto result = next_index_;
    ++next_index_.index;
    return result;
  }

  // Emits the pattern-match insts necessary to match the pattern inst
  // `entry.pattern_id` against the scrutinee value `entry.scrutinee_id`, and
  // adds to `stack_` any work necessary to traverse into its subpatterns. This
  // behavior is contingent on the kind of match being performed, as indicated
  // by `kind_`. For example, when performing a callee pattern match, this does
  // not emit insts for patterns on the caller side. However, it still traverses
  // into subpatterns if any of their descendants might emit insts.
  // TODO: Require that `entry.scrutinee_id` is valid if and only if insts
  // should be emitted, once we start emitting `Param` insts in the
  // `ParamPattern` case.
  auto EmitPatternMatch(Context& context, MatchContext::WorkItem entry) -> void;

  // The stack of work to be processed. Processed LIFO, so callers push work in
  // reverse of the order they want results produced.
  llvm::SmallVector<WorkItem> stack_;

  // The next index to be allocated by `NextRuntimeIndex`.
  SemIR::RuntimeParamIndex next_index_;

  // The pending results that will be returned by the current `DoWork` call.
  llvm::SmallVector<SemIR::InstId> results_;

  // The kind of pattern match being performed.
  MatchKind kind_;

  // The SpecificId of the function being called (if any).
  SemIR::SpecificId callee_specific_id_;
};
}  // namespace
  93. auto MatchContext::DoWork(Context& context) -> SemIR::InstBlockId {
  94. results_.reserve(stack_.size());
  95. while (!stack_.empty()) {
  96. EmitPatternMatch(context, stack_.pop_back_val());
  97. }
  98. auto block_id = context.inst_blocks().Add(results_);
  99. results_.clear();
  100. return block_id;
  101. }
// Matches one pattern inst against its scrutinee. See the declaration comment
// for the full contract; this dispatches on the pattern inst's kind.
auto MatchContext::EmitPatternMatch(Context& context,
                                    MatchContext::WorkItem entry) -> void {
  // An error pattern propagates an error result without further matching.
  if (entry.pattern_id == SemIR::ErrorInst::SingletonInstId) {
    results_.push_back(SemIR::ErrorInst::SingletonInstId);
    return;
  }
  // On the caller side, annotate any diagnostics emitted during this match
  // with a note pointing at the parameter being initialized.
  DiagnosticAnnotationScope annotate_diagnostics(
      &context.emitter(), [&](auto& builder) {
        if (kind_ == MatchKind::Caller) {
          CARBON_DIAGNOSTIC(InCallToFunctionParam, Note,
                            "initializing function parameter");
          builder.Note(entry.pattern_id, InCallToFunctionParam);
        }
      });
  auto pattern = context.insts().GetWithLocId(entry.pattern_id);
  CARBON_KIND_SWITCH(pattern.inst) {
    case SemIR::BindingPattern::Kind:
    case SemIR::SymbolicBindingPattern::Kind: {
      // Binding patterns lie below the ParamPattern level, so they are only
      // processed during callee matching.
      CARBON_CHECK(kind_ == MatchKind::Callee);
      auto binding_pattern = pattern.inst.As<SemIR::AnyBindingPattern>();
      auto cache_entry =
          context.bind_name_cache().Lookup(binding_pattern.entity_name_id);
      // The cached bind_name should only be used once.
      auto bind_name_id =
          std::exchange(cache_entry.value(), SemIR::InstId::Invalid);
      auto bind_name = context.insts().GetAs<SemIR::AnyBindName>(bind_name_id);
      CARBON_CHECK(!bind_name.value_id.is_valid());
      // Bind the name to the scrutinee (the calling-convention parameter),
      // patching the previously-emitted BindName inst in place.
      bind_name.value_id = entry.scrutinee_id;
      context.ReplaceInstBeforeConstantUse(bind_name_id, bind_name);
      context.inst_block_stack().AddInstId(bind_name_id);
      // Only parameters with a runtime index contribute to the result block.
      if (context.insts()
              .GetAs<SemIR::AnyParam>(entry.scrutinee_id)
              .runtime_index.is_valid()) {
        results_.push_back(entry.scrutinee_id);
      }
      break;
    }
    case CARBON_KIND(SemIR::AddrPattern addr_pattern): {
      if (kind_ == MatchKind::Callee) {
        // We're emitting pattern-match IR for the callee, but we're still on
        // the caller side of the pattern, so we traverse without emitting any
        // insts.
        AddWork({.pattern_id = addr_pattern.inner_id,
                 .scrutinee_id = SemIR::InstId::Invalid});
        break;
      }
      CARBON_CHECK(entry.scrutinee_id.is_valid());
      auto scrutinee_ref_id =
          ConvertToValueOrRefExpr(context, entry.scrutinee_id);
      // `addr` needs a reference expression whose address can be taken; a
      // value expression is diagnosed as an error.
      switch (SemIR::GetExprCategory(context.sem_ir(), scrutinee_ref_id)) {
        case SemIR::ExprCategory::Error:
        case SemIR::ExprCategory::DurableRef:
        case SemIR::ExprCategory::EphemeralRef:
          break;
        default:
          CARBON_DIAGNOSTIC(AddrSelfIsNonRef, Error,
                            "`addr self` method cannot be invoked on a value");
          context.emitter().Emit(
              TokenOnly(context.insts().GetLocId(entry.scrutinee_id)),
              AddrSelfIsNonRef);
          results_.push_back(SemIR::ErrorInst::SingletonInstId);
          return;
      }
      auto scrutinee_ref = context.insts().Get(scrutinee_ref_id);
      // Match the inner pattern against the address of the scrutinee.
      auto new_scrutinee = context.AddInst<SemIR::AddrOf>(
          context.insts().GetLocId(scrutinee_ref_id),
          {.type_id = context.GetPointerType(scrutinee_ref.type_id()),
           .lvalue_id = scrutinee_ref_id});
      AddWork(
          {.pattern_id = addr_pattern.inner_id, .scrutinee_id = new_scrutinee});
      break;
    }
    case CARBON_KIND(SemIR::ValueParamPattern param_pattern): {
      // A pre-assigned runtime index must agree with the number of results
      // emitted so far, i.e. parameters must be processed in index order.
      CARBON_CHECK(param_pattern.runtime_index.index < 0 ||
                       static_cast<size_t>(param_pattern.runtime_index.index) ==
                           results_.size(),
                   "Parameters out of order; expecting {0} but got {1}",
                   results_.size(), param_pattern.runtime_index.index);
      switch (kind_) {
        case MatchKind::Caller: {
          CARBON_CHECK(entry.scrutinee_id.is_valid());
          if (entry.scrutinee_id == SemIR::ErrorInst::SingletonInstId) {
            results_.push_back(SemIR::ErrorInst::SingletonInstId);
          } else {
            // Convert the argument to a value of the parameter's type,
            // resolved within the callee's specific (if any).
            results_.push_back(ConvertToValueOfType(
                context, context.insts().GetLocId(entry.scrutinee_id),
                entry.scrutinee_id,
                SemIR::GetTypeInSpecific(context.sem_ir(), callee_specific_id_,
                                         param_pattern.type_id)));
          }
          // Do not traverse farther, because the caller side of the pattern
          // ends here.
          break;
        }
        case MatchKind::Callee: {
          // Lazily assign the runtime index on first use, updating the
          // pattern inst in place.
          if (param_pattern.runtime_index ==
              SemIR::RuntimeParamIndex::Unknown) {
            param_pattern.runtime_index = NextRuntimeIndex();
            context.ReplaceInstBeforeConstantUse(entry.pattern_id,
                                                 param_pattern);
          }
          // Emit the calling-convention parameter and match the subpattern
          // against it.
          AddWork(
              {.pattern_id = param_pattern.subpattern_id,
               .scrutinee_id = context.AddInst<SemIR::ValueParam>(
                   pattern.loc_id,
                   {.type_id = param_pattern.type_id,
                    .runtime_index = param_pattern.runtime_index,
                    .pretty_name_id = GetPrettyName(context, param_pattern)})});
          break;
        }
      }
      break;
    }
    case CARBON_KIND(SemIR::OutParamPattern param_pattern): {
      switch (kind_) {
        case MatchKind::Caller: {
          CARBON_CHECK(entry.scrutinee_id.is_valid());
          // The out-param argument must already have exactly the parameter's
          // type; no conversion is performed here.
          CARBON_CHECK(context.insts().Get(entry.scrutinee_id).type_id() ==
                       SemIR::GetTypeInSpecific(context.sem_ir(),
                                                callee_specific_id_,
                                                param_pattern.type_id));
          results_.push_back(entry.scrutinee_id);
          // Do not traverse farther, because the caller side of the pattern
          // ends here.
          break;
        }
        case MatchKind::Callee: {
          // TODO: Consider ways to address near-duplication with the
          // ValueParamPattern case.
          if (param_pattern.runtime_index ==
              SemIR::RuntimeParamIndex::Unknown) {
            param_pattern.runtime_index = NextRuntimeIndex();
            context.ReplaceInstBeforeConstantUse(entry.pattern_id,
                                                 param_pattern);
          }
          AddWork(
              {.pattern_id = param_pattern.subpattern_id,
               .scrutinee_id = context.AddInst<SemIR::OutParam>(
                   pattern.loc_id,
                   {.type_id = param_pattern.type_id,
                    .runtime_index = param_pattern.runtime_index,
                    .pretty_name_id = GetPrettyName(context, param_pattern)})});
          break;
        }
      }
      break;
    }
    case CARBON_KIND(SemIR::ReturnSlotPattern return_slot_pattern): {
      CARBON_CHECK(kind_ == MatchKind::Callee);
      // NOTE(review): on the callee side `entry.scrutinee_id` may be Invalid
      // here (CalleePatternMatch enqueues it that way); presumably the storage
      // is filled in later — confirm against the callers.
      auto return_slot_id = context.AddInst<SemIR::ReturnSlot>(
          pattern.loc_id, {.type_id = return_slot_pattern.type_id,
                           .type_inst_id = return_slot_pattern.type_inst_id,
                           .storage_id = entry.scrutinee_id});
      // Register the return slot under its well-known name; it must not
      // already be present in the lookup scope.
      bool already_in_lookup =
          context.scope_stack()
              .LookupOrAddName(SemIR::NameId::ReturnSlot, return_slot_id)
              .is_valid();
      CARBON_CHECK(!already_in_lookup);
      results_.push_back(entry.scrutinee_id);
      break;
    }
    default: {
      CARBON_FATAL("Inst kind not handled: {0}", pattern.inst.kind());
    }
  }
}
  268. auto CalleePatternMatch(Context& context,
  269. SemIR::InstBlockId implicit_param_patterns_id,
  270. SemIR::InstBlockId param_patterns_id,
  271. SemIR::InstId return_slot_pattern_id)
  272. -> SemIR::InstBlockId {
  273. if (!return_slot_pattern_id.is_valid() && !param_patterns_id.is_valid() &&
  274. !implicit_param_patterns_id.is_valid()) {
  275. return SemIR::InstBlockId::Invalid;
  276. }
  277. MatchContext match(MatchKind::Callee);
  278. // We add work to the stack in reverse so that the results will be produced
  279. // in the original order.
  280. if (return_slot_pattern_id.is_valid()) {
  281. match.AddWork({.pattern_id = return_slot_pattern_id,
  282. .scrutinee_id = SemIR::InstId::Invalid});
  283. }
  284. if (param_patterns_id.is_valid()) {
  285. for (SemIR::InstId inst_id :
  286. llvm::reverse(context.inst_blocks().Get(param_patterns_id))) {
  287. match.AddWork(
  288. {.pattern_id = inst_id, .scrutinee_id = SemIR::InstId::Invalid});
  289. }
  290. }
  291. if (implicit_param_patterns_id.is_valid()) {
  292. for (SemIR::InstId inst_id :
  293. llvm::reverse(context.inst_blocks().Get(implicit_param_patterns_id))) {
  294. match.AddWork(
  295. {.pattern_id = inst_id, .scrutinee_id = SemIR::InstId::Invalid});
  296. }
  297. }
  298. return match.DoWork(context);
  299. }
  300. auto CallerPatternMatch(Context& context, SemIR::SpecificId specific_id,
  301. SemIR::InstId self_pattern_id,
  302. SemIR::InstBlockId param_patterns_id,
  303. SemIR::InstId return_slot_pattern_id,
  304. SemIR::InstId self_arg_id,
  305. llvm::ArrayRef<SemIR::InstId> arg_refs,
  306. SemIR::InstId return_slot_arg_id)
  307. -> SemIR::InstBlockId {
  308. MatchContext match(MatchKind::Caller, specific_id);
  309. // Track the return storage, if present.
  310. if (return_slot_arg_id.is_valid()) {
  311. CARBON_CHECK(return_slot_pattern_id.is_valid());
  312. match.AddWork({.pattern_id = return_slot_pattern_id,
  313. .scrutinee_id = return_slot_arg_id});
  314. }
  315. // Check type conversions per-element.
  316. for (auto [arg_id, param_pattern_id] : llvm::reverse(llvm::zip_equal(
  317. arg_refs, context.inst_blocks().GetOrEmpty(param_patterns_id)))) {
  318. auto runtime_index = SemIR::Function::GetParamPatternInfoFromPatternId(
  319. context.sem_ir(), param_pattern_id)
  320. .inst.runtime_index;
  321. if (!runtime_index.is_valid()) {
  322. // Not a runtime parameter: we don't pass an argument.
  323. continue;
  324. }
  325. match.AddWork({.pattern_id = param_pattern_id, .scrutinee_id = arg_id});
  326. }
  327. if (self_pattern_id.is_valid()) {
  328. match.AddWork({.pattern_id = self_pattern_id, .scrutinee_id = self_arg_id});
  329. }
  330. return match.DoWork(context);
  331. }
  332. } // namespace Carbon::Check