// Part of the Carbon Language project, under the Apache License v2.0 with LLVM
// Exceptions. See /LICENSE for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

#ifndef CARBON_TOOLCHAIN_PARSE_CONTEXT_H_
#define CARBON_TOOLCHAIN_PARSE_CONTEXT_H_

#include <optional>

#include "common/check.h"
#include "common/vlog.h"
#include "toolchain/lex/token_kind.h"
#include "toolchain/lex/tokenized_buffer.h"
#include "toolchain/parse/node_ids.h"
#include "toolchain/parse/node_kind.h"
#include "toolchain/parse/precedence.h"
#include "toolchain/parse/state.h"
#include "toolchain/parse/tree.h"

namespace Carbon::Parse {
// An amount by which to look ahead of the current token. Lookahead should be
// used sparingly, and unbounded lookahead should be avoided.
//
// TODO: Decide whether we want to avoid lookahead altogether.
//
// The size of the enum deliberately matches the index size.
enum class Lookahead : int32_t {
  // Inspect the token at the current parse position.
  CurrentToken = 0,
  // Inspect the token one past the current parse position.
  NextToken = 1,
};
// Context and shared functionality for parser handlers. See state.def for state
// documentation.
class Context {
 public:
  // A token-based emitter for use during parse.
  class DiagnosticEmitter : public Diagnostics::Emitter<Lex::TokenIndex> {
   public:
    explicit DiagnosticEmitter(Diagnostics::Consumer* consumer,
                               Context* context)
        : Emitter(consumer), context_(context) {}

   protected:
    // Applies the `position_` to the `last_byte_offset` returned by
    // `TokenToDiagnosticLoc`, so a diagnostic's range extends at least to the
    // parser's current position.
    auto ConvertLoc(Lex::TokenIndex token, ContextFnT /*context_fn*/) const
        -> Diagnostics::ConvertedLoc override {
      auto converted = context_->tokens().TokenToDiagnosticLoc(token);
      converted.last_byte_offset =
          std::max(converted.last_byte_offset,
                   context_->tokens().GetByteOffset(*context_->position()));
      return converted;
    }

   private:
    // The parser context; used to reach the token buffer and parse position.
    Context* context_;
  };

  // Possible operator fixities for errors.
  enum class OperatorFixity : int8_t { Prefix, Infix, Postfix };

  // Possible return values for ConsumeListToken.
  enum class ListTokenKind : int8_t { Comma, Close, CommaClose };

  // Used for restricting ordering of `package` and `import` declarations.
  enum class PackagingState : int8_t {
    FileStart,
    InImports,
    AfterNonPackagingDecl,
    // A warning about `import` placement has been issued so we don't keep
    // issuing more (when `import` is repeated) until more non-`import`
    // declarations come up.
    InImportsAfterNonPackagingDecl,
  };

  // Used to track state on state_stack_.
  struct State : public Printable<State> {
    // Prints state information for verbose output.
    auto Print(llvm::raw_ostream& output) const -> void {
      output << kind << " @" << token << " subtree_start=" << subtree_start
             << " has_error=" << has_error;
    }

    // The state.
    StateKind kind;

    // Set to true to indicate that an error was found, and that contextual
    // error recovery may be needed.
    bool has_error : 1 = false;

    // Set to true to indicate that this state is handling a pattern nested
    // inside a `var` pattern.
    // TODO: This is meaningful only for patterns, and the precedence fields
    // are meaningful only for expressions, so expressing them as a union
    // could help catch errors.
    bool in_var_pattern : 1 = false;

    // Set to true to indicate that this state is handling a pattern nested
    // inside an `unused` pattern.
    // TODO: This is meaningful only for patterns, and the precedence fields
    // are meaningful only for expressions, so expressing them as a union
    // could help catch errors.
    bool in_unused_pattern : 1 = false;

    // Precedence information used by expression states in order to determine
    // operator precedence. The ambient_precedence deals with how the expression
    // should interact with outside context, while the lhs_precedence is
    // specific to the lhs of an operator expression.
    PrecedenceGroup ambient_precedence = PrecedenceGroup::ForTopLevelExpr();
    PrecedenceGroup lhs_precedence = PrecedenceGroup::ForTopLevelExpr();

    // A token providing context based on the subtree. This will typically be
    // the first token in the subtree, but may sometimes be a token within. It
    // will typically be used for the subtree's root node.
    Lex::TokenIndex token;

    // The offset within the Tree of the subtree start.
    int32_t subtree_start;
  };

  // We expect State to fit into 12 bytes:
  //   kind = 1 byte
  //   has_error, in_var_pattern, and in_unused_pattern = 1 byte
  //   ambient_precedence = 1 byte
  //   lhs_precedence = 1 byte
  //   token = 4 bytes
  //   subtree_start = 4 bytes
  // If it becomes bigger, it'd be worth examining better packing; it should be
  // feasible to pack the 1-byte entries more tightly.
  static_assert(sizeof(State) == 12, "State has unexpected size!");

  explicit Context(Tree* tree, Lex::TokenizedBuffer* tokens,
                   Diagnostics::Consumer* consumer,
                   llvm::raw_ostream* vlog_stream);

  // Adds a node to the parse tree that has no children (a leaf).
  auto AddLeafNode(NodeKind kind, Lex::TokenIndex token, bool has_error = false)
      -> void {
    AddNode(kind, token, has_error);
  }

  // Adds a node to the parse tree that has children.
  auto AddNode(NodeKind kind, Lex::TokenIndex token, bool has_error) -> void {
    // InvalidParse* nodes are only meaningful when marked as errors.
    CARBON_CHECK(has_error || (kind != NodeKind::InvalidParse &&
                               kind != NodeKind::InvalidParseStart &&
                               kind != NodeKind::InvalidParseSubtree),
                 "{0} nodes must always have an error", kind);
    tree_->node_impls_.push_back(Tree::NodeImpl(kind, has_error, token));
    CARBON_VLOG("Add #node{0}: {1}",
                llvm::format_hex_no_prefix(tree_->node_impls_.size() - 1, 0,
                                           /*Upper=*/true),
                tree_->node_impls_.back());
  }

  // Adds a node and returns its typed NodeId.
  template <const Parse::NodeKind& Kind>
  auto AddNode(Lex::TokenIndex token, bool has_error) -> NodeIdForKind<Kind> {
    AddNode(Kind, token, has_error);
    // The new node is the last one pushed, so its id is size - 1.
    return NodeIdForKind<Kind>::UnsafeMake(
        NodeId(tree_->node_impls_.size() - 1));
  }

  // Adds an invalid parse node.
  auto AddInvalidParse(Lex::TokenIndex token) -> void {
    AddNode(NodeKind::InvalidParse, token, /*has_error=*/true);
  }

  // Replaces the placeholder node at the indicated position with a leaf node.
  //
  // To reserve a position in the parse tree, you may add a placeholder parse
  // node using code like:
  //   ```
  //   context.PushState(State::WillFillInPlaceholder);
  //   context.AddLeafNode(NodeKind::Placeholder, *context.position());
  //   ```
  // It may be replaced with the intended leaf parse node with code like:
  //   ```
  //   auto HandleWillFillInPlaceholder(Context& context) -> void {
  //     auto state = context.PopState();
  //     context.ReplacePlaceholderNode(state.subtree_start, /* replacement */);
  //   }
  //   ```
  auto ReplacePlaceholderNode(int32_t position, NodeKind kind,
                              Lex::TokenIndex token, bool has_error = false)
      -> void;

  // Returns the current position and moves past it.
  auto Consume() -> Lex::TokenIndex { return *(position_++); }

  // Consumes the current token. Does not return it.
  auto ConsumeAndDiscard() -> void { ++position_; }

  // Parses an open paren token, possibly diagnosing if necessary. Creates a
  // leaf parse node of the specified start kind. The default_token is used when
  // there's no open paren. Returns the open paren token if it was found.
  auto ConsumeAndAddOpenParen(Lex::TokenIndex default_token,
                              NodeKind start_kind)
      -> std::optional<Lex::TokenIndex>;

  // Parses a closing symbol corresponding to the opening symbol `state.token`,
  // possibly skipping forward and diagnosing if necessary. Creates a parse node
  // of the specified close kind. If `state.token` is not an opening symbol,
  // no input will be consumed, and no diagnostic will be emitted, but the parse
  // node will still be marked as having an error.
  auto ConsumeAndAddCloseSymbol(State state, NodeKind close_kind) -> void;

  // Composes `ConsumeIf` and `AddLeafNode`, returning false when ConsumeIf
  // fails.
  auto ConsumeAndAddLeafNodeIf(Lex::TokenKind token_kind, NodeKind node_kind)
      -> bool;

  // Returns the current position and moves past it. Requires the token is the
  // expected kind.
  auto ConsumeChecked(Lex::TokenKind kind) -> Lex::TokenIndex;

  // If the current position's token matches this `Kind`, returns it and
  // advances to the next position. Otherwise returns an empty optional.
  auto ConsumeIf(Lex::TokenKind kind) -> std::optional<Lex::TokenIndex> {
    if (!PositionIs(kind)) {
      return std::nullopt;
    }
    return Consume();
  }

  // Find the next token of any of the given kinds at the current bracketing
  // level.
  auto FindNextOf(std::initializer_list<Lex::TokenKind> desired_kinds)
      -> std::optional<Lex::TokenIndex>;

  // If the token is an opening symbol for a matched group, skips to the matched
  // closing symbol and returns true. Otherwise, returns false.
  auto SkipMatchingGroup() -> bool;

  // Skips forward to move past the likely end of a declaration or statement.
  //
  // Looks forward, skipping over any matched symbol groups, to find the next
  // position that is likely past the end of a declaration or statement. This
  // is a heuristic and should only be called when skipping past parse errors.
  //
  // The strategy for recognizing when we have likely passed the end of a
  // declaration or statement:
  // - If we get to a close curly brace, we likely ended the entire context.
  // - If we get to a semicolon, that should have ended the declaration or
  //   statement.
  // - If we get to a new line from the `SkipRoot` token, but with the same or
  //   less indentation, there is likely a missing semicolon. Continued
  //   declarations or statements across multiple lines should be indented.
  //
  // Returns the last token consumed.
  auto SkipPastLikelyEnd(Lex::TokenIndex skip_root) -> Lex::TokenIndex;

  // Skip forward to the given token. Verifies that it is actually forward.
  auto SkipTo(Lex::TokenIndex t) -> void;

  // Returns true if the current token satisfies the lexical validity rules
  // for an infix operator.
  auto IsLexicallyValidInfixOperator() -> bool;

  // Determines whether the current trailing operator should be treated as
  // infix.
  auto IsTrailingOperatorInfix() -> bool;

  // Diagnoses whether the current token is not written properly for the given
  // fixity. For example, because mandatory whitespace is missing. Regardless of
  // whether there's an error, it's expected that parsing continues.
  auto DiagnoseOperatorFixity(OperatorFixity fixity) -> void;

  // If the current position is a `,`, consumes it, adds the provided token, and
  // returns `Comma`. Returns `Close` if the current position is close_token
  // (for example, `)`). `CommaClose` indicates it found both (for example,
  // `,)`). Handles cases where invalid tokens are present by advancing the
  // position, and may emit errors. Pass already_has_error in order to suppress
  // duplicate errors.
  auto ConsumeListToken(NodeKind comma_kind, Lex::TokenKind close_kind,
                        bool already_has_error) -> ListTokenKind;

  // Gets the kind of the next token to be consumed. If `lookahead` is
  // provided, it specifies which token to inspect.
  auto PositionKind(Lookahead lookahead = Lookahead::CurrentToken) const
      -> Lex::TokenKind {
    // `Lookahead`'s underlying value is the offset from the current position.
    return tokens_->GetKind(position_[static_cast<int32_t>(lookahead)]);
  }

  // Tests whether the next token to be consumed is of the specified kind. If
  // `lookahead` is provided, it specifies which token to inspect.
  auto PositionIs(Lex::TokenKind kind,
                  Lookahead lookahead = Lookahead::CurrentToken) const -> bool {
    return PositionKind(lookahead) == kind;
  }

  // Pops the state and keeps the value for inspection.
  auto PopState() -> State {
    auto back = state_stack_.pop_back_val();
    CARBON_VLOG("Pop {0}: {1}\n", state_stack_.size(), back);
    return back;
  }

  // Pops the state and discards it.
  auto PopAndDiscardState() -> void {
    CARBON_VLOG("PopAndDiscard {0}: {1}\n", state_stack_.size() - 1,
                state_stack_.back());
    state_stack_.pop_back();
  }

  // Pushes a new state with the current position for context.
  auto PushState(StateKind kind) -> void { PushState(kind, *position_); }

  // Pushes a new state with a specific token for context. Used when forming a
  // new subtree when the current position isn't the start of the subtree.
  auto PushState(StateKind kind, Lex::TokenIndex token) -> void {
    PushState({.kind = kind, .token = token, .subtree_start = tree_->size()});
  }

  // Pushes a new expression state with specific precedence.
  auto PushStateForExpr(PrecedenceGroup ambient_precedence) -> void {
    PushState({.kind = StateKind::Expr,
               .ambient_precedence = ambient_precedence,
               .token = *position_,
               .subtree_start = tree_->size()});
  }

  // Pushes a new state with detailed precedence for expression resume states.
  auto PushStateForExprLoop(StateKind kind, PrecedenceGroup ambient_precedence,
                            PrecedenceGroup lhs_precedence) -> void {
    PushState({.kind = kind,
               .ambient_precedence = ambient_precedence,
               .lhs_precedence = lhs_precedence,
               .token = *position_,
               .subtree_start = tree_->size()});
  }

  // Pushes a new state for handling a pattern. `in_var_pattern` and
  // `in_unused_pattern` indicate whether that pattern is nested inside a `var`
  // or `unused` pattern.
  auto PushStateForPattern(StateKind kind, bool in_var_pattern,
                           bool in_unused_pattern) -> void {
    PushState({.kind = kind,
               .in_var_pattern = in_var_pattern,
               .in_unused_pattern = in_unused_pattern,
               .token = *position_,
               .subtree_start = tree_->size()});
  }

  // Pushes a constructed state onto the stack.
  auto PushState(State state) -> void {
    CARBON_VLOG("Push {0}: {1}\n", state_stack_.size(), state);
    state_stack_.push_back(state);
    // Guard against runaway recursion in the state machine.
    CARBON_CHECK(state_stack_.size() < (1 << 20),
                 "Excessive stack size: likely infinite loop");
  }

  // Pushes a constructed state onto the stack, with a different kind.
  auto PushState(State state, StateKind kind) -> void {
    state.kind = kind;
    PushState(state);
  }

  // Propagates an error up the state stack, to the parent state.
  auto ReturnErrorOnState() -> void { state_stack_.back().has_error = true; }

  // Adds a node for a declaration's semicolon. Includes error recovery when the
  // token is not a semicolon, using `decl_kind` and `is_def_allowed` to inform
  // diagnostics.
  auto AddNodeExpectingDeclSemi(State state, NodeKind node_kind,
                                Lex::TokenKind decl_kind, bool is_def_allowed)
      -> void;

  // Emits a diagnostic for a declaration missing a semi.
  auto DiagnoseExpectedDeclSemi(Lex::TokenKind expected_kind) -> void;

  // Emits a diagnostic for a declaration missing a semi or definition.
  auto DiagnoseExpectedDeclSemiOrDefinition(Lex::TokenKind expected_kind)
      -> void;

  // Handles error recovery in a declaration, particularly before any possible
  // definition has started (although one could be present). Recover to a
  // semicolon when it makes sense as a possible end, otherwise use the
  // introducer token for the error.
  auto RecoverFromDeclError(State state, NodeKind node_kind,
                            bool skip_past_likely_end) -> void;

  // Handles parsing of the library name. Returns the name's ID on success,
  // which may be invalid for `default`.
  // TODO: Add an invalid node on error, fix callers to adapt.
  auto ParseLibraryName(bool accept_default)
      -> std::optional<StringLiteralValueId>;

  // Handles parsing `library <name>`. Requires that the position is a `library`
  // token. Returns the name's ID on success, which may be invalid for
  // `default`.
  auto ParseLibrarySpecifier(bool accept_default)
      -> std::optional<StringLiteralValueId>;

  // Sets the package declaration information. Called at most once.
  auto set_packaging_decl(Tree::PackagingNames packaging_names, bool is_impl)
      -> void {
    CARBON_CHECK(!tree_->packaging_decl_);
    tree_->packaging_decl_ = {.names = packaging_names, .is_impl = is_impl};
  }

  // Adds an import.
  auto AddImport(Tree::PackagingNames package) -> void {
    tree_->imports_.push_back(package);
  }

  // Adds a function definition start node, and begins tracking a deferred
  // definition if necessary.
  auto AddFunctionDefinitionStart(Lex::TokenIndex token, bool has_error)
      -> void;

  // Adds a function definition node, and ends tracking a deferred definition if
  // necessary.
  auto AddFunctionDefinition(Lex::TokenIndex token, bool has_error) -> void;

  // Adds a function terse definition node, and ends tracking a deferred
  // definition if necessary.
  auto AddFunctionTerseDefinition(Lex::TokenIndex token, bool has_error)
      -> void;

  // Prints information for a stack dump.
  auto PrintForStackDump(llvm::raw_ostream& output) const -> void;

  // The parse tree being built.
  auto tree() const -> const Tree& { return *tree_; }

  // The token buffer being parsed.
  auto tokens() const -> const Lex::TokenizedBuffer& { return *tokens_; }

  // Whether any diagnostic errors have been emitted during the parse.
  auto has_errors() const -> bool { return err_tracker_.seen_error(); }

  auto emitter() -> DiagnosticEmitter& { return emitter_; }

  // The current parse position; the non-const overload returns a mutable
  // reference so handlers can advance it.
  auto position() -> Lex::TokenIterator& { return position_; }
  auto position() const -> Lex::TokenIterator { return position_; }

  auto state_stack() -> llvm::SmallVector<State>& { return state_stack_; }
  auto state_stack() const -> const llvm::SmallVector<State>& {
    return state_stack_;
  }

  auto packaging_state() const -> PackagingState { return packaging_state_; }
  auto set_packaging_state(PackagingState packaging_state) -> void {
    packaging_state_ = packaging_state;
  }

  auto first_non_packaging_token() const -> Lex::TokenIndex {
    return first_non_packaging_token_;
  }
  auto set_first_non_packaging_token(Lex::TokenIndex token) -> void {
    // Only the first such token is recorded; callers must not set it twice.
    CARBON_CHECK(!first_non_packaging_token_.has_value());
    first_non_packaging_token_ = token;
  }

 private:
  // Prints a single token for a stack dump. Used by PrintForStackDump.
  auto PrintTokenForStackDump(llvm::raw_ostream& output,
                              Lex::TokenIndex token) const -> void;

  // The tree being built; not owned.
  Tree* tree_;

  // The tokens being parsed; not owned.
  Lex::TokenizedBuffer* tokens_;

  // Wraps the diagnostic consumer to track whether errors were seen; backs
  // `has_errors()`.
  Diagnostics::ErrorTrackingConsumer err_tracker_;

  // The token-based diagnostic emitter used by parse handlers.
  DiagnosticEmitter emitter_;

  // Whether to print verbose output.
  llvm::raw_ostream* vlog_stream_;

  // The current position within the token buffer.
  Lex::TokenIterator position_;

  // The FileEnd token.
  Lex::TokenIterator end_;

  // The stack of parser states; see `State` for what each entry tracks.
  llvm::SmallVector<State> state_stack_;

  // The deferred definition indexes of functions whose definitions have begun
  // but not yet finished.
  llvm::SmallVector<DeferredDefinitionIndex> deferred_definition_stack_;

  // The current packaging state, whether `import`/`package` are allowed.
  PackagingState packaging_state_ = PackagingState::FileStart;

  // The first non-packaging token, starting as invalid. Used for packaging
  // state warnings.
  Lex::TokenIndex first_non_packaging_token_ = Lex::TokenIndex::None;
};
}  // namespace Carbon::Parse

#endif  // CARBON_TOOLCHAIN_PARSE_CONTEXT_H_