context.h 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. #ifndef CARBON_TOOLCHAIN_PARSE_CONTEXT_H_
  5. #define CARBON_TOOLCHAIN_PARSE_CONTEXT_H_
  6. #include <optional>
  7. #include "common/check.h"
  8. #include "common/vlog.h"
  9. #include "toolchain/lex/token_kind.h"
  10. #include "toolchain/lex/tokenized_buffer.h"
  11. #include "toolchain/parse/node_kind.h"
  12. #include "toolchain/parse/precedence.h"
  13. #include "toolchain/parse/state.h"
  14. #include "toolchain/parse/tree.h"
  15. namespace Carbon::Parse {
// An amount by which to look ahead of the current token. Lookahead should be
// used sparingly, and unbounded lookahead should be avoided.
//
// TODO: Decide whether we want to avoid lookahead altogether.
//
// NOLINTNEXTLINE(performance-enum-size): Deliberately matches index size.
enum class Lookahead : int32_t {
  // Inspect the token at the current parse position.
  CurrentToken = 0,
  // Inspect the token one past the current parse position.
  NextToken = 1,
};
// Context and shared functionality for parser handlers. See state.def for state
// documentation.
class Context {
 public:
  // Possible operator fixities for errors.
  enum class OperatorFixity : int8_t { Prefix, Infix, Postfix };

  // Possible return values for FindListToken.
  enum class ListTokenKind : int8_t { Comma, Close, CommaClose };

  // Used for restricting ordering of `package` and `import` declarations.
  enum class PackagingState : int8_t {
    FileStart,
    InImports,
    AfterNonPackagingDecl,

    // A warning about `import` placement has been issued so we don't keep
    // issuing more (when `import` is repeated) until more non-`import`
    // declarations come up.
    InImportsAfterNonPackagingDecl,
  };

  // Used to track state on state_stack_.
  struct StateStackEntry : public Printable<StateStackEntry> {
    // Prints state information for verbose output.
    auto Print(llvm::raw_ostream& output) const -> void {
      output << state << " @" << token << " subtree_start=" << subtree_start
             << " has_error=" << has_error;
    }

    // The state.
    State state;

    // Set to true to indicate that an error was found, and that contextual
    // error recovery may be needed.
    bool has_error = false;

    // Precedence information used by expression states in order to determine
    // operator precedence. The ambient_precedence deals with how the expression
    // should interact with outside context, while the lhs_precedence is
    // specific to the lhs of an operator expression.
    PrecedenceGroup ambient_precedence = PrecedenceGroup::ForTopLevelExpr();
    PrecedenceGroup lhs_precedence = PrecedenceGroup::ForTopLevelExpr();

    // A token providing context based on the subtree. This will typically be
    // the first token in the subtree, but may sometimes be a token within. It
    // will typically be used for the subtree's root node.
    Lex::TokenIndex token;

    // The offset within the Tree of the subtree start.
    int32_t subtree_start;
  };

  // We expect StateStackEntry to fit into 12 bytes:
  //   state = 1 byte
  //   has_error = 1 byte
  //   ambient_precedence = 1 byte
  //   lhs_precedence = 1 byte
  //   token = 4 bytes
  //   subtree_start = 4 bytes
  // If it becomes bigger, it'd be worth examining better packing; it should be
  // feasible to pack the 1-byte entries more tightly.
  static_assert(sizeof(StateStackEntry) == 12,
                "StateStackEntry has unexpected size!");

  // Stores non-owning pointers to the tree being built, the token stream being
  // consumed, and the diagnostic emitter; `vlog_stream` may be null to disable
  // verbose logging.
  explicit Context(Tree& tree, Lex::TokenizedBuffer& tokens,
                   Lex::TokenDiagnosticEmitter& emitter,
                   llvm::raw_ostream* vlog_stream);

  // Adds a node to the parse tree that has no children (a leaf).
  auto AddLeafNode(NodeKind kind, Lex::TokenIndex token, bool has_error = false)
      -> void {
    tree_->node_impls_.push_back(Tree::NodeImpl(kind, has_error, token));
  }

  // Adds a node to the parse tree that has children.
  auto AddNode(NodeKind kind, Lex::TokenIndex token, bool has_error) -> void {
    tree_->node_impls_.push_back(Tree::NodeImpl(kind, has_error, token));
  }

  // Replaces the placeholder node at the indicated position with a leaf node.
  //
  // To reserve a position in the parse tree, you may add a placeholder parse
  // node using code like:
  // ```
  // context.PushState(State::WillFillInPlaceholder);
  // context.AddLeafNode(NodeKind::Placeholder, *context.position());
  // ```
  // It may be replaced with the intended leaf parse node with code like:
  // ```
  // auto HandleWillFillInPlaceholder(Context& context) -> void {
  //   auto state = context.PopState();
  //   context.ReplacePlaceholderNode(state.subtree_start, /* replacement */);
  // }
  // ```
  auto ReplacePlaceholderNode(int32_t position, NodeKind kind,
                              Lex::TokenIndex token, bool has_error = false)
      -> void;

  // Returns the current position and moves past it.
  auto Consume() -> Lex::TokenIndex { return *(position_++); }

  // Consumes the current token. Does not return it.
  auto ConsumeAndDiscard() -> void { ++position_; }

  // Parses an open paren token, possibly diagnosing if necessary. Creates a
  // leaf parse node of the specified start kind. The default_token is used when
  // there's no open paren. Returns the open paren token if it was found.
  auto ConsumeAndAddOpenParen(Lex::TokenIndex default_token,
                              NodeKind start_kind)
      -> std::optional<Lex::TokenIndex>;

  // Parses a closing symbol corresponding to the opening symbol
  // `expected_open`, possibly skipping forward and diagnosing if necessary.
  // Creates a parse node of the specified close kind. If `expected_open` is not
  // an opening symbol, the parse node will be associated with `state.token`,
  // no input will be consumed, and no diagnostic will be emitted.
  auto ConsumeAndAddCloseSymbol(Lex::TokenIndex expected_open,
                                StateStackEntry state, NodeKind close_kind)
      -> void;

  // Composes `ConsumeIf` and `AddLeafNode`, returning false when ConsumeIf
  // fails.
  auto ConsumeAndAddLeafNodeIf(Lex::TokenKind token_kind, NodeKind node_kind)
      -> bool;

  // Returns the current position and moves past it. Requires the token is the
  // expected kind.
  auto ConsumeChecked(Lex::TokenKind kind) -> Lex::TokenIndex;

  // If the current position's token matches this `Kind`, returns it and
  // advances to the next position. Otherwise returns an empty optional.
  auto ConsumeIf(Lex::TokenKind kind) -> std::optional<Lex::TokenIndex> {
    if (!PositionIs(kind)) {
      return std::nullopt;
    }
    return Consume();
  }

  // Find the next token of any of the given kinds at the current bracketing
  // level.
  auto FindNextOf(std::initializer_list<Lex::TokenKind> desired_kinds)
      -> std::optional<Lex::TokenIndex>;

  // If the token is an opening symbol for a matched group, skips to the matched
  // closing symbol and returns true. Otherwise, returns false.
  auto SkipMatchingGroup() -> bool;

  // Skips forward to move past the likely end of a declaration or statement.
  //
  // Looks forward, skipping over any matched symbol groups, to find the next
  // position that is likely past the end of a declaration or statement. This
  // is a heuristic and should only be called when skipping past parse errors.
  //
  // The strategy for recognizing when we have likely passed the end of a
  // declaration or statement:
  // - If we get to a close curly brace, we likely ended the entire context.
  // - If we get to a semicolon, that should have ended the declaration or
  //   statement.
  // - If we get to a new line from the `SkipRoot` token, but with the same or
  //   less indentation, there is likely a missing semicolon. Continued
  //   declarations or statements across multiple lines should be indented.
  //
  // Returns the last token consumed.
  auto SkipPastLikelyEnd(Lex::TokenIndex skip_root) -> Lex::TokenIndex;

  // Skip forward to the given token. Verifies that it is actually forward.
  auto SkipTo(Lex::TokenIndex t) -> void;

  // Returns true if the current token satisfies the lexical validity rules
  // for an infix operator.
  auto IsLexicallyValidInfixOperator() -> bool;

  // Determines whether the current trailing operator should be treated as
  // infix.
  auto IsTrailingOperatorInfix() -> bool;

  // Diagnoses whether the current token is not written properly for the given
  // fixity. For example, because mandatory whitespace is missing. Regardless of
  // whether there's an error, it's expected that parsing continues.
  auto DiagnoseOperatorFixity(OperatorFixity fixity) -> void;

  // If the current position is a `,`, consumes it, adds the provided token, and
  // returns `Comma`. Returns `Close` if the current position is close_token
  // (for example, `)`). `CommaClose` indicates it found both (for example,
  // `,)`). Handles cases where invalid tokens are present by advancing the
  // position, and may emit errors. Pass already_has_error in order to suppress
  // duplicate errors.
  auto ConsumeListToken(NodeKind comma_kind, Lex::TokenKind close_kind,
                        bool already_has_error) -> ListTokenKind;

  // Gets the kind of the next token to be consumed. If `lookahead` is
  // provided, it specifies which token to inspect.
  auto PositionKind(Lookahead lookahead = Lookahead::CurrentToken) const
      -> Lex::TokenKind {
    return tokens_->GetKind(position_[static_cast<int32_t>(lookahead)]);
  }

  // Tests whether the next token to be consumed is of the specified kind. If
  // `lookahead` is provided, it specifies which token to inspect.
  auto PositionIs(Lex::TokenKind kind,
                  Lookahead lookahead = Lookahead::CurrentToken) const -> bool {
    return PositionKind(lookahead) == kind;
  }

  // Pops the state and keeps the value for inspection.
  auto PopState() -> StateStackEntry {
    auto back = state_stack_.pop_back_val();
    CARBON_VLOG("Pop {0}: {1}\n", state_stack_.size(), back);
    return back;
  }

  // Pops the state and discards it.
  auto PopAndDiscardState() -> void {
    CARBON_VLOG("PopAndDiscard {0}: {1}\n", state_stack_.size() - 1,
                state_stack_.back());
    state_stack_.pop_back();
  }

  // Pushes a new state with the current position for context.
  auto PushState(State state) -> void { PushState(state, *position_); }

  // Pushes a new state with a specific token for context. Used when forming a
  // new subtree when the current position isn't the start of the subtree.
  auto PushState(State state, Lex::TokenIndex token) -> void {
    PushState({.state = state, .token = token, .subtree_start = tree_->size()});
  }

  // Pushes a new expression state with specific precedence.
  auto PushStateForExpr(PrecedenceGroup ambient_precedence) -> void {
    PushState({.state = State::Expr,
               .ambient_precedence = ambient_precedence,
               .token = *position_,
               .subtree_start = tree_->size()});
  }

  // Pushes a new state with detailed precedence for expression resume states.
  auto PushStateForExprLoop(State state, PrecedenceGroup ambient_precedence,
                            PrecedenceGroup lhs_precedence) -> void {
    PushState({.state = state,
               .ambient_precedence = ambient_precedence,
               .lhs_precedence = lhs_precedence,
               .token = *position_,
               .subtree_start = tree_->size()});
  }

  // Pushes a constructed state onto the stack.
  auto PushState(StateStackEntry state) -> void {
    CARBON_VLOG("Push {0}: {1}\n", state_stack_.size(), state);
    state_stack_.push_back(state);
    // A hard cap on stack depth; hitting it almost certainly indicates a
    // parser bug rather than legitimate input.
    CARBON_CHECK(state_stack_.size() < (1 << 20),
                 "Excessive stack size: likely infinite loop");
  }

  // Pushes a constructed state onto the stack, with a different parse state.
  auto PushState(StateStackEntry state_entry, State parse_state) -> void {
    state_entry.state = parse_state;
    PushState(state_entry);
  }

  // Propagates an error up the state stack, to the parent state.
  auto ReturnErrorOnState() -> void { state_stack_.back().has_error = true; }

  // Adds a node for a declaration's semicolon. Includes error recovery when the
  // token is not a semicolon, using `decl_kind` and `is_def_allowed` to inform
  // diagnostics.
  auto AddNodeExpectingDeclSemi(StateStackEntry state, NodeKind node_kind,
                                Lex::TokenKind decl_kind, bool is_def_allowed)
      -> void;

  // Emits a diagnostic for a declaration missing a semi.
  auto DiagnoseExpectedDeclSemi(Lex::TokenKind expected_kind) -> void;

  // Emits a diagnostic for a declaration missing a semi or definition.
  auto DiagnoseExpectedDeclSemiOrDefinition(Lex::TokenKind expected_kind)
      -> void;

  // Handles error recovery in a declaration, particularly before any possible
  // definition has started (although one could be present). Recover to a
  // semicolon when it makes sense as a possible end, otherwise use the
  // introducer token for the error.
  auto RecoverFromDeclError(StateStackEntry state, NodeKind node_kind,
                            bool skip_past_likely_end) -> void;

  // Handles parsing of the library name. Returns the name's ID on success,
  // which may be invalid for `default`.
  // TODO: Add an invalid node on error, fix callers to adapt.
  auto ParseLibraryName(bool accept_default)
      -> std::optional<StringLiteralValueId>;

  // Handles parsing `library <name>`. Requires that the position is a `library`
  // token. Returns the name's ID on success, which may be invalid for
  // `default`.
  auto ParseLibrarySpecifier(bool accept_default)
      -> std::optional<StringLiteralValueId>;

  // Sets the package declaration information. Called at most once.
  auto set_packaging_decl(Tree::PackagingNames packaging_names, bool is_impl)
      -> void {
    CARBON_CHECK(!tree_->packaging_decl_);
    tree_->packaging_decl_ = {.names = packaging_names, .is_impl = is_impl};
  }

  // Adds an import.
  auto AddImport(Tree::PackagingNames package) -> void {
    tree_->imports_.push_back(package);
  }

  // Adds a function definition start node, and begins tracking a deferred
  // definition if necessary.
  auto AddFunctionDefinitionStart(Lex::TokenIndex token, bool has_error)
      -> void;

  // Adds a function definition node, and ends tracking a deferred definition if
  // necessary.
  auto AddFunctionDefinition(Lex::TokenIndex token, bool has_error) -> void;

  // Prints information for a stack dump.
  auto PrintForStackDump(llvm::raw_ostream& output) const -> void;

  // Trivial accessors for the parse tree, token buffer, diagnostic emitter,
  // current position, and state stack.
  auto tree() const -> const Tree& { return *tree_; }
  auto tokens() const -> const Lex::TokenizedBuffer& { return *tokens_; }
  auto emitter() -> Lex::TokenDiagnosticEmitter& { return *emitter_; }
  auto position() -> Lex::TokenIterator& { return position_; }
  auto position() const -> Lex::TokenIterator { return position_; }

  auto state_stack() -> llvm::SmallVector<StateStackEntry>& {
    return state_stack_;
  }
  auto state_stack() const -> const llvm::SmallVector<StateStackEntry>& {
    return state_stack_;
  }

  auto packaging_state() const -> PackagingState { return packaging_state_; }
  auto set_packaging_state(PackagingState packaging_state) -> void {
    packaging_state_ = packaging_state;
  }

  auto first_non_packaging_token() const -> Lex::TokenIndex {
    return first_non_packaging_token_;
  }
  // Records the first non-packaging token; may only be called while it is
  // still invalid.
  auto set_first_non_packaging_token(Lex::TokenIndex token) -> void {
    CARBON_CHECK(!first_non_packaging_token_.is_valid());
    first_non_packaging_token_ = token;
  }

 private:
  // Prints a single token for a stack dump. Used by PrintForStackDump.
  auto PrintTokenForStackDump(llvm::raw_ostream& output,
                              Lex::TokenIndex token) const -> void;

  // Non-owning pointers to collaborators supplied at construction.
  Tree* tree_;
  Lex::TokenizedBuffer* tokens_;
  Lex::TokenDiagnosticEmitter* emitter_;

  // Whether to print verbose output.
  llvm::raw_ostream* vlog_stream_;

  // The current position within the token buffer.
  Lex::TokenIterator position_;
  // The FileEnd token.
  Lex::TokenIterator end_;

  llvm::SmallVector<StateStackEntry> state_stack_;

  // The deferred definition indexes of functions whose definitions have begun
  // but not yet finished.
  llvm::SmallVector<DeferredDefinitionIndex> deferred_definition_stack_;

  // The current packaging state, whether `import`/`package` are allowed.
  PackagingState packaging_state_ = PackagingState::FileStart;
  // The first non-packaging token, starting as invalid. Used for packaging
  // state warnings.
  Lex::TokenIndex first_non_packaging_token_ = Lex::TokenIndex::Invalid;
};
  339. } // namespace Carbon::Parse
  340. #endif // CARBON_TOOLCHAIN_PARSE_CONTEXT_H_