// Part of the Carbon Language project, under the Apache License v2.0 with LLVM
// Exceptions. See /LICENSE for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

#ifndef CARBON_TOOLCHAIN_PARSE_CONTEXT_H_
#define CARBON_TOOLCHAIN_PARSE_CONTEXT_H_

#include <optional>

#include "common/check.h"
#include "common/vlog.h"
#include "toolchain/lex/token_kind.h"
#include "toolchain/lex/tokenized_buffer.h"
#include "toolchain/parse/node_kind.h"
#include "toolchain/parse/precedence.h"
#include "toolchain/parse/state.h"
#include "toolchain/parse/tree.h"

namespace Carbon::Parse {
  16. // An amount by which to look ahead of the current token. Lookahead should be
  17. // used sparingly, and unbounded lookahead should be avoided.
  18. //
  19. // TODO: Decide whether we want to avoid lookahead altogether.
  20. enum class Lookahead : int32_t {
  21. CurrentToken = 0,
  22. NextToken = 1,
  23. };
  24. // Context and shared functionality for parser handlers. See state.def for state
  25. // documentation.
  26. class Context {
  27. public:
  28. // Possible operator fixities for errors.
  29. enum class OperatorFixity : int8_t { Prefix, Infix, Postfix };
  30. // Possible return values for FindListToken.
  31. enum class ListTokenKind : int8_t { Comma, Close, CommaClose };
  32. // Supported kinds for HandlePattern.
  33. enum class PatternKind : int8_t { ImplicitParam, Param, Variable, Let };
  34. // Supported return values for GetDeclContext.
  35. enum class DeclContext : int8_t {
  36. File, // Top-level context.
  37. Class,
  38. Interface,
  39. NamedConstraint,
  40. };
  41. // Used for restricting ordering of `package` and `import` directives.
  42. enum class PackagingState : int8_t {
  43. StartOfFile,
  44. InImports,
  45. AfterNonPackagingDecl,
  46. // A warning about `import` placement has been issued so we don't keep
  47. // issuing more (when `import` is repeated) until more non-`import`
  48. // declarations come up.
  49. InImportsAfterNonPackagingDecl,
  50. };
  51. // Used to track state on state_stack_.
  52. struct StateStackEntry : public Printable<StateStackEntry> {
  53. explicit StateStackEntry(State state, PrecedenceGroup ambient_precedence,
  54. PrecedenceGroup lhs_precedence, Lex::Token token,
  55. int32_t subtree_start)
  56. : state(state),
  57. ambient_precedence(ambient_precedence),
  58. lhs_precedence(lhs_precedence),
  59. token(token),
  60. subtree_start(subtree_start) {}
  61. // Prints state information for verbose output.
  62. auto Print(llvm::raw_ostream& output) const -> void {
  63. output << state << " @" << token << " subtree_start=" << subtree_start
  64. << " has_error=" << has_error;
  65. };
  66. // The state.
  67. State state;
  68. // Set to true to indicate that an error was found, and that contextual
  69. // error recovery may be needed.
  70. bool has_error = false;
  71. // Precedence information used by expression states in order to determine
  72. // operator precedence. The ambient_precedence deals with how the expression
  73. // should interact with outside context, while the lhs_precedence is
  74. // specific to the lhs of an operator expression.
  75. PrecedenceGroup ambient_precedence;
  76. PrecedenceGroup lhs_precedence;
  77. // A token providing context based on the subtree. This will typically be
  78. // the first token in the subtree, but may sometimes be a token within. It
  79. // will typically be used for the subtree's root node.
  80. Lex::Token token;
  81. // The offset within the Tree of the subtree start.
  82. int32_t subtree_start;
  83. };
  84. // We expect StateStackEntry to fit into 12 bytes:
  85. // state = 1 byte
  86. // has_error = 1 byte
  87. // ambient_precedence = 1 byte
  88. // lhs_precedence = 1 byte
  89. // token = 4 bytes
  90. // subtree_start = 4 bytes
  91. // If it becomes bigger, it'd be worth examining better packing; it should be
  92. // feasible to pack the 1-byte entries more tightly.
  93. static_assert(sizeof(StateStackEntry) == 12,
  94. "StateStackEntry has unexpected size!");
  95. explicit Context(Tree& tree, Lex::TokenizedBuffer& tokens,
  96. Lex::TokenDiagnosticEmitter& emitter,
  97. llvm::raw_ostream* vlog_stream);
  98. // Adds a node to the parse tree that has no children (a leaf).
  99. auto AddLeafNode(NodeKind kind, Lex::Token token, bool has_error = false)
  100. -> void;
  101. // Adds a node to the parse tree that has children.
  102. auto AddNode(NodeKind kind, Lex::Token token, int subtree_start,
  103. bool has_error) -> void;
  104. // Returns the current position and moves past it.
  105. auto Consume() -> Lex::Token { return *(position_++); }
  106. // Consumes the current token. Does not return it.
  107. auto ConsumeAndDiscard() -> void { ++position_; }
  108. // Parses an open paren token, possibly diagnosing if necessary. Creates a
  109. // leaf parse node of the specified start kind. The default_token is used when
  110. // there's no open paren. Returns the open paren token if it was found.
  111. auto ConsumeAndAddOpenParen(Lex::Token default_token, NodeKind start_kind)
  112. -> std::optional<Lex::Token>;
  113. // Parses a closing symbol corresponding to the opening symbol
  114. // `expected_open`, possibly skipping forward and diagnosing if necessary.
  115. // Creates a parse node of the specified close kind. If `expected_open` is not
  116. // an opening symbol, the parse node will be associated with `state.token`,
  117. // no input will be consumed, and no diagnostic will be emitted.
  118. auto ConsumeAndAddCloseSymbol(Lex::Token expected_open, StateStackEntry state,
  119. NodeKind close_kind) -> void;
  120. // Composes `ConsumeIf` and `AddLeafNode`, returning false when ConsumeIf
  121. // fails.
  122. auto ConsumeAndAddLeafNodeIf(Lex::TokenKind token_kind, NodeKind node_kind)
  123. -> bool;
  124. // Returns the current position and moves past it. Requires the token is the
  125. // expected kind.
  126. auto ConsumeChecked(Lex::TokenKind kind) -> Lex::Token;
  127. // If the current position's token matches this `Kind`, returns it and
  128. // advances to the next position. Otherwise returns an empty optional.
  129. auto ConsumeIf(Lex::TokenKind kind) -> std::optional<Lex::Token>;
  130. // Find the next token of any of the given kinds at the current bracketing
  131. // level.
  132. auto FindNextOf(std::initializer_list<Lex::TokenKind> desired_kinds)
  133. -> std::optional<Lex::Token>;
  134. // If the token is an opening symbol for a matched group, skips to the matched
  135. // closing symbol and returns true. Otherwise, returns false.
  136. auto SkipMatchingGroup() -> bool;
  137. // Skips forward to move past the likely end of a declaration or statement.
  138. //
  139. // Looks forward, skipping over any matched symbol groups, to find the next
  140. // position that is likely past the end of a declaration or statement. This
  141. // is a heuristic and should only be called when skipping past parse errors.
  142. //
  143. // The strategy for recognizing when we have likely passed the end of a
  144. // declaration or statement:
  145. // - If we get to a close curly brace, we likely ended the entire context.
  146. // - If we get to a semicolon, that should have ended the declaration or
  147. // statement.
  148. // - If we get to a new line from the `SkipRoot` token, but with the same or
  149. // less indentation, there is likely a missing semicolon. Continued
  150. // declarations or statements across multiple lines should be indented.
  151. //
  152. // Returns a semicolon token if one is the likely end.
  153. auto SkipPastLikelyEnd(Lex::Token skip_root) -> std::optional<Lex::Token>;
  154. // Skip forward to the given token. Verifies that it is actually forward.
  155. auto SkipTo(Lex::Token t) -> void;
  156. // Returns true if the current token satisfies the lexical validity rules
  157. // for an infix operator.
  158. auto IsLexicallyValidInfixOperator() -> bool;
  159. // Determines whether the current trailing operator should be treated as
  160. // infix.
  161. auto IsTrailingOperatorInfix() -> bool;
  162. // Diagnoses whether the current token is not written properly for the given
  163. // fixity. For example, because mandatory whitespace is missing. Regardless of
  164. // whether there's an error, it's expected that parsing continues.
  165. auto DiagnoseOperatorFixity(OperatorFixity fixity) -> void;
  166. // If the current position is a `,`, consumes it, adds the provided token, and
  167. // returns `Comma`. Returns `Close` if the current position is close_token
  168. // (for example, `)`). `CommaClose` indicates it found both (for example,
  169. // `,)`). Handles cases where invalid tokens are present by advancing the
  170. // position, and may emit errors. Pass already_has_error in order to suppress
  171. // duplicate errors.
  172. auto ConsumeListToken(NodeKind comma_kind, Lex::TokenKind close_kind,
  173. bool already_has_error) -> ListTokenKind;
  174. // Gets the kind of the next token to be consumed. If `lookahead` is
  175. // provided, it specifies which token to inspect.
  176. auto PositionKind(Lookahead lookahead = Lookahead::CurrentToken) const
  177. -> Lex::TokenKind {
  178. return tokens_->GetKind(position_[static_cast<int32_t>(lookahead)]);
  179. }
  180. // Tests whether the next token to be consumed is of the specified kind. If
  181. // `lookahead` is provided, it specifies which token to inspect.
  182. auto PositionIs(Lex::TokenKind kind,
  183. Lookahead lookahead = Lookahead::CurrentToken) const -> bool {
  184. return PositionKind(lookahead) == kind;
  185. }
  186. // Pops the state and keeps the value for inspection.
  187. auto PopState() -> StateStackEntry {
  188. auto back = state_stack_.pop_back_val();
  189. CARBON_VLOG() << "Pop " << state_stack_.size() << ": " << back << "\n";
  190. return back;
  191. }
  192. // Pops the state and discards it.
  193. auto PopAndDiscardState() -> void {
  194. CARBON_VLOG() << "PopAndDiscard " << state_stack_.size() - 1 << ": "
  195. << state_stack_.back() << "\n";
  196. state_stack_.pop_back();
  197. }
  198. // Pushes a new state with the current position for context.
  199. auto PushState(State state) -> void {
  200. PushState(StateStackEntry(state, PrecedenceGroup::ForTopLevelExpr(),
  201. PrecedenceGroup::ForTopLevelExpr(), *position_,
  202. tree_->size()));
  203. }
  204. // Pushes a new state with a specific token for context. Used when forming a
  205. // new subtree with a token that isn't the start of the subtree.
  206. auto PushState(State state, Lex::Token token) -> void {
  207. PushState(StateStackEntry(state, PrecedenceGroup::ForTopLevelExpr(),
  208. PrecedenceGroup::ForTopLevelExpr(), token,
  209. tree_->size()));
  210. }
  211. // Pushes a new expression state with specific precedence.
  212. auto PushStateForExpr(PrecedenceGroup ambient_precedence) -> void {
  213. PushState(StateStackEntry(State::Expr, ambient_precedence,
  214. PrecedenceGroup::ForTopLevelExpr(), *position_,
  215. tree_->size()));
  216. }
  217. // Pushes a new state with detailed precedence for expression resume states.
  218. auto PushStateForExprLoop(State state, PrecedenceGroup ambient_precedence,
  219. PrecedenceGroup lhs_precedence) -> void {
  220. PushState(StateStackEntry(state, ambient_precedence, lhs_precedence,
  221. *position_, tree_->size()));
  222. }
  223. // Pushes a constructed state onto the stack.
  224. auto PushState(StateStackEntry state) -> void {
  225. CARBON_VLOG() << "Push " << state_stack_.size() << ": " << state << "\n";
  226. state_stack_.push_back(state);
  227. CARBON_CHECK(state_stack_.size() < (1 << 20))
  228. << "Excessive stack size: likely infinite loop";
  229. }
  230. // Returns the current declaration context according to state_stack_.
  231. // This is expected to be called in cases which are close to a context.
  232. // Although it looks like it could be O(n) for state_stack_'s depth, valid
  233. // parses should only need to look down a couple steps.
  234. //
  235. // This currently assumes it's being called from within the declaration's
  236. // DeclScopeLoop.
  237. auto GetDeclContext() -> DeclContext;
  238. // Propagates an error up the state stack, to the parent state.
  239. auto ReturnErrorOnState() -> void { state_stack_.back().has_error = true; }
  240. // For HandlePattern, tries to consume a wrapping keyword.
  241. auto ConsumeIfPatternKeyword(Lex::TokenKind keyword_token,
  242. State keyword_state, int subtree_start) -> void;
  243. // Emits a diagnostic for a declaration missing a semi.
  244. auto EmitExpectedDeclSemi(Lex::TokenKind expected_kind) -> void;
  245. // Emits a diagnostic for a declaration missing a semi or definition.
  246. auto EmitExpectedDeclSemiOrDefinition(Lex::TokenKind expected_kind) -> void;
  247. // Handles error recovery in a declaration, particularly before any possible
  248. // definition has started (although one could be present). Recover to a
  249. // semicolon when it makes sense as a possible end, otherwise use the
  250. // introducer token for the error.
  251. auto RecoverFromDeclError(StateStackEntry state, NodeKind parse_node_kind,
  252. bool skip_past_likely_end) -> void;
  253. // Sets the package directive information. Called at most once.
  254. auto set_packaging_directive(Tree::PackagingNames packaging_names,
  255. Tree::ApiOrImpl api_or_impl) -> void {
  256. CARBON_CHECK(!tree_->packaging_directive_);
  257. tree_->packaging_directive_ = {.names = packaging_names,
  258. .api_or_impl = api_or_impl};
  259. }
  260. // Adds an import.
  261. auto AddImport(Tree::PackagingNames package) -> void {
  262. tree_->imports_.push_back(package);
  263. }
  264. // Prints information for a stack dump.
  265. auto PrintForStackDump(llvm::raw_ostream& output) const -> void;
  266. auto tree() const -> const Tree& { return *tree_; }
  267. auto tokens() const -> const Lex::TokenizedBuffer& { return *tokens_; }
  268. auto emitter() -> Lex::TokenDiagnosticEmitter& { return *emitter_; }
  269. auto position() -> Lex::TokenIterator& { return position_; }
  270. auto position() const -> Lex::TokenIterator { return position_; }
  271. auto state_stack() -> llvm::SmallVector<StateStackEntry>& {
  272. return state_stack_;
  273. }
  274. auto state_stack() const -> const llvm::SmallVector<StateStackEntry>& {
  275. return state_stack_;
  276. }
  277. auto packaging_state() const -> PackagingState { return packaging_state_; }
  278. auto set_packaging_state(PackagingState packaging_state) -> void {
  279. packaging_state_ = packaging_state;
  280. }
  281. auto first_non_packaging_token() const -> Lex::Token {
  282. return first_non_packaging_token_;
  283. }
  284. auto set_first_non_packaging_token(Lex::Token token) -> void {
  285. CARBON_CHECK(!first_non_packaging_token_.is_valid());
  286. first_non_packaging_token_ = token;
  287. }
  288. private:
  289. // Prints a single token for a stack dump. Used by PrintForStackDump.
  290. auto PrintTokenForStackDump(llvm::raw_ostream& output, Lex::Token token) const
  291. -> void;
  292. Tree* tree_;
  293. Lex::TokenizedBuffer* tokens_;
  294. Lex::TokenDiagnosticEmitter* emitter_;
  295. // Whether to print verbose output.
  296. llvm::raw_ostream* vlog_stream_;
  297. // The current position within the token buffer.
  298. Lex::TokenIterator position_;
  299. // The EndOfFile token.
  300. Lex::TokenIterator end_;
  301. llvm::SmallVector<StateStackEntry> state_stack_;
  302. // The current packaging state, whether `import`/`package` are allowed.
  303. PackagingState packaging_state_ = PackagingState::StartOfFile;
  304. // The first non-packaging token, starting as invalid. Used for packaging
  305. // state warnings.
  306. Lex::Token first_non_packaging_token_ = Lex::Token::Invalid;
  307. };
  308. // `clang-format` has a bug with spacing around `->` returns in macros. See
  309. // https://bugs.llvm.org/show_bug.cgi?id=48320 for details.
  310. #define CARBON_PARSE_STATE(Name) auto Handle##Name(Context& context)->void;
  311. #include "toolchain/parse/state.def"
  312. } // namespace Carbon::Parse
  313. #endif // CARBON_TOOLCHAIN_PARSE_CONTEXT_H_