浏览代码

Replace some reference members with pointer members. (#2408)

I'd noted this while rewriting the parser; although I kept the reference members in place during that conversion, switching to pointers is consistent with the higher-level preference for pointer members, and the use of references here was just an oversight.

Discussed at:
https://discord.com/channels/655572317891461132/655578254970716160/1042551242813083840
Jon Ross-Perkins 3 年之前
父节点
当前提交
adac572430
共有 3 个文件被更改,包括 156 次插入和 151 次删除
  1. 60 60
      toolchain/lexer/tokenized_buffer.cpp
  2. 87 84
      toolchain/parser/parser.cpp
  3. 9 7
      toolchain/parser/parser.h

+ 60 - 60
toolchain/lexer/tokenized_buffer.cpp

@@ -84,7 +84,7 @@ class TokenizedBuffer::Lexer {
   };
 
   Lexer(TokenizedBuffer& buffer, DiagnosticConsumer& consumer)
-      : buffer_(buffer),
+      : buffer_(&buffer),
         translator_(buffer, &current_column_),
         emitter_(translator_, consumer),
         token_translator_(buffer, &current_column_),
@@ -97,16 +97,16 @@ class TokenizedBuffer::Lexer {
   auto HandleNewline() -> void {
     current_line_info_->length = current_column_;
 
-    current_line_ = buffer_.AddLine(
+    current_line_ = buffer_->AddLine(
         {current_line_info_->start + current_column_ + 1, 0, 0});
-    current_line_info_ = &buffer_.GetLineInfo(current_line_);
+    current_line_info_ = &buffer_->GetLineInfo(current_line_);
     current_column_ = 0;
     set_indent_ = false;
   }
 
   auto NoteWhitespace() -> void {
-    if (!buffer_.token_infos_.empty()) {
-      buffer_.token_infos_.back().has_trailing_space = true;
+    if (!buffer_->token_infos_.empty()) {
+      buffer_->token_infos_.back().has_trailing_space = true;
     }
   }
 
@@ -203,28 +203,28 @@ class TokenizedBuffer::Lexer {
     return VariantMatch(
         literal->ComputeValue(emitter_),
         [&](LexedNumericLiteral::IntegerValue&& value) {
-          auto token = buffer_.AddToken({.kind = TokenKind::IntegerLiteral(),
-                                         .token_line = current_line_,
-                                         .column = int_column});
-          buffer_.GetTokenInfo(token).literal_index =
-              buffer_.literal_int_storage_.size();
-          buffer_.literal_int_storage_.push_back(std::move(value.value));
+          auto token = buffer_->AddToken({.kind = TokenKind::IntegerLiteral(),
+                                          .token_line = current_line_,
+                                          .column = int_column});
+          buffer_->GetTokenInfo(token).literal_index =
+              buffer_->literal_int_storage_.size();
+          buffer_->literal_int_storage_.push_back(std::move(value.value));
           return token;
         },
         [&](LexedNumericLiteral::RealValue&& value) {
-          auto token = buffer_.AddToken({.kind = TokenKind::RealLiteral(),
-                                         .token_line = current_line_,
-                                         .column = int_column});
-          buffer_.GetTokenInfo(token).literal_index =
-              buffer_.literal_int_storage_.size();
-          buffer_.literal_int_storage_.push_back(std::move(value.mantissa));
-          buffer_.literal_int_storage_.push_back(std::move(value.exponent));
-          CARBON_CHECK(buffer_.GetRealLiteral(token).IsDecimal() ==
+          auto token = buffer_->AddToken({.kind = TokenKind::RealLiteral(),
+                                          .token_line = current_line_,
+                                          .column = int_column});
+          buffer_->GetTokenInfo(token).literal_index =
+              buffer_->literal_int_storage_.size();
+          buffer_->literal_int_storage_.push_back(std::move(value.mantissa));
+          buffer_->literal_int_storage_.push_back(std::move(value.exponent));
+          CARBON_CHECK(buffer_->GetRealLiteral(token).IsDecimal() ==
                        (value.radix == LexedNumericLiteral::Radix::Decimal));
           return token;
         },
         [&](LexedNumericLiteral::UnrecoverableError) {
-          auto token = buffer_.AddToken({
+          auto token = buffer_->AddToken({
               .kind = TokenKind::Error(),
               .token_line = current_line_,
               .column = int_column,
@@ -270,22 +270,22 @@ class TokenizedBuffer::Lexer {
 
     if (literal->is_terminated()) {
       auto token =
-          buffer_.AddToken({.kind = TokenKind::StringLiteral(),
-                            .token_line = string_line,
-                            .column = string_column,
-                            .literal_index = static_cast<int32_t>(
-                                buffer_.literal_string_storage_.size())});
-      buffer_.literal_string_storage_.push_back(
+          buffer_->AddToken({.kind = TokenKind::StringLiteral(),
+                             .token_line = string_line,
+                             .column = string_column,
+                             .literal_index = static_cast<int32_t>(
+                                 buffer_->literal_string_storage_.size())});
+      buffer_->literal_string_storage_.push_back(
           literal->ComputeValue(emitter_));
       return token;
     } else {
       CARBON_DIAGNOSTIC(UnterminatedString, Error,
                         "String is missing a terminator.");
       emitter_.Emit(literal->text().begin(), UnterminatedString);
-      return buffer_.AddToken({.kind = TokenKind::Error(),
-                               .token_line = string_line,
-                               .column = string_column,
-                               .error_length = literal_size});
+      return buffer_->AddToken({.kind = TokenKind::Error(),
+                                .token_line = string_line,
+                                .column = string_column,
+                                .error_length = literal_size});
     }
   }
 
@@ -307,7 +307,7 @@ class TokenizedBuffer::Lexer {
     CloseInvalidOpenGroups(kind);
 
     const char* location = source_text.begin();
-    Token token = buffer_.AddToken(
+    Token token = buffer_->AddToken(
         {.kind = kind, .token_line = current_line_, .column = current_column_});
     current_column_ += kind.GetFixedSpelling().size();
     source_text = source_text.drop_front(kind.GetFixedSpelling().size());
@@ -323,7 +323,7 @@ class TokenizedBuffer::Lexer {
       return token;
     }
 
-    TokenInfo& closing_token_info = buffer_.GetTokenInfo(token);
+    TokenInfo& closing_token_info = buffer_->GetTokenInfo(token);
 
     // Check that there is a matching opening symbol before we consume this as
     // a closing symbol.
@@ -341,7 +341,7 @@ class TokenizedBuffer::Lexer {
 
     // Finally can handle a normal closing symbol.
     Token opening_token = open_groups_.pop_back_val();
-    TokenInfo& opening_token_info = buffer_.GetTokenInfo(opening_token);
+    TokenInfo& opening_token_info = buffer_->GetTokenInfo(opening_token);
     opening_token_info.closing_token = token;
     closing_token_info.opening_token = opening_token;
     return token;
@@ -377,7 +377,7 @@ class TokenizedBuffer::Lexer {
 
     llvm::StringRef suffix = word.substr(1);
     if (!CanLexInteger(emitter_, suffix)) {
-      return buffer_.AddToken(
+      return buffer_->AddToken(
           {.kind = TokenKind::Error(),
            .token_line = current_line_,
            .column = column,
@@ -388,11 +388,11 @@ class TokenizedBuffer::Lexer {
       return LexResult::NoMatch();
     }
 
-    auto token = buffer_.AddToken(
+    auto token = buffer_->AddToken(
         {.kind = *kind, .token_line = current_line_, .column = column});
-    buffer_.GetTokenInfo(token).literal_index =
-        buffer_.literal_int_storage_.size();
-    buffer_.literal_int_storage_.push_back(std::move(suffix_value));
+    buffer_->GetTokenInfo(token).literal_index =
+        buffer_->literal_int_storage_.size();
+    buffer_->literal_int_storage_.push_back(std::move(suffix_value));
     return token;
   }
 
@@ -405,7 +405,7 @@ class TokenizedBuffer::Lexer {
 
     while (!open_groups_.empty()) {
       Token opening_token = open_groups_.back();
-      TokenKind opening_kind = buffer_.GetTokenInfo(opening_token).kind;
+      TokenKind opening_kind = buffer_->GetTokenInfo(opening_token).kind;
       if (kind == opening_kind.GetClosingSymbol()) {
         return;
       }
@@ -416,30 +416,30 @@ class TokenizedBuffer::Lexer {
           "Closing symbol does not match most recent opening symbol.");
       token_emitter_.Emit(opening_token, MismatchedClosing);
 
-      CARBON_CHECK(!buffer_.tokens().empty())
+      CARBON_CHECK(!buffer_->tokens().empty())
           << "Must have a prior opening token!";
-      Token prev_token = buffer_.tokens().end()[-1];
+      Token prev_token = buffer_->tokens().end()[-1];
 
       // TODO: do a smarter backwards scan for where to put the closing
       // token.
-      Token closing_token = buffer_.AddToken(
+      Token closing_token = buffer_->AddToken(
           {.kind = opening_kind.GetClosingSymbol(),
-           .has_trailing_space = buffer_.HasTrailingWhitespace(prev_token),
+           .has_trailing_space = buffer_->HasTrailingWhitespace(prev_token),
            .is_recovery = true,
            .token_line = current_line_,
            .column = current_column_});
-      TokenInfo& opening_token_info = buffer_.GetTokenInfo(opening_token);
-      TokenInfo& closing_token_info = buffer_.GetTokenInfo(closing_token);
+      TokenInfo& opening_token_info = buffer_->GetTokenInfo(opening_token);
+      TokenInfo& closing_token_info = buffer_->GetTokenInfo(closing_token);
       opening_token_info.closing_token = closing_token;
       closing_token_info.opening_token = opening_token;
     }
   }
 
   auto GetOrCreateIdentifier(llvm::StringRef text) -> Identifier {
-    auto insert_result = buffer_.identifier_map_.insert(
-        {text, Identifier(buffer_.identifier_infos_.size())});
+    auto insert_result = buffer_->identifier_map_.insert(
+        {text, Identifier(buffer_->identifier_infos_.size())});
     if (insert_result.second) {
-      buffer_.identifier_infos_.push_back({text});
+      buffer_->identifier_infos_.push_back({text});
     }
     return insert_result.first->second;
   }
@@ -475,16 +475,16 @@ class TokenizedBuffer::Lexer {
 #include "toolchain/lexer/token_registry.def"
                          .Default(TokenKind::Error());
     if (kind != TokenKind::Error()) {
-      return buffer_.AddToken({.kind = kind,
-                               .token_line = current_line_,
-                               .column = identifier_column});
+      return buffer_->AddToken({.kind = kind,
+                                .token_line = current_line_,
+                                .column = identifier_column});
     }
 
     // Otherwise we have a generic identifier.
-    return buffer_.AddToken({.kind = TokenKind::Identifier(),
-                             .token_line = current_line_,
-                             .column = identifier_column,
-                             .id = GetOrCreateIdentifier(identifier_text)});
+    return buffer_->AddToken({.kind = TokenKind::Identifier(),
+                              .token_line = current_line_,
+                              .column = identifier_column,
+                              .id = GetOrCreateIdentifier(identifier_text)});
   }
 
   auto LexError(llvm::StringRef& source_text) -> LexResult {
@@ -509,7 +509,7 @@ class TokenizedBuffer::Lexer {
       error_text = source_text.take_front(1);
     }
 
-    auto token = buffer_.AddToken(
+    auto token = buffer_->AddToken(
         {.kind = TokenKind::Error(),
          .token_line = current_line_,
          .column = current_column_,
@@ -524,13 +524,13 @@ class TokenizedBuffer::Lexer {
   }
 
   auto AddEndOfFileToken() -> void {
-    buffer_.AddToken({.kind = TokenKind::EndOfFile(),
-                      .token_line = current_line_,
-                      .column = current_column_});
+    buffer_->AddToken({.kind = TokenKind::EndOfFile(),
+                       .token_line = current_line_,
+                       .column = current_column_});
   }
 
  private:
-  TokenizedBuffer& buffer_;
+  TokenizedBuffer* buffer_;
 
   SourceBufferLocationTranslator translator_;
   LexerDiagnosticEmitter emitter_;

+ 87 - 84
toolchain/parser/parser.cpp

@@ -68,46 +68,46 @@ class Parser::PrettyStackTraceParseState : public llvm::PrettyStackTraceEntry {
  private:
   auto Print(llvm::raw_ostream& output, TokenizedBuffer::Token token) const
       -> void {
-    auto line = parser_->tokens_.GetLine(token);
-    output << " @ " << parser_->tokens_.GetLineNumber(line) << ":"
-           << parser_->tokens_.GetColumnNumber(token) << ":"
+    auto line = parser_->tokens_->GetLine(token);
+    output << " @ " << parser_->tokens_->GetLineNumber(line) << ":"
+           << parser_->tokens_->GetColumnNumber(token) << ":"
            << " token " << token << " : "
-           << parser_->tokens_.GetKind(token).Name() << "\n";
+           << parser_->tokens_->GetKind(token).Name() << "\n";
   }
 
   const Parser* parser_;
 };
 
-Parser::Parser(ParseTree& tree_arg, TokenizedBuffer& tokens_arg,
+Parser::Parser(ParseTree& tree, TokenizedBuffer& tokens,
                TokenDiagnosticEmitter& emitter)
-    : tree_(tree_arg),
-      tokens_(tokens_arg),
-      emitter_(emitter),
-      position_(tokens_.tokens().begin()),
-      end_(tokens_.tokens().end()) {
+    : tree_(&tree),
+      tokens_(&tokens),
+      emitter_(&emitter),
+      position_(tokens_->tokens().begin()),
+      end_(tokens_->tokens().end()) {
   CARBON_CHECK(position_ != end_) << "Empty TokenizedBuffer";
   --end_;
-  CARBON_CHECK(tokens_.GetKind(*end_) == TokenKind::EndOfFile())
+  CARBON_CHECK(tokens_->GetKind(*end_) == TokenKind::EndOfFile())
       << "TokenizedBuffer should end with EndOfFile, ended with "
-      << tokens_.GetKind(*end_).Name();
+      << tokens_->GetKind(*end_).Name();
 }
 
 auto Parser::AddLeafNode(ParseNodeKind kind, TokenizedBuffer::Token token,
                          bool has_error) -> void {
-  tree_.node_impls_.push_back(
+  tree_->node_impls_.push_back(
       ParseTree::NodeImpl(kind, has_error, token, /*subtree_size=*/1));
   if (has_error) {
-    tree_.has_errors_ = true;
+    tree_->has_errors_ = true;
   }
 }
 
 auto Parser::AddNode(ParseNodeKind kind, TokenizedBuffer::Token token,
                      int subtree_start, bool has_error) -> void {
-  int subtree_size = tree_.size() - subtree_start + 1;
-  tree_.node_impls_.push_back(
+  int subtree_size = tree_->size() - subtree_start + 1;
+  tree_->node_impls_.push_back(
       ParseTree::NodeImpl(kind, has_error, token, subtree_size));
   if (has_error) {
-    tree_.has_errors_ = true;
+    tree_->has_errors_ = true;
   }
 }
 
@@ -119,9 +119,9 @@ auto Parser::ConsumeAndAddCloseParen(TokenizedBuffer::Token open_paren,
 
   // TODO: Include the location of the matching open_paren in the diagnostic.
   CARBON_DIAGNOSTIC(ExpectedCloseParen, Error, "Unexpected tokens before `)`.");
-  emitter_.Emit(*position_, ExpectedCloseParen);
+  emitter_->Emit(*position_, ExpectedCloseParen);
 
-  SkipTo(tokens_.GetMatchedClosingToken(open_paren));
+  SkipTo(tokens_->GetMatchedClosingToken(open_paren));
   AddLeafNode(close_kind, Consume());
   return false;
 }
@@ -150,7 +150,7 @@ auto Parser::FindNextOf(std::initializer_list<TokenKind> desired_kinds)
   auto new_position = position_;
   while (true) {
     TokenizedBuffer::Token token = *new_position;
-    TokenKind kind = tokens_.GetKind(token);
+    TokenKind kind = tokens_->GetKind(token);
     if (kind.IsOneOf(desired_kinds)) {
       return token;
     }
@@ -160,8 +160,8 @@ auto Parser::FindNextOf(std::initializer_list<TokenKind> desired_kinds)
       // There are no more tokens at this level.
       return llvm::None;
     } else if (kind.IsOpeningSymbol()) {
-      new_position =
-          TokenizedBuffer::TokenIterator(tokens_.GetMatchedClosingToken(token));
+      new_position = TokenizedBuffer::TokenIterator(
+          tokens_->GetMatchedClosingToken(token));
       // Advance past the closing token.
       ++new_position;
     } else {
@@ -175,7 +175,7 @@ auto Parser::SkipMatchingGroup() -> bool {
     return false;
   }
 
-  SkipTo(tokens_.GetMatchedClosingToken(*position_));
+  SkipTo(tokens_->GetMatchedClosingToken(*position_));
   ++position_;
   return true;
 }
@@ -186,19 +186,19 @@ auto Parser::SkipPastLikelyEnd(TokenizedBuffer::Token skip_root)
     return llvm::None;
   }
 
-  TokenizedBuffer::Line root_line = tokens_.GetLine(skip_root);
-  int root_line_indent = tokens_.GetIndentColumnNumber(root_line);
+  TokenizedBuffer::Line root_line = tokens_->GetLine(skip_root);
+  int root_line_indent = tokens_->GetIndentColumnNumber(root_line);
 
   // We will keep scanning through tokens on the same line as the root or
   // lines with greater indentation than root's line.
   auto is_same_line_or_indent_greater_than_root =
       [&](TokenizedBuffer::Token t) {
-        TokenizedBuffer::Line l = tokens_.GetLine(t);
+        TokenizedBuffer::Line l = tokens_->GetLine(t);
         if (l == root_line) {
           return true;
         }
 
-        return tokens_.GetIndentColumnNumber(l) > root_line_indent;
+        return tokens_->GetIndentColumnNumber(l) > root_line_indent;
       };
 
   do {
@@ -214,7 +214,7 @@ auto Parser::SkipPastLikelyEnd(TokenizedBuffer::Token skip_root)
       return semi;
     }
 
-    // Skip over any matching group of tokens_.
+    // Skip over any matching group of tokens.
     if (SkipMatchingGroup()) {
       continue;
     }
@@ -247,7 +247,7 @@ auto Parser::HandleCodeBlockState() -> void {
 
     // Recover by parsing a single statement.
     CARBON_DIAGNOSTIC(ExpectedCodeBlock, Error, "Expected braced code block.");
-    emitter_.Emit(*position_, ExpectedCodeBlock);
+    emitter_->Emit(*position_, ExpectedCodeBlock);
 
     PushState(ParserState::Statement());
   }
@@ -282,8 +282,8 @@ static auto IsPossibleStartOfOperand(TokenKind kind) -> bool {
 auto Parser::IsLexicallyValidInfixOperator() -> bool {
   CARBON_CHECK(position_ != end_) << "Expected an operator token.";
 
-  bool leading_space = tokens_.HasLeadingWhitespace(*position_);
-  bool trailing_space = tokens_.HasTrailingWhitespace(*position_);
+  bool leading_space = tokens_->HasLeadingWhitespace(*position_);
+  bool trailing_space = tokens_->HasTrailingWhitespace(*position_);
 
   // If there's whitespace on both sides, it's an infix operator.
   if (leading_space && trailing_space) {
@@ -298,9 +298,9 @@ auto Parser::IsLexicallyValidInfixOperator() -> bool {
   // Otherwise, for an infix operator, the preceding token must be any close
   // bracket, identifier, or literal and the next token must be an open paren,
   // identifier, or literal.
-  if (position_ == tokens_.tokens().begin() ||
-      !IsAssumedEndOfOperand(tokens_.GetKind(*(position_ - 1))) ||
-      !IsAssumedStartOfOperand(tokens_.GetKind(*(position_ + 1)))) {
+  if (position_ == tokens_->tokens().begin() ||
+      !IsAssumedEndOfOperand(tokens_->GetKind(*(position_ - 1))) ||
+      !IsAssumedStartOfOperand(tokens_->GetKind(*(position_ + 1)))) {
     return false;
   }
 
@@ -315,7 +315,7 @@ auto Parser::IsTrailingOperatorInfix() -> bool {
   // An operator that follows the infix operator rules is parsed as
   // infix, unless the next token means that it can't possibly be.
   if (IsLexicallyValidInfixOperator() &&
-      IsPossibleStartOfOperand(tokens_.GetKind(*(position_ + 1)))) {
+      IsPossibleStartOfOperand(tokens_->GetKind(*(position_ + 1)))) {
     return true;
   }
 
@@ -323,8 +323,8 @@ auto Parser::IsTrailingOperatorInfix() -> bool {
   // not valid at all. If the next token looks like the start of an operand,
   // then parse as infix, otherwise as postfix. Either way we'll produce a
   // diagnostic later on.
-  if (tokens_.HasLeadingWhitespace(*position_) &&
-      IsAssumedStartOfOperand(tokens_.GetKind(*(position_ + 1)))) {
+  if (tokens_->HasLeadingWhitespace(*position_) &&
+      IsAssumedStartOfOperand(tokens_->GetKind(*(position_ + 1)))) {
     return true;
   }
 
@@ -338,12 +338,12 @@ auto Parser::DiagnoseOperatorFixity(OperatorFixity fixity) -> void {
       CARBON_DIAGNOSTIC(BinaryOperatorRequiresWhitespace, Error,
                         "Whitespace missing {0} binary operator.",
                         RelativeLocation);
-      emitter_.Emit(*position_, BinaryOperatorRequiresWhitespace,
-                    tokens_.HasLeadingWhitespace(*position_)
-                        ? RelativeLocation::After
-                        : (tokens_.HasTrailingWhitespace(*position_)
-                               ? RelativeLocation::Before
-                               : RelativeLocation::Around));
+      emitter_->Emit(*position_, BinaryOperatorRequiresWhitespace,
+                     tokens_->HasLeadingWhitespace(*position_)
+                         ? RelativeLocation::After
+                         : (tokens_->HasTrailingWhitespace(*position_)
+                                ? RelativeLocation::Before
+                                : RelativeLocation::Around));
     }
   } else {
     bool prefix = fixity == OperatorFixity::Prefix;
@@ -351,12 +351,12 @@ auto Parser::DiagnoseOperatorFixity(OperatorFixity fixity) -> void {
     // Whitespace is not permitted between a symbolic pre/postfix operator and
     // its operand.
     if (PositionKind().IsSymbol() &&
-        (prefix ? tokens_.HasTrailingWhitespace(*position_)
-                : tokens_.HasLeadingWhitespace(*position_))) {
+        (prefix ? tokens_->HasTrailingWhitespace(*position_)
+                : tokens_->HasLeadingWhitespace(*position_))) {
       CARBON_DIAGNOSTIC(UnaryOperatorHasWhitespace, Error,
                         "Whitespace is not allowed {0} this unary operator.",
                         RelativeLocation);
-      emitter_.Emit(
+      emitter_->Emit(
           *position_, UnaryOperatorHasWhitespace,
           prefix ? RelativeLocation::After : RelativeLocation::Before);
     }
@@ -365,7 +365,7 @@ auto Parser::DiagnoseOperatorFixity(OperatorFixity fixity) -> void {
       CARBON_DIAGNOSTIC(UnaryOperatorRequiresWhitespace, Error,
                         "Whitespace is required {0} this unary operator.",
                         RelativeLocation);
-      emitter_.Emit(
+      emitter_->Emit(
           *position_, UnaryOperatorRequiresWhitespace,
           prefix ? RelativeLocation::Before : RelativeLocation::After);
     }
@@ -379,7 +379,7 @@ auto Parser::ConsumeListToken(ParseNodeKind comma_kind, TokenKind close_kind,
     if (!already_has_error) {
       CARBON_DIAGNOSTIC(UnexpectedTokenAfterListElement, Error,
                         "Expected `,` or `{0}`.", TokenKind);
-      emitter_.Emit(*position_, UnexpectedTokenAfterListElement, close_kind);
+      emitter_->Emit(*position_, UnexpectedTokenAfterListElement, close_kind);
       ReturnErrorOnState();
     }
 
@@ -459,10 +459,10 @@ auto Parser::HandleBraceExpressionParameterError(StateStackEntry state,
                     llvm::StringRef, llvm::StringRef, llvm::StringRef);
   bool can_be_type = kind != BraceExpressionKind::Value;
   bool can_be_value = kind != BraceExpressionKind::Type;
-  emitter_.Emit(*position_, ExpectedStructLiteralField,
-                can_be_type ? "`.field: type`" : "",
-                (can_be_type && can_be_value) ? " or " : "",
-                can_be_value ? "`.field = value`" : "");
+  emitter_->Emit(*position_, ExpectedStructLiteralField,
+                 can_be_type ? "`.field: type`" : "",
+                 (can_be_type && can_be_value) ? " or " : "",
+                 can_be_value ? "`.field = value`" : "");
 
   state.state = BraceExpressionKindToParserState(
       kind, ParserState::BraceExpressionParameterFinishAsType(),
@@ -507,7 +507,8 @@ auto Parser::HandleBraceExpressionParameterAfterDesignator(
   if (state.has_error) {
     auto recovery_pos = FindNextOf(
         {TokenKind::Equal(), TokenKind::Colon(), TokenKind::Comma()});
-    if (!recovery_pos || tokens_.GetKind(*recovery_pos) == TokenKind::Comma()) {
+    if (!recovery_pos ||
+        tokens_->GetKind(*recovery_pos) == TokenKind::Comma()) {
       state.state = BraceExpressionKindToParserState(
           kind, ParserState::BraceExpressionParameterFinishAsType(),
           ParserState::BraceExpressionParameterFinishAsValue(),
@@ -665,7 +666,7 @@ auto Parser::HandleCodeBlockFinishState() -> void {
   auto state = PopState();
 
   // If the block started with an open curly, this is a close curly.
-  if (tokens_.GetKind(state.token) == TokenKind::OpenCurlyBrace()) {
+  if (tokens_->GetKind(state.token) == TokenKind::OpenCurlyBrace()) {
     AddNode(ParseNodeKind::CodeBlock(), Consume(), state.subtree_start,
             state.has_error);
   } else {
@@ -703,8 +704,8 @@ auto Parser::HandleDeclarationLoopState() -> void {
     default: {
       CARBON_DIAGNOSTIC(UnrecognizedDeclaration, Error,
                         "Unrecognized declaration introducer.");
-      emitter_.Emit(*position_, UnrecognizedDeclaration);
-      tree_.has_errors_ = true;
+      emitter_->Emit(*position_, UnrecognizedDeclaration);
+      tree_->has_errors_ = true;
       if (auto semi = SkipPastLikelyEnd(*position_)) {
         AddLeafNode(ParseNodeKind::EmptyDeclaration(), *semi,
                     /*has_error=*/true);
@@ -724,7 +725,7 @@ auto Parser::HandleDesignator(bool as_struct) -> void {
                                ParseNodeKind::DesignatedName())) {
     CARBON_DIAGNOSTIC(ExpectedIdentifierAfterDot, Error,
                       "Expected identifier after `.`.");
-    emitter_.Emit(*position_, ExpectedIdentifierAfterDot);
+    emitter_->Emit(*position_, ExpectedIdentifierAfterDot);
     // If we see a keyword, assume it was intended to be the designated name.
     // TODO: Should keywords be valid in designators?
     if (PositionKind().IsKeyword()) {
@@ -759,7 +760,7 @@ auto Parser::HandleExpressionState() -> void {
         OperatorPriority::RightFirst) {
       // The precedence rules don't permit this prefix operator in this
       // context. Diagnose this, but carry on and parse it anyway.
-      emitter_.Emit(*position_, OperatorRequiresParentheses);
+      emitter_->Emit(*position_, OperatorRequiresParentheses);
     } else {
       // Check that this operator follows the proper whitespace rules.
       DiagnoseOperatorFixity(OperatorFixity::Prefix);
@@ -814,7 +815,7 @@ auto Parser::HandleExpressionInPostfixState() -> void {
     }
     default: {
       CARBON_DIAGNOSTIC(ExpectedExpression, Error, "Expected expression.");
-      emitter_.Emit(*position_, ExpectedExpression);
+      emitter_->Emit(*position_, ExpectedExpression);
       ReturnErrorOnState();
       break;
     }
@@ -882,7 +883,7 @@ auto Parser::HandleExpressionLoopState() -> void {
     // Either the LHS operator and this operator are ambiguous, or the
     // LHS operator is a unary operator that can't be nested within
     // this operator. Either way, parentheses are required.
-    emitter_.Emit(*position_, OperatorRequiresParentheses);
+    emitter_->Emit(*position_, OperatorRequiresParentheses);
     state.has_error = true;
   } else {
     DiagnoseOperatorFixity(is_binary ? OperatorFixity::Infix
@@ -934,7 +935,7 @@ auto Parser::HandleExpressionStatementFinishState() -> void {
   }
 
   if (!state.has_error) {
-    emitter_.Emit(*position_, ExpectedSemiAfterExpression);
+    emitter_->Emit(*position_, ExpectedSemiAfterExpression);
   }
 
   if (auto semi_token = SkipPastLikelyEnd(state.token)) {
@@ -967,7 +968,7 @@ auto Parser::HandleFunctionIntroducerState() -> void {
                                ParseNodeKind::DeclaredName())) {
     CARBON_DIAGNOSTIC(ExpectedFunctionName, Error,
                       "Expected function name after `fn` keyword.");
-    emitter_.Emit(*position_, ExpectedFunctionName);
+    emitter_->Emit(*position_, ExpectedFunctionName);
     // TODO: We could change the lexer to allow us to synthesize certain
     // kinds of tokens and try to "recover" here, but unclear that this is
     // really useful.
@@ -978,7 +979,7 @@ auto Parser::HandleFunctionIntroducerState() -> void {
   if (!PositionIs(TokenKind::OpenParen())) {
     CARBON_DIAGNOSTIC(ExpectedFunctionParams, Error,
                       "Expected `(` after function name.");
-    emitter_.Emit(*position_, ExpectedFunctionParams);
+    emitter_->Emit(*position_, ExpectedFunctionParams);
     HandleFunctionError(state, true);
     return;
   }
@@ -1069,10 +1070,10 @@ auto Parser::HandleFunctionSignatureFinishState() -> void {
       CARBON_DIAGNOSTIC(
           ExpectedFunctionBodyOrSemi, Error,
           "Expected function definition or `;` after function declaration.");
-      emitter_.Emit(*position_, ExpectedFunctionBodyOrSemi);
+      emitter_->Emit(*position_, ExpectedFunctionBodyOrSemi);
       // Only need to skip if we've not already found a new line.
       bool skip_past_likely_end =
-          tokens_.GetLine(*position_) == tokens_.GetLine(state.token);
+          tokens_->GetLine(*position_) == tokens_->GetLine(state.token);
       HandleFunctionError(state, skip_past_likely_end);
       break;
     }
@@ -1100,21 +1101,21 @@ auto Parser::HandlePackageState() -> void {
                                ParseNodeKind::DeclaredName())) {
     CARBON_DIAGNOSTIC(ExpectedIdentifierAfterPackage, Error,
                       "Expected identifier after `package`.");
-    emitter_.Emit(*position_, ExpectedIdentifierAfterPackage);
+    emitter_->Emit(*position_, ExpectedIdentifierAfterPackage);
     exit_on_parse_error();
     return;
   }
 
   bool library_parsed = false;
   if (auto library_token = ConsumeIf(TokenKind::Library())) {
-    auto library_start = tree_.size();
+    auto library_start = tree_->size();
 
     if (!ConsumeAndAddLeafNodeIf(TokenKind::StringLiteral(),
                                  ParseNodeKind::Literal())) {
       CARBON_DIAGNOSTIC(
           ExpectedLibraryName, Error,
           "Expected a string literal to specify the library name.");
-      emitter_.Emit(*position_, ExpectedLibraryName);
+      emitter_->Emit(*position_, ExpectedLibraryName);
       exit_on_parse_error();
       return;
     }
@@ -1124,7 +1125,7 @@ auto Parser::HandlePackageState() -> void {
     library_parsed = true;
   }
 
-  switch (auto api_or_impl_token = tokens_.GetKind(*(position_))) {
+  switch (auto api_or_impl_token = tokens_->GetKind(*(position_))) {
     case TokenKind::Api(): {
       AddLeafNode(ParseNodeKind::PackageApi(), Consume());
       break;
@@ -1140,11 +1141,11 @@ auto Parser::HandlePackageState() -> void {
         // before the library name.
         CARBON_DIAGNOSTIC(MissingLibraryKeyword, Error,
                           "Missing `library` keyword.");
-        emitter_.Emit(*position_, MissingLibraryKeyword);
+        emitter_->Emit(*position_, MissingLibraryKeyword);
       } else {
         CARBON_DIAGNOSTIC(ExpectedApiOrImpl, Error,
                           "Expected a `api` or `impl`.");
-        emitter_.Emit(*position_, ExpectedApiOrImpl);
+        emitter_->Emit(*position_, ExpectedApiOrImpl);
       }
       exit_on_parse_error();
       return;
@@ -1155,7 +1156,7 @@ auto Parser::HandlePackageState() -> void {
                                ParseNodeKind::PackageEnd())) {
     CARBON_DIAGNOSTIC(ExpectedSemiToEndPackageDirective, Error,
                       "Expected `;` to end package directive.");
-    emitter_.Emit(*position_, ExpectedSemiToEndPackageDirective);
+    emitter_->Emit(*position_, ExpectedSemiToEndPackageDirective);
     exit_on_parse_error();
     return;
   }
@@ -1173,7 +1174,8 @@ auto Parser::HandleParenConditionState() -> void {
   } else {
     CARBON_DIAGNOSTIC(ExpectedParenAfter, Error, "Expected `(` after `{0}`.",
                       TokenKind);
-    emitter_.Emit(*position_, ExpectedParenAfter, tokens_.GetKind(state.token));
+    emitter_->Emit(*position_, ExpectedParenAfter,
+                   tokens_->GetKind(state.token));
   }
 
   // TODO: This should be adding a ConditionStart here instead of ConditionEnd
@@ -1186,7 +1188,7 @@ auto Parser::HandleParenConditionState() -> void {
 auto Parser::HandleParenConditionFinishState() -> void {
   auto state = PopState();
 
-  if (tokens_.GetKind(state.token) != TokenKind::OpenParen()) {
+  if (tokens_->GetKind(state.token) != TokenKind::OpenParen()) {
     // Don't expect a matching closing paren if there wasn't an opening paren.
     // TODO: Should probably push nodes on this state in order to have the
     // condition wrapped, but it wasn't before, so not doing it for consistency.
@@ -1283,18 +1285,18 @@ auto Parser::HandlePattern(PatternKind pattern_kind) -> void {
 
   // Handle an invalid pattern introducer.
   if (!PositionIs(TokenKind::Identifier()) ||
-      tokens_.GetKind(*(position_ + 1)) != TokenKind::Colon()) {
+      tokens_->GetKind(*(position_ + 1)) != TokenKind::Colon()) {
     switch (pattern_kind) {
       case PatternKind::Parameter: {
         CARBON_DIAGNOSTIC(ExpectedParameterName, Error,
                           "Expected parameter declaration.");
-        emitter_.Emit(*position_, ExpectedParameterName);
+        emitter_->Emit(*position_, ExpectedParameterName);
         break;
       }
       case PatternKind::Variable: {
         CARBON_DIAGNOSTIC(ExpectedVariableName, Error,
                           "Expected pattern in `var` declaration.");
-        emitter_.Emit(*position_, ExpectedVariableName);
+        emitter_->Emit(*position_, ExpectedVariableName);
         break;
       }
     }
@@ -1398,7 +1400,7 @@ auto Parser::HandleStatementForHeaderState() -> void {
                       "Expected `(` after `{0}`. Recovering from missing `(` "
                       "not implemented yet!",
                       TokenKind);
-    emitter_.Emit(*position_, ExpectedParenAfter, TokenKind::For());
+    emitter_->Emit(*position_, ExpectedParenAfter, TokenKind::For());
     // TODO: A proper recovery strategy is needed here. For now, I assume
     // that all brackets are properly balanced (i.e. each open bracket has a
     // closing one).
@@ -1417,7 +1419,7 @@ auto Parser::HandleStatementForHeaderState() -> void {
   } else {
     CARBON_DIAGNOSTIC(ExpectedVariableDeclaration, Error,
                       "Expected `var` declaration.");
-    emitter_.Emit(*position_, ExpectedVariableDeclaration);
+    emitter_->Emit(*position_, ExpectedVariableDeclaration);
 
     if (auto next_in = FindNextOf({TokenKind::In()})) {
       SkipTo(*next_in);
@@ -1435,13 +1437,13 @@ auto Parser::HandleStatementForHeaderInState() -> void {
   if (!ConsumeAndAddLeafNodeIf(TokenKind::In(), ParseNodeKind::ForIn())) {
     if (auto colon = ConsumeIf(TokenKind::Colon())) {
       CARBON_DIAGNOSTIC(ExpectedIn, Error, "`:` should be replaced by `in`.");
-      emitter_.Emit(*colon, ExpectedIn);
+      emitter_->Emit(*colon, ExpectedIn);
       AddLeafNode(ParseNodeKind::ForIn(), *colon, /*has_error=*/true);
     } else {
       CARBON_DIAGNOSTIC(ExpectedIn, Error,
                         "Expected `in` after loop `var` declaration.");
-      emitter_.Emit(*position_, ExpectedIn);
-      SkipTo(tokens_.GetMatchedClosingToken(state.token));
+      emitter_->Emit(*position_, ExpectedIn);
+      SkipTo(tokens_->GetMatchedClosingToken(state.token));
 
       state.has_error = true;
       PushState(state);
@@ -1518,7 +1520,8 @@ auto Parser::HandleStatementKeywordFinish(ParseNodeKind node_kind) -> void {
   if (!semi) {
     CARBON_DIAGNOSTIC(ExpectedSemiAfter, Error, "Expected `;` after `{0}`.",
                       TokenKind);
-    emitter_.Emit(*position_, ExpectedSemiAfter, tokens_.GetKind(state.token));
+    emitter_->Emit(*position_, ExpectedSemiAfter,
+                   tokens_->GetKind(state.token));
     state.has_error = true;
     // Recover to the next semicolon if possible, otherwise indicate the
     // keyword for the error.
@@ -1632,7 +1635,7 @@ auto Parser::HandleVarFinish(bool require_semicolon) -> void {
     auto semi = ConsumeAndAddLeafNodeIf(TokenKind::Semi(),
                                         ParseNodeKind::DeclarationEnd());
     if (!semi) {
-      emitter_.Emit(*position_, ExpectedSemiAfterExpression);
+      emitter_->Emit(*position_, ExpectedSemiAfterExpression);
       if (auto semi_token = SkipPastLikelyEnd(state.token)) {
         AddLeafNode(ParseNodeKind::DeclarationEnd(), *semi_token,
                     /*has_error=*/true);

+ 9 - 7
toolchain/parser/parser.h

@@ -180,7 +180,9 @@ class Parser {
                         bool already_has_error) -> ListTokenKind;
 
   // Gets the kind of the next token to be consumed.
-  auto PositionKind() const -> TokenKind { return tokens_.GetKind(*position_); }
+  auto PositionKind() const -> TokenKind {
+    return tokens_->GetKind(*position_);
+  }
 
   // Tests whether the next token to be consumed is of the specified kind.
   auto PositionIs(TokenKind kind) const -> bool {
@@ -197,14 +199,14 @@ class Parser {
   auto PushState(ParserState state) -> void {
     PushState(StateStackEntry(state, PrecedenceGroup::ForTopLevelExpression(),
                               PrecedenceGroup::ForTopLevelExpression(),
-                              *position_, tree_.size()));
+                              *position_, tree_->size()));
   }
 
   // Pushes a new expression state with specific precedence.
   auto PushStateForExpression(PrecedenceGroup ambient_precedence) -> void {
     PushState(StateStackEntry(ParserState::Expression(), ambient_precedence,
                               PrecedenceGroup::ForTopLevelExpression(),
-                              *position_, tree_.size()));
+                              *position_, tree_->size()));
   }
 
   // Pushes a new state with detailed precedence for expression resume states.
@@ -212,7 +214,7 @@ class Parser {
                                   PrecedenceGroup ambient_precedence,
                                   PrecedenceGroup lhs_precedence) -> void {
     PushState(StateStackEntry(state, ambient_precedence, lhs_precedence,
-                              *position_, tree_.size()));
+                              *position_, tree_->size()));
   }
 
   // Pushes a constructed state onto the stack.
@@ -278,9 +280,9 @@ class Parser {
 #define CARBON_PARSER_STATE(Name) auto Handle##Name##State()->void;
 #include "toolchain/parser/parser_state.def"
 
-  ParseTree& tree_;
-  TokenizedBuffer& tokens_;
-  TokenDiagnosticEmitter& emitter_;
+  ParseTree* tree_;
+  TokenizedBuffer* tokens_;
+  TokenDiagnosticEmitter* emitter_;
 
   // The current position within the token buffer.
   TokenizedBuffer::TokenIterator position_;