Просмотр исходного кода

Refactor compile-related tests to share construction. (#4396)

Note in particular that this fixes an issue where SharedValueStores had
been shared across files, when it should be per-file. This is only
visible when doing multiple compilations in a single test, which was
rare before.

This also moves these tests into the Testing namespace. My memory of the
various namespacing changes is that we'd generally agreed to have tests
in Testing so that we'd see SemIR:: and similar, same as we would in a
lot of the implementation.
Jon Ross-Perkins 1 год назад
Родитель
Commit
0f350255ce

+ 1 - 0
toolchain/lex/BUILD

@@ -257,6 +257,7 @@ cc_test(
         "//toolchain/base:value_store",
         "//toolchain/diagnostics:diagnostic_emitter",
         "//toolchain/diagnostics:mocks",
+        "//toolchain/testing:compile_helper",
         "//toolchain/testing:yaml_test_helpers",
         "@googletest//:gtest",
         "@llvm-project//llvm:Support",

+ 224 - 232
toolchain/lex/tokenized_buffer_test.cpp

@@ -17,6 +17,7 @@
 #include "toolchain/diagnostics/mocks.h"
 #include "toolchain/lex/lex.h"
 #include "toolchain/lex/tokenized_buffer_test_helpers.h"
+#include "toolchain/testing/compile_helper.h"
 #include "toolchain/testing/yaml_test_helpers.h"
 
 namespace Carbon::Lex {
@@ -35,29 +36,11 @@ namespace Yaml = ::Carbon::Testing::Yaml;
 
 class LexerTest : public ::testing::Test {
  protected:
-  auto GetSourceBuffer(llvm::StringRef text) -> SourceBuffer& {
-    std::string filename = llvm::formatv("test{0}.carbon", ++file_index_);
-    CARBON_CHECK(fs_.addFile(filename, /*ModificationTime=*/0,
-                             llvm::MemoryBuffer::getMemBuffer(text)));
-    source_storage_.push_front(std::move(*SourceBuffer::MakeFromFile(
-        fs_, filename, ConsoleDiagnosticConsumer())));
-    return source_storage_.front();
-  }
-
-  auto Lex(llvm::StringRef text,
-           DiagnosticConsumer& consumer = ConsoleDiagnosticConsumer())
-      -> TokenizedBuffer {
-    return Lex::Lex(value_stores_, GetSourceBuffer(text), consumer);
-  }
-
-  SharedValueStores value_stores_;
-  llvm::vfs::InMemoryFileSystem fs_;
-  int file_index_ = 0;
-  std::forward_list<SourceBuffer> source_storage_;
+  Testing::CompileHelper compile_helper_;
 };
 
 TEST_F(LexerTest, HandlesEmptyBuffer) {
-  auto buffer = Lex("");
+  auto& buffer = compile_helper_.GetTokenizedBuffer("");
   EXPECT_FALSE(buffer.has_errors());
   EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                           {.kind = TokenKind::FileStart},
@@ -65,7 +48,8 @@ TEST_F(LexerTest, HandlesEmptyBuffer) {
 }
 
 TEST_F(LexerTest, TracksLinesAndColumns) {
-  auto buffer = Lex("\n  ;;\n   ;;;\n   x\"foo\" '''baz\n  a\n ''' y");
+  auto& buffer = compile_helper_.GetTokenizedBuffer(
+      "\n  ;;\n   ;;;\n   x\"foo\" '''baz\n  a\n ''' y");
   EXPECT_FALSE(buffer.has_errors());
   EXPECT_THAT(
       buffer,
@@ -102,8 +86,8 @@ TEST_F(LexerTest, TracksLinesAndColumns) {
 }
 
 TEST_F(LexerTest, TracksLinesAndColumnsCRLF) {
-  auto buffer =
-      Lex("\r\n  ;;\r\n   ;;;\r\n   x\"foo\" '''baz\r\n  a\r\n ''' y");
+  auto& buffer = compile_helper_.GetTokenizedBuffer(
+      "\r\n  ;;\r\n   ;;;\r\n   x\"foo\" '''baz\r\n  a\r\n ''' y");
   EXPECT_FALSE(buffer.has_errors());
   EXPECT_THAT(
       buffer,
@@ -140,7 +124,7 @@ TEST_F(LexerTest, TracksLinesAndColumnsCRLF) {
 }
 
 TEST_F(LexerTest, InvalidCR) {
-  auto buffer = Lex("\n ;;\r ;\n   x");
+  auto& buffer = compile_helper_.GetTokenizedBuffer("\n ;;\r ;\n   x");
   EXPECT_TRUE(buffer.has_errors());
   EXPECT_THAT(
       buffer,
@@ -162,7 +146,7 @@ TEST_F(LexerTest, InvalidCR) {
 }
 
 TEST_F(LexerTest, InvalidLFCR) {
-  auto buffer = Lex("\n ;;\n\r ;\n   x");
+  auto& buffer = compile_helper_.GetTokenizedBuffer("\n ;;\n\r ;\n   x");
   EXPECT_TRUE(buffer.has_errors());
   EXPECT_THAT(
       buffer,
@@ -184,7 +168,9 @@ TEST_F(LexerTest, InvalidLFCR) {
 }
 
 TEST_F(LexerTest, HandlesNumericLiteral) {
-  auto buffer = Lex("12-578\n  1  2\n0x12_3ABC\n0b10_10_11\n1_234_567\n1.5e9");
+  auto [buffer, value_stores] =
+      compile_helper_.GetTokenizedBufferWithSharedValueStore(
+          "12-578\n  1  2\n0x12_3ABC\n0b10_10_11\n1_234_567\n1.5e9");
   EXPECT_FALSE(buffer.has_errors());
   ASSERT_THAT(buffer,
               HasTokens(llvm::ArrayRef<ExpectedToken>{
@@ -237,32 +223,33 @@ TEST_F(LexerTest, HandlesNumericLiteral) {
               }));
   auto token_start = buffer.tokens().begin();
   auto token_12 = token_start + 1;
-  EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_12)), 12);
+  EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_12)), 12);
   auto token_578 = token_12 + 2;
-  EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_578)), 578);
+  EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_578)), 578);
   auto token_1 = token_578 + 1;
-  EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_1)), 1);
+  EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_1)), 1);
   auto token_2 = token_1 + 1;
-  EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_2)), 2);
+  EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_2)), 2);
   auto token_0x12_3abc = token_2 + 1;
-  EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_0x12_3abc)),
+  EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_0x12_3abc)),
             0x12'3abc);
   auto token_0b10_10_11 = token_0x12_3abc + 1;
-  EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_0b10_10_11)),
+  EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_0b10_10_11)),
             0b10'10'11);
   auto token_1_234_567 = token_0b10_10_11 + 1;
-  EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_1_234_567)),
+  EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_1_234_567)),
             1'234'567);
   auto token_1_5e9 = token_1_234_567 + 1;
   auto value_1_5e9 =
-      value_stores_.reals().Get(buffer.GetRealLiteral(*token_1_5e9));
+      value_stores.reals().Get(buffer.GetRealLiteral(*token_1_5e9));
   EXPECT_EQ(value_1_5e9.mantissa.getZExtValue(), 15);
   EXPECT_EQ(value_1_5e9.exponent.getSExtValue(), 8);
   EXPECT_EQ(value_1_5e9.is_decimal, true);
 }
 
 TEST_F(LexerTest, HandlesInvalidNumericLiterals) {
-  auto buffer = Lex("14x 15_49 0x3.5q 0x3_4.5_6 0ops");
+  auto& buffer =
+      compile_helper_.GetTokenizedBuffer("14x 15_49 0x3.5q 0x3_4.5_6 0ops");
   EXPECT_TRUE(buffer.has_errors());
   ASSERT_THAT(buffer,
               HasTokens(llvm::ArrayRef<ExpectedToken>{
@@ -312,7 +299,7 @@ TEST_F(LexerTest, SplitsNumericLiteralsProperly) {
     12e+1
     13._
   )";
-  auto buffer = Lex(source_text);
+  auto& buffer = compile_helper_.GetTokenizedBuffer(source_text);
   EXPECT_TRUE(buffer.has_errors());
   EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                           {.kind = TokenKind::FileStart},
@@ -370,7 +357,8 @@ TEST_F(LexerTest, SplitsNumericLiteralsProperly) {
 
 TEST_F(LexerTest, HandlesGarbageCharacters) {
   constexpr char GarbageText[] = "$$💩-$\n$\0$12$\n\\\"\\\n\"x";
-  auto buffer = Lex(llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
+  auto& buffer = compile_helper_.GetTokenizedBuffer(
+      llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
   EXPECT_TRUE(buffer.has_errors());
   EXPECT_THAT(
       buffer,
@@ -403,104 +391,104 @@ TEST_F(LexerTest, Symbols) {
   // We don't need to exhaustively test symbols here as they're handled with
   // common code, but we want to check specific patterns to verify things like
   // max-munch rule and handling of interesting symbols.
-  auto buffer = Lex("<<<");
-  EXPECT_FALSE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::LessLess},
-                          {.kind = TokenKind::Less},
-                          {.kind = TokenKind::FileEnd},
-                      }));
-
-  buffer = Lex("<<=>>");
-  EXPECT_FALSE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::LessLessEqual},
-                          {.kind = TokenKind::GreaterGreater},
-                          {.kind = TokenKind::FileEnd},
-                      }));
-
-  buffer = Lex("< <=> >");
-  EXPECT_FALSE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::Less},
-                          {.kind = TokenKind::LessEqualGreater},
-                          {.kind = TokenKind::Greater},
-                          {.kind = TokenKind::FileEnd},
-                      }));
-
-  buffer = Lex("\\/?@&^!");
-  EXPECT_FALSE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::Backslash},
-                          {.kind = TokenKind::Slash},
-                          {.kind = TokenKind::Question},
-                          {.kind = TokenKind::At},
-                          {.kind = TokenKind::Amp},
-                          {.kind = TokenKind::Caret},
-                          {.kind = TokenKind::Exclaim},
-                          {.kind = TokenKind::FileEnd},
-                      }));
+  auto& buffer1 = compile_helper_.GetTokenizedBuffer("<<<");
+  EXPECT_FALSE(buffer1.has_errors());
+  EXPECT_THAT(buffer1, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::LessLess},
+                           {.kind = TokenKind::Less},
+                           {.kind = TokenKind::FileEnd},
+                       }));
+
+  auto& buffer2 = compile_helper_.GetTokenizedBuffer("<<=>>");
+  EXPECT_FALSE(buffer2.has_errors());
+  EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::LessLessEqual},
+                           {.kind = TokenKind::GreaterGreater},
+                           {.kind = TokenKind::FileEnd},
+                       }));
+
+  auto& buffer3 = compile_helper_.GetTokenizedBuffer("< <=> >");
+  EXPECT_FALSE(buffer3.has_errors());
+  EXPECT_THAT(buffer3, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::Less},
+                           {.kind = TokenKind::LessEqualGreater},
+                           {.kind = TokenKind::Greater},
+                           {.kind = TokenKind::FileEnd},
+                       }));
+
+  auto& buffer4 = compile_helper_.GetTokenizedBuffer("\\/?@&^!");
+  EXPECT_FALSE(buffer4.has_errors());
+  EXPECT_THAT(buffer4, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::Backslash},
+                           {.kind = TokenKind::Slash},
+                           {.kind = TokenKind::Question},
+                           {.kind = TokenKind::At},
+                           {.kind = TokenKind::Amp},
+                           {.kind = TokenKind::Caret},
+                           {.kind = TokenKind::Exclaim},
+                           {.kind = TokenKind::FileEnd},
+                       }));
 }
 
 TEST_F(LexerTest, Parens) {
-  auto buffer = Lex("()");
-  EXPECT_FALSE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::OpenParen},
-                          {.kind = TokenKind::CloseParen},
-                          {.kind = TokenKind::FileEnd},
-                      }));
-
-  buffer = Lex("((()()))");
-  EXPECT_FALSE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::OpenParen},
-                          {.kind = TokenKind::OpenParen},
-                          {.kind = TokenKind::OpenParen},
-                          {.kind = TokenKind::CloseParen},
-                          {.kind = TokenKind::OpenParen},
-                          {.kind = TokenKind::CloseParen},
-                          {.kind = TokenKind::CloseParen},
-                          {.kind = TokenKind::CloseParen},
-                          {.kind = TokenKind::FileEnd},
-                      }));
+  auto& buffer1 = compile_helper_.GetTokenizedBuffer("()");
+  EXPECT_FALSE(buffer1.has_errors());
+  EXPECT_THAT(buffer1, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::OpenParen},
+                           {.kind = TokenKind::CloseParen},
+                           {.kind = TokenKind::FileEnd},
+                       }));
+
+  auto& buffer2 = compile_helper_.GetTokenizedBuffer("((()()))");
+  EXPECT_FALSE(buffer2.has_errors());
+  EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::OpenParen},
+                           {.kind = TokenKind::OpenParen},
+                           {.kind = TokenKind::OpenParen},
+                           {.kind = TokenKind::CloseParen},
+                           {.kind = TokenKind::OpenParen},
+                           {.kind = TokenKind::CloseParen},
+                           {.kind = TokenKind::CloseParen},
+                           {.kind = TokenKind::CloseParen},
+                           {.kind = TokenKind::FileEnd},
+                       }));
 }
 
 TEST_F(LexerTest, CurlyBraces) {
-  auto buffer = Lex("{}");
-  EXPECT_FALSE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::OpenCurlyBrace},
-                          {.kind = TokenKind::CloseCurlyBrace},
-                          {.kind = TokenKind::FileEnd},
-                      }));
-
-  buffer = Lex("{{{}{}}}");
-  EXPECT_FALSE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::OpenCurlyBrace},
-                          {.kind = TokenKind::OpenCurlyBrace},
-                          {.kind = TokenKind::OpenCurlyBrace},
-                          {.kind = TokenKind::CloseCurlyBrace},
-                          {.kind = TokenKind::OpenCurlyBrace},
-                          {.kind = TokenKind::CloseCurlyBrace},
-                          {.kind = TokenKind::CloseCurlyBrace},
-                          {.kind = TokenKind::CloseCurlyBrace},
-                          {.kind = TokenKind::FileEnd},
-                      }));
+  auto& buffer1 = compile_helper_.GetTokenizedBuffer("{}");
+  EXPECT_FALSE(buffer1.has_errors());
+  EXPECT_THAT(buffer1, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::OpenCurlyBrace},
+                           {.kind = TokenKind::CloseCurlyBrace},
+                           {.kind = TokenKind::FileEnd},
+                       }));
+
+  auto& buffer2 = compile_helper_.GetTokenizedBuffer("{{{}{}}}");
+  EXPECT_FALSE(buffer2.has_errors());
+  EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::OpenCurlyBrace},
+                           {.kind = TokenKind::OpenCurlyBrace},
+                           {.kind = TokenKind::OpenCurlyBrace},
+                           {.kind = TokenKind::CloseCurlyBrace},
+                           {.kind = TokenKind::OpenCurlyBrace},
+                           {.kind = TokenKind::CloseCurlyBrace},
+                           {.kind = TokenKind::CloseCurlyBrace},
+                           {.kind = TokenKind::CloseCurlyBrace},
+                           {.kind = TokenKind::FileEnd},
+                       }));
 }
 
 TEST_F(LexerTest, MatchingGroups) {
   {
-    TokenizedBuffer buffer = Lex("(){}");
+    auto& buffer = compile_helper_.GetTokenizedBuffer("(){}");
     ASSERT_FALSE(buffer.has_errors());
     auto it = ++buffer.tokens().begin();
     auto open_paren_token = *it++;
@@ -521,14 +509,15 @@ TEST_F(LexerTest, MatchingGroups) {
   }
 
   {
-    TokenizedBuffer buffer = Lex("({x}){(y)} {{((z))}}");
+    auto [buffer, value_stores] =
+        compile_helper_.GetTokenizedBufferWithSharedValueStore(
+            "({x}){(y)} {{((z))}}");
     ASSERT_FALSE(buffer.has_errors());
     auto it = ++buffer.tokens().begin();
     auto open_paren_token = *it++;
     auto open_curly_token = *it++;
 
-    ASSERT_EQ("x",
-              value_stores_.identifiers().Get(buffer.GetIdentifier(*it++)));
+    ASSERT_EQ("x", value_stores.identifiers().Get(buffer.GetIdentifier(*it++)));
     auto close_curly_token = *it++;
     auto close_paren_token = *it++;
     EXPECT_EQ(close_paren_token,
@@ -542,8 +531,7 @@ TEST_F(LexerTest, MatchingGroups) {
 
     open_curly_token = *it++;
     open_paren_token = *it++;
-    ASSERT_EQ("y",
-              value_stores_.identifiers().Get(buffer.GetIdentifier(*it++)));
+    ASSERT_EQ("y", value_stores.identifiers().Get(buffer.GetIdentifier(*it++)));
     close_paren_token = *it++;
     close_curly_token = *it++;
     EXPECT_EQ(close_curly_token,
@@ -559,8 +547,7 @@ TEST_F(LexerTest, MatchingGroups) {
     auto inner_open_curly_token = *it++;
     open_paren_token = *it++;
     auto inner_open_paren_token = *it++;
-    ASSERT_EQ("z",
-              value_stores_.identifiers().Get(buffer.GetIdentifier(*it++)));
+    ASSERT_EQ("z", value_stores.identifiers().Get(buffer.GetIdentifier(*it++)));
     auto inner_close_paren_token = *it++;
     close_paren_token = *it++;
     auto inner_close_curly_token = *it++;
@@ -589,26 +576,26 @@ TEST_F(LexerTest, MatchingGroups) {
 }
 
 TEST_F(LexerTest, MismatchedGroups) {
-  auto buffer = Lex("{");
-  EXPECT_TRUE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::Error, .text = "{"},
-                          {.kind = TokenKind::FileEnd},
-                      }));
-
-  buffer = Lex("}");
-  EXPECT_TRUE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::Error, .text = "}"},
-                          {.kind = TokenKind::FileEnd},
-                      }));
-
-  buffer = Lex("{(}");
-  EXPECT_TRUE(buffer.has_errors());
+  auto& buffer1 = compile_helper_.GetTokenizedBuffer("{");
+  EXPECT_TRUE(buffer1.has_errors());
+  EXPECT_THAT(buffer1, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::Error, .text = "{"},
+                           {.kind = TokenKind::FileEnd},
+                       }));
+
+  auto& buffer2 = compile_helper_.GetTokenizedBuffer("}");
+  EXPECT_TRUE(buffer2.has_errors());
+  EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::Error, .text = "}"},
+                           {.kind = TokenKind::FileEnd},
+                       }));
+
+  auto& buffer3 = compile_helper_.GetTokenizedBuffer("{(}");
+  EXPECT_TRUE(buffer3.has_errors());
   EXPECT_THAT(
-      buffer,
+      buffer3,
       HasTokens(llvm::ArrayRef<ExpectedToken>{
           {.kind = TokenKind::FileStart},
           {.kind = TokenKind::OpenCurlyBrace, .column = 1},
@@ -618,10 +605,10 @@ TEST_F(LexerTest, MismatchedGroups) {
           {.kind = TokenKind::FileEnd},
       }));
 
-  buffer = Lex(")({)");
-  EXPECT_TRUE(buffer.has_errors());
+  auto& buffer4 = compile_helper_.GetTokenizedBuffer(")({)");
+  EXPECT_TRUE(buffer4.has_errors());
   EXPECT_THAT(
-      buffer,
+      buffer4,
       HasTokens(llvm::ArrayRef<ExpectedToken>{
           {.kind = TokenKind::FileStart},
           {.kind = TokenKind::Error, .column = 1, .text = ")"},
@@ -634,7 +621,7 @@ TEST_F(LexerTest, MismatchedGroups) {
 }
 
 TEST_F(LexerTest, Whitespace) {
-  auto buffer = Lex("{( } {(");
+  auto& buffer = compile_helper_.GetTokenizedBuffer("{( } {(");
 
   // Whether there should be whitespace before/after each token.
   bool space[] = {false,
@@ -675,7 +662,7 @@ TEST_F(LexerTest, Keywords) {
 #include "toolchain/lex/token_kind.def"
   };
   for (const auto& keyword : keywords) {
-    auto buffer = Lex(keyword.fixed_spelling());
+    auto& buffer = compile_helper_.GetTokenizedBuffer(keyword.fixed_spelling());
     EXPECT_FALSE(buffer.has_errors());
     EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                             {.kind = TokenKind::FileStart},
@@ -686,10 +673,10 @@ TEST_F(LexerTest, Keywords) {
 }
 
 TEST_F(LexerTest, Comments) {
-  auto buffer = Lex(" ;\n  // foo\n  ;\n");
-  EXPECT_FALSE(buffer.has_errors());
+  auto& buffer1 = compile_helper_.GetTokenizedBuffer(" ;\n  // foo\n  ;\n");
+  EXPECT_FALSE(buffer1.has_errors());
   EXPECT_THAT(
-      buffer,
+      buffer1,
       HasTokens(llvm::ArrayRef<ExpectedToken>{
           {.kind = TokenKind::FileStart, .line = 1, .column = 1},
           {.kind = TokenKind::Semi, .line = 1, .column = 2, .indent_column = 2},
@@ -697,25 +684,26 @@ TEST_F(LexerTest, Comments) {
           {.kind = TokenKind::FileEnd, .line = 3, .column = 4},
       }));
 
-  buffer = Lex("// foo\n//\n// bar");
-  EXPECT_FALSE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::FileEnd}}));
+  auto& buffer2 = compile_helper_.GetTokenizedBuffer("// foo\n//\n// bar");
+  EXPECT_FALSE(buffer2.has_errors());
+  EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::FileEnd}}));
 
   // Make sure weird characters aren't a problem.
-  buffer = Lex("  // foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
-  EXPECT_FALSE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::FileEnd}}));
+  auto& buffer3 =
+      compile_helper_.GetTokenizedBuffer("  // foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
+  EXPECT_FALSE(buffer3.has_errors());
+  EXPECT_THAT(buffer3, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::FileEnd}}));
 
   // Make sure we can lex a comment at the end of the input.
-  buffer = Lex("//");
-  EXPECT_FALSE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::FileEnd}}));
+  auto& buffer4 = compile_helper_.GetTokenizedBuffer("//");
+  EXPECT_FALSE(buffer4.has_errors());
+  EXPECT_THAT(buffer4, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::FileEnd}}));
 }
 
 TEST_F(LexerTest, InvalidComments) {
@@ -726,53 +714,54 @@ TEST_F(LexerTest, InvalidComments) {
       " //world",
   };
   for (llvm::StringLiteral testcase : testcases) {
-    auto buffer = Lex(testcase);
+    auto& buffer = compile_helper_.GetTokenizedBuffer(testcase);
     EXPECT_TRUE(buffer.has_errors());
   }
 }
 
 TEST_F(LexerTest, Identifiers) {
-  auto buffer = Lex("   foobar");
-  EXPECT_FALSE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::Identifier,
-                           .column = 4,
-                           .indent_column = 4,
-                           .text = "foobar"},
-                          {.kind = TokenKind::FileEnd},
-                      }));
+  auto& buffer1 = compile_helper_.GetTokenizedBuffer("   foobar");
+  EXPECT_FALSE(buffer1.has_errors());
+  EXPECT_THAT(buffer1, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::Identifier,
+                            .column = 4,
+                            .indent_column = 4,
+                            .text = "foobar"},
+                           {.kind = TokenKind::FileEnd},
+                       }));
 
   // Check different kinds of identifier character sequences.
-  buffer = Lex("_foo_bar");
-  EXPECT_FALSE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::Identifier, .text = "_foo_bar"},
-                          {.kind = TokenKind::FileEnd},
-                      }));
-
-  buffer = Lex("foo2bar00");
-  EXPECT_FALSE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::Identifier, .text = "foo2bar00"},
-                          {.kind = TokenKind::FileEnd},
-                      }));
+  auto& buffer2 = compile_helper_.GetTokenizedBuffer("_foo_bar");
+  EXPECT_FALSE(buffer2.has_errors());
+  EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::Identifier, .text = "_foo_bar"},
+                           {.kind = TokenKind::FileEnd},
+                       }));
+
+  auto& buffer3 = compile_helper_.GetTokenizedBuffer("foo2bar00");
+  EXPECT_FALSE(buffer3.has_errors());
+  EXPECT_THAT(buffer3, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::Identifier, .text = "foo2bar00"},
+                           {.kind = TokenKind::FileEnd},
+                       }));
 
   // Check that we can parse identifiers that start with a keyword.
-  buffer = Lex("fnord");
-  EXPECT_FALSE(buffer.has_errors());
-  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
-                          {.kind = TokenKind::FileStart},
-                          {.kind = TokenKind::Identifier, .text = "fnord"},
-                          {.kind = TokenKind::FileEnd},
-                      }));
+  auto& buffer4 = compile_helper_.GetTokenizedBuffer("fnord");
+  EXPECT_FALSE(buffer4.has_errors());
+  EXPECT_THAT(buffer4, HasTokens(llvm::ArrayRef<ExpectedToken>{
+                           {.kind = TokenKind::FileStart},
+                           {.kind = TokenKind::Identifier, .text = "fnord"},
+                           {.kind = TokenKind::FileEnd},
+                       }));
 
   // Check multiple identifiers with indent and interning.
-  buffer = Lex("   foo;bar\nbar \n  foo\tfoo");
-  EXPECT_FALSE(buffer.has_errors());
-  EXPECT_THAT(buffer,
+  auto& buffer5 =
+      compile_helper_.GetTokenizedBuffer("   foo;bar\nbar \n  foo\tfoo");
+  EXPECT_FALSE(buffer5.has_errors());
+  EXPECT_THAT(buffer5,
               HasTokens(llvm::ArrayRef<ExpectedToken>{
                   {.kind = TokenKind::FileStart, .line = 1, .column = 1},
                   {.kind = TokenKind::Identifier,
@@ -823,7 +812,8 @@ TEST_F(LexerTest, StringLiterals) {
     """x"""
   )";
 
-  auto buffer = Lex(testcase);
+  auto [buffer, value_stores] =
+      compile_helper_.GetTokenizedBufferWithSharedValueStore(testcase);
   EXPECT_FALSE(buffer.has_errors());
   EXPECT_THAT(buffer,
               HasTokens(llvm::ArrayRef<ExpectedToken>{
@@ -832,13 +822,13 @@ TEST_F(LexerTest, StringLiterals) {
                    .line = 2,
                    .column = 5,
                    .indent_column = 5,
-                   .value_stores = &value_stores_,
+                   .value_stores = &value_stores,
                    .string_contents = {"hello world\n"}},
                   {.kind = TokenKind::StringLiteral,
                    .line = 4,
                    .column = 5,
                    .indent_column = 5,
-                   .value_stores = &value_stores_,
+                   .value_stores = &value_stores,
                    .string_contents = {" test  \xAB\n"}},
                   {.kind = TokenKind::Identifier,
                    .line = 7,
@@ -849,19 +839,19 @@ TEST_F(LexerTest, StringLiterals) {
                    .line = 9,
                    .column = 7,
                    .indent_column = 7,
-                   .value_stores = &value_stores_,
+                   .value_stores = &value_stores,
                    .string_contents = {"\""}},
                   {.kind = TokenKind::StringLiteral,
                    .line = 11,
                    .column = 5,
                    .indent_column = 5,
-                   .value_stores = &value_stores_,
+                   .value_stores = &value_stores,
                    .string_contents = llvm::StringLiteral::withInnerNUL("\0")},
                   {.kind = TokenKind::StringLiteral,
                    .line = 13,
                    .column = 5,
                    .indent_column = 5,
-                   .value_stores = &value_stores_,
+                   .value_stores = &value_stores,
                    .string_contents = {"\\0\"foo\"\\1"}},
 
                   // """x""" is three string literals, not one invalid
@@ -870,19 +860,19 @@ TEST_F(LexerTest, StringLiterals) {
                    .line = 15,
                    .column = 5,
                    .indent_column = 5,
-                   .value_stores = &value_stores_,
+                   .value_stores = &value_stores,
                    .string_contents = {""}},
                   {.kind = TokenKind::StringLiteral,
                    .line = 15,
                    .column = 7,
                    .indent_column = 5,
-                   .value_stores = &value_stores_,
+                   .value_stores = &value_stores,
                    .string_contents = {"x"}},
                   {.kind = TokenKind::StringLiteral,
                    .line = 15,
                    .column = 10,
                    .indent_column = 5,
-                   .value_stores = &value_stores_,
+                   .value_stores = &value_stores,
                    .string_contents = {""}},
                   {.kind = TokenKind::FileEnd, .line = 16, .column = 3},
               }));
@@ -909,7 +899,7 @@ TEST_F(LexerTest, InvalidStringLiterals) {
 
   for (llvm::StringLiteral test : invalid) {
     SCOPED_TRACE(test);
-    auto buffer = Lex(test);
+    auto& buffer = compile_helper_.GetTokenizedBuffer(test);
     EXPECT_TRUE(buffer.has_errors());
 
     // We should have formed at least one error token.
@@ -932,7 +922,8 @@ TEST_F(LexerTest, TypeLiterals) {
     s1
   )";
 
-  auto buffer = Lex(testcase);
+  auto [buffer, value_stores] =
+      compile_helper_.GetTokenizedBufferWithSharedValueStore(testcase);
   EXPECT_FALSE(buffer.has_errors());
   ASSERT_THAT(buffer,
               HasTokens(llvm::ArrayRef<ExpectedToken>{
@@ -1017,7 +1008,7 @@ TEST_F(LexerTest, TypeLiterals) {
 
   auto type_size = [&](int token_index) {
     auto token = buffer.tokens().begin()[token_index];
-    return value_stores_.ints().Get(buffer.GetTypeLiteralSize(token));
+    return value_stores.ints().Get(buffer.GetTypeLiteralSize(token));
   };
 
   EXPECT_EQ(type_size(2), 1);
@@ -1040,7 +1031,7 @@ TEST_F(LexerTest, TypeLiteralTooManyDigits) {
               HandleDiagnostic(IsSingleDiagnostic(
                   DiagnosticKind::TooManyDigits, DiagnosticLevel::Error, 1, 2,
                   HasSubstr(llvm::formatv(" {0} ", Count)))));
-  auto buffer = Lex(code, consumer);
+  auto& buffer = compile_helper_.GetTokenizedBuffer(code, &consumer);
   EXPECT_TRUE(buffer.has_errors());
   ASSERT_THAT(buffer,
               HasTokens(llvm::ArrayRef<ExpectedToken>{
@@ -1064,7 +1055,7 @@ TEST_F(LexerTest, DiagnosticTrailingComment) {
   EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
                             DiagnosticKind::TrailingComment,
                             DiagnosticLevel::Error, 3, 19, _)));
-  Lex(testcase, consumer);
+  compile_helper_.GetTokenizedBuffer(testcase, &consumer);
 }
 
 TEST_F(LexerTest, DiagnosticWhitespace) {
@@ -1072,7 +1063,7 @@ TEST_F(LexerTest, DiagnosticWhitespace) {
   EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
                             DiagnosticKind::NoWhitespaceAfterCommentIntroducer,
                             DiagnosticLevel::Error, 1, 3, _)));
-  Lex("//no space after comment", consumer);
+  compile_helper_.GetTokenizedBuffer("//no space after comment", &consumer);
 }
 
 TEST_F(LexerTest, DiagnosticUnrecognizedEscape) {
@@ -1080,7 +1071,7 @@ TEST_F(LexerTest, DiagnosticUnrecognizedEscape) {
   EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
                             DiagnosticKind::UnknownEscapeSequence,
                             DiagnosticLevel::Error, 1, 8, HasSubstr("`b`"))));
-  Lex(R"("hello\bworld")", consumer);
+  compile_helper_.GetTokenizedBuffer(R"("hello\bworld")", &consumer);
 }
 
 TEST_F(LexerTest, DiagnosticBadHex) {
@@ -1088,7 +1079,7 @@ TEST_F(LexerTest, DiagnosticBadHex) {
   EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
                             DiagnosticKind::HexadecimalEscapeMissingDigits,
                             DiagnosticLevel::Error, 1, 9, _)));
-  Lex(R"("hello\xabworld")", consumer);
+  compile_helper_.GetTokenizedBuffer(R"("hello\xabworld")", &consumer);
 }
 
 TEST_F(LexerTest, DiagnosticInvalidDigit) {
@@ -1096,7 +1087,7 @@ TEST_F(LexerTest, DiagnosticInvalidDigit) {
   EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
                             DiagnosticKind::InvalidDigit,
                             DiagnosticLevel::Error, 1, 6, HasSubstr("'a'"))));
-  Lex("0x123abc", consumer);
+  compile_helper_.GetTokenizedBuffer("0x123abc", &consumer);
 }
 
 TEST_F(LexerTest, DiagnosticMissingTerminator) {
@@ -1104,7 +1095,7 @@ TEST_F(LexerTest, DiagnosticMissingTerminator) {
   EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
                             DiagnosticKind::UnterminatedString,
                             DiagnosticLevel::Error, 1, 1, _)));
-  Lex(R"(#" ")", consumer);
+  compile_helper_.GetTokenizedBuffer(R"(#" ")", &consumer);
 }
 
 TEST_F(LexerTest, DiagnosticUnrecognizedChar) {
@@ -1112,12 +1103,13 @@ TEST_F(LexerTest, DiagnosticUnrecognizedChar) {
   EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
                             DiagnosticKind::UnrecognizedCharacters,
                             DiagnosticLevel::Error, 1, 1, _)));
-  Lex("\b", consumer);
+  compile_helper_.GetTokenizedBuffer("\b", &consumer);
 }
 
 TEST_F(LexerTest, PrintingOutputYaml) {
   // Test that we can parse this into YAML and verify line and indent data.
-  auto buffer = Lex("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n\n");
+  auto& buffer =
+      compile_helper_.GetTokenizedBuffer("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n\n");
   ASSERT_FALSE(buffer.has_errors());
   TestRawOstream print_stream;
   buffer.Print(print_stream);
@@ -1125,7 +1117,7 @@ TEST_F(LexerTest, PrintingOutputYaml) {
   EXPECT_THAT(
       Yaml::Value::FromText(print_stream.TakeStr()),
       IsYaml(ElementsAre(Yaml::Sequence(ElementsAre(Yaml::Mapping(ElementsAre(
-          Pair("filename", source_storage_.front().filename().str()),
+          Pair("filename", buffer.source().filename().str()),
           Pair("tokens", Yaml::Sequence(ElementsAre(
                              Yaml::Mapping(ElementsAre(
                                  Pair("index", "0"), Pair("kind", "FileStart"),

+ 2 - 0
toolchain/parse/BUILD

@@ -45,6 +45,7 @@ cc_test(
         "//toolchain/diagnostics:mocks",
         "//toolchain/lex",
         "//toolchain/lex:tokenized_buffer",
+        "//toolchain/testing:compile_helper",
         "@googletest//:gtest",
     ],
 )
@@ -139,6 +140,7 @@ cc_test(
         "//toolchain/diagnostics:mocks",
         "//toolchain/lex",
         "//toolchain/lex:tokenized_buffer",
+        "//toolchain/testing:compile_helper",
         "//toolchain/testing:yaml_test_helpers",
         "@googletest//:gtest",
         "@llvm-project//llvm:Support",

+ 18 - 35
toolchain/parse/tree_test.cpp

@@ -17,6 +17,7 @@
 #include "toolchain/lex/tokenized_buffer.h"
 #include "toolchain/parse/parse.h"
 #include "toolchain/parse/tree_and_subtrees.h"
+#include "toolchain/testing/compile_helper.h"
 #include "toolchain/testing/yaml_test_helpers.h"
 
 namespace Carbon::Parse {
@@ -30,38 +31,19 @@ namespace Yaml = ::Carbon::Testing::Yaml;
 
 class TreeTest : public ::testing::Test {
  protected:
-  auto GetSourceBuffer(llvm::StringRef t) -> SourceBuffer& {
-    CARBON_CHECK(fs_.addFile("test.carbon", /*ModificationTime=*/0,
-                             llvm::MemoryBuffer::getMemBuffer(t)));
-    source_storage_.push_front(
-        std::move(*SourceBuffer::MakeFromFile(fs_, "test.carbon", consumer_)));
-    return source_storage_.front();
-  }
-
-  auto GetTokenizedBuffer(llvm::StringRef t) -> Lex::TokenizedBuffer& {
-    token_storage_.push_front(
-        Lex::Lex(value_stores_, GetSourceBuffer(t), consumer_));
-    return token_storage_.front();
-  }
-
-  SharedValueStores value_stores_;
-  llvm::vfs::InMemoryFileSystem fs_;
-  std::forward_list<SourceBuffer> source_storage_;
-  std::forward_list<Lex::TokenizedBuffer> token_storage_;
-  DiagnosticConsumer& consumer_ = ConsoleDiagnosticConsumer();
+  Testing::CompileHelper compile_helper_;
 };
 
 TEST_F(TreeTest, IsValid) {
-  Lex::TokenizedBuffer& tokens = GetTokenizedBuffer("");
-  Tree tree = Parse(tokens, consumer_, /*vlog_stream=*/nullptr);
+  Tree& tree = compile_helper_.GetTree("");
   EXPECT_TRUE((*tree.postorder().begin()).is_valid());
 }
 
 TEST_F(TreeTest, AsAndTryAs) {
-  Lex::TokenizedBuffer& tokens = GetTokenizedBuffer("fn F();");
-  Tree tree = Parse(tokens, consumer_, /*vlog_stream=*/nullptr);
+  auto [tokens, tree_and_subtrees] =
+      compile_helper_.GetTokenizedBufferWithTreeAndSubtrees("fn F();");
+  const auto& tree = tree_and_subtrees.tree();
   ASSERT_FALSE(tree.has_errors());
-  TreeAndSubtrees tree_and_subtrees(tokens, tree);
   auto it = tree_and_subtrees.roots().begin();
   // A FileEnd node, so won't match.
   NodeId n = *it;
@@ -105,11 +87,11 @@ TEST_F(TreeTest, AsAndTryAs) {
 }
 
 TEST_F(TreeTest, PrintPostorderAsYAML) {
-  Lex::TokenizedBuffer& tokens = GetTokenizedBuffer("fn F();");
-  Tree tree = Parse(tokens, consumer_, /*vlog_stream=*/nullptr);
-  EXPECT_FALSE(tree.has_errors());
+  auto [tokens, tree_and_subtrees] =
+      compile_helper_.GetTokenizedBufferWithTreeAndSubtrees("fn F();");
+  EXPECT_FALSE(tree_and_subtrees.tree().has_errors());
   TestRawOstream print_stream;
-  tree.Print(print_stream);
+  tree_and_subtrees.tree().Print(print_stream);
 
   auto file = Yaml::Sequence(ElementsAre(
       Yaml::Mapping(ElementsAre(Pair("kind", "FileStart"), Pair("text", ""))),
@@ -126,17 +108,17 @@ TEST_F(TreeTest, PrintPostorderAsYAML) {
       Yaml::Mapping(ElementsAre(Pair("kind", "FileEnd"), Pair("text", "")))));
 
   auto root = Yaml::Sequence(ElementsAre(Yaml::Mapping(
-      ElementsAre(Pair("filename", "test.carbon"), Pair("parse_tree", file)))));
+      ElementsAre(Pair("filename", tokens.source().filename().str()),
+                  Pair("parse_tree", file)))));
 
   EXPECT_THAT(Yaml::Value::FromText(print_stream.TakeStr()),
               IsYaml(ElementsAre(root)));
 }
 
 TEST_F(TreeTest, PrintPreorderAsYAML) {
-  Lex::TokenizedBuffer& tokens = GetTokenizedBuffer("fn F();");
-  Tree tree = Parse(tokens, consumer_, /*vlog_stream=*/nullptr);
-  EXPECT_FALSE(tree.has_errors());
-  TreeAndSubtrees tree_and_subtrees(tokens, tree);
+  auto [tokens, tree_and_subtrees] =
+      compile_helper_.GetTokenizedBufferWithTreeAndSubtrees("fn F();");
+  EXPECT_FALSE(tree_and_subtrees.tree().has_errors());
   TestRawOstream print_stream;
   tree_and_subtrees.PrintPreorder(print_stream);
 
@@ -167,7 +149,8 @@ TEST_F(TreeTest, PrintPreorderAsYAML) {
                                 Pair("kind", "FileEnd"), Pair("text", "")))));
 
   auto root = Yaml::Sequence(ElementsAre(Yaml::Mapping(
-      ElementsAre(Pair("filename", "test.carbon"), Pair("parse_tree", file)))));
+      ElementsAre(Pair("filename", tokens.source().filename().str()),
+                  Pair("parse_tree", file)))));
 
   EXPECT_THAT(Yaml::Value::FromText(print_stream.TakeStr()),
               IsYaml(ElementsAre(root)));
@@ -178,7 +161,7 @@ TEST_F(TreeTest, HighRecursion) {
   code.append(10000, '(');
   code.append(10000, ')');
   code += "; }";
-  Lex::TokenizedBuffer& tokens = GetTokenizedBuffer(code);
+  Lex::TokenizedBuffer& tokens = compile_helper_.GetTokenizedBuffer(code);
   ASSERT_FALSE(tokens.has_errors());
   Testing::MockDiagnosticConsumer consumer;
   Tree tree = Parse(tokens, consumer, /*vlog_stream=*/nullptr);

+ 72 - 105
toolchain/parse/typed_nodes_test.cpp

@@ -13,6 +13,7 @@
 #include "toolchain/lex/tokenized_buffer.h"
 #include "toolchain/parse/parse.h"
 #include "toolchain/parse/tree_and_subtrees.h"
+#include "toolchain/testing/compile_helper.h"
 
 namespace Carbon::Parse {
 
@@ -21,16 +22,17 @@ namespace Carbon::Parse {
 class TypedNodesTestPeer {
  public:
   template <typename T>
-  static auto VerifyExtractAs(const TreeAndSubtrees* tree, NodeId node_id,
+  static auto VerifyExtractAs(const TreeAndSubtrees& tree, NodeId node_id,
                               ErrorBuilder* trace) -> std::optional<T> {
-    return tree->VerifyExtractAs<T>(node_id, trace);
+    return tree.VerifyExtractAs<T>(node_id, trace);
   }
 
   // Sets the kind of a node. This is intended to allow putting the tree into a
   // state where verification can fail, in order to make the failure path of
   // `Verify` testable.
-  static auto SetNodeKind(Tree* tree, NodeId node_id, NodeKind kind) -> void {
-    tree->SetNodeKindForTesting(node_id, kind);
+  static auto SetNodeKind(const Tree& tree, NodeId node_id, NodeKind kind)
+      -> void {
+    const_cast<Tree&>(tree).SetNodeKindForTesting(node_id, kind);
   }
 };
 
@@ -44,137 +46,104 @@ namespace {
 
 class TypedNodeTest : public ::testing::Test {
  protected:
-  auto GetSourceBuffer(llvm::StringRef t) -> SourceBuffer& {
-    CARBON_CHECK(fs_.addFile("test.carbon", /*ModificationTime=*/0,
-                             llvm::MemoryBuffer::getMemBuffer(t)));
-    source_storage_.push_front(
-        std::move(*SourceBuffer::MakeFromFile(fs_, "test.carbon", consumer_)));
-    return source_storage_.front();
-  }
-
-  auto GetTokenizedBuffer(llvm::StringRef t) -> Lex::TokenizedBuffer& {
-    token_storage_.push_front(
-        Lex::Lex(value_stores_, GetSourceBuffer(t), consumer_));
-    return token_storage_.front();
-  }
-
-  auto GetTree(llvm::StringRef t) -> TreeAndSubtrees& {
-    tree_storage_.push_front(Parse(GetTokenizedBuffer(t), consumer_,
-                                   /*vlog_stream=*/nullptr));
-    tree_and_subtrees_storage_.push_front(
-        TreeAndSubtrees(token_storage_.front(), tree_storage_.front()));
-    return tree_and_subtrees_storage_.front();
-  }
-
-  auto GetTokenizedBufferAndTree(llvm::StringRef t)
-      -> std::pair<Lex::TokenizedBuffer*, TreeAndSubtrees*> {
-    auto* tree = &GetTree(t);
-    return {&token_storage_.front(), tree};
-  }
+  using Peer = TypedNodesTestPeer;
 
-  SharedValueStores value_stores_;
-  llvm::vfs::InMemoryFileSystem fs_;
-  std::forward_list<SourceBuffer> source_storage_;
-  std::forward_list<Lex::TokenizedBuffer> token_storage_;
-  std::forward_list<Tree> tree_storage_;
-  std::forward_list<TreeAndSubtrees> tree_and_subtrees_storage_;
-  DiagnosticConsumer& consumer_ = ConsoleDiagnosticConsumer();
+  Testing::CompileHelper compile_helper_;
 };
 
 TEST_F(TypedNodeTest, Empty) {
-  auto* tree = &GetTree("");
-  auto file = tree->ExtractFile();
+  auto& tree = compile_helper_.GetTreeAndSubtrees("");
+  auto file = tree.ExtractFile();
 
-  EXPECT_TRUE(tree->tree().IsValid(file.start));
-  EXPECT_TRUE(tree->ExtractAs<FileStart>(file.start).has_value());
-  EXPECT_TRUE(tree->Extract(file.start).has_value());
+  EXPECT_TRUE(tree.tree().IsValid(file.start));
+  EXPECT_TRUE(tree.ExtractAs<FileStart>(file.start).has_value());
+  EXPECT_TRUE(tree.Extract(file.start).has_value());
 
-  EXPECT_TRUE(tree->tree().IsValid(file.end));
-  EXPECT_TRUE(tree->ExtractAs<FileEnd>(file.end).has_value());
-  EXPECT_TRUE(tree->Extract(file.end).has_value());
+  EXPECT_TRUE(tree.tree().IsValid(file.end));
+  EXPECT_TRUE(tree.ExtractAs<FileEnd>(file.end).has_value());
+  EXPECT_TRUE(tree.Extract(file.end).has_value());
 
-  EXPECT_FALSE(tree->tree().IsValid<FileEnd>(file.start));
-  EXPECT_FALSE(tree->ExtractAs<FileEnd>(file.start).has_value());
+  EXPECT_FALSE(tree.tree().IsValid<FileEnd>(file.start));
+  EXPECT_FALSE(tree.ExtractAs<FileEnd>(file.start).has_value());
 }
 
 TEST_F(TypedNodeTest, Function) {
-  auto* tree = &GetTree(R"carbon(
+  auto& tree = compile_helper_.GetTreeAndSubtrees(R"carbon(
     fn F() {}
     virtual fn G() -> i32;
   )carbon");
-  auto file = tree->ExtractFile();
+  auto file = tree.ExtractFile();
 
   ASSERT_EQ(file.decls.size(), 2);
 
-  auto f_fn = tree->ExtractAs<FunctionDefinition>(file.decls[0]);
+  auto f_fn = tree.ExtractAs<FunctionDefinition>(file.decls[0]);
   ASSERT_TRUE(f_fn.has_value());
-  auto f_sig = tree->Extract(f_fn->signature);
+  auto f_sig = tree.Extract(f_fn->signature);
   ASSERT_TRUE(f_sig.has_value());
   EXPECT_FALSE(f_sig->return_type.has_value());
   EXPECT_TRUE(f_sig->modifiers.empty());
 
-  auto g_fn = tree->ExtractAs<FunctionDecl>(file.decls[1]);
+  auto g_fn = tree.ExtractAs<FunctionDecl>(file.decls[1]);
   ASSERT_TRUE(g_fn.has_value());
   EXPECT_TRUE(g_fn->return_type.has_value());
   EXPECT_FALSE(g_fn->modifiers.empty());
 }
 
 TEST_F(TypedNodeTest, ModifierOrder) {
-  auto* tree = &GetTree(R"carbon(
+  auto& tree = compile_helper_.GetTreeAndSubtrees(R"carbon(
     private abstract virtual default interface I;
   )carbon");
-  auto file = tree->ExtractFile();
+  auto file = tree.ExtractFile();
 
   ASSERT_EQ(file.decls.size(), 1);
 
-  auto decl = tree->ExtractAs<InterfaceDecl>(file.decls[0]);
+  auto decl = tree.ExtractAs<InterfaceDecl>(file.decls[0]);
   ASSERT_TRUE(decl.has_value());
   ASSERT_EQ(decl->modifiers.size(), 4);
   // Note that the order here matches the source order, but is reversed from
   // sibling iteration order.
-  ASSERT_TRUE(tree->ExtractAs<PrivateModifier>(decl->modifiers[0]).has_value());
-  ASSERT_TRUE(
-      tree->ExtractAs<AbstractModifier>(decl->modifiers[1]).has_value());
-  ASSERT_TRUE(tree->ExtractAs<VirtualModifier>(decl->modifiers[2]).has_value());
-  ASSERT_TRUE(tree->ExtractAs<DefaultModifier>(decl->modifiers[3]).has_value());
+  ASSERT_TRUE(tree.ExtractAs<PrivateModifier>(decl->modifiers[0]).has_value());
+  ASSERT_TRUE(tree.ExtractAs<AbstractModifier>(decl->modifiers[1]).has_value());
+  ASSERT_TRUE(tree.ExtractAs<VirtualModifier>(decl->modifiers[2]).has_value());
+  ASSERT_TRUE(tree.ExtractAs<DefaultModifier>(decl->modifiers[3]).has_value());
 }
 
 TEST_F(TypedNodeTest, For) {
-  auto* tree = &GetTree(R"carbon(
+  auto& tree = compile_helper_.GetTreeAndSubtrees(R"carbon(
     fn F(arr: [i32; 5]) {
       for (var v: i32 in arr) {
         Print(v);
       }
     }
   )carbon");
-  auto file = tree->ExtractFile();
+  auto file = tree.ExtractFile();
 
   ASSERT_EQ(file.decls.size(), 1);
-  auto fn = tree->ExtractAs<FunctionDefinition>(file.decls[0]);
+  auto fn = tree.ExtractAs<FunctionDefinition>(file.decls[0]);
   ASSERT_TRUE(fn.has_value());
   ASSERT_EQ(fn->body.size(), 1);
-  auto for_stmt = tree->ExtractAs<ForStatement>(fn->body[0]);
+  auto for_stmt = tree.ExtractAs<ForStatement>(fn->body[0]);
   ASSERT_TRUE(for_stmt.has_value());
-  auto for_header = tree->Extract(for_stmt->header);
+  auto for_header = tree.Extract(for_stmt->header);
   ASSERT_TRUE(for_header.has_value());
-  auto for_var = tree->Extract(for_header->var);
+  auto for_var = tree.Extract(for_header->var);
   ASSERT_TRUE(for_var.has_value());
-  auto for_var_binding = tree->ExtractAs<BindingPattern>(for_var->pattern);
+  auto for_var_binding = tree.ExtractAs<BindingPattern>(for_var->pattern);
   ASSERT_TRUE(for_var_binding.has_value());
-  auto for_var_name = tree->ExtractAs<IdentifierName>(for_var_binding->name);
+  auto for_var_name = tree.ExtractAs<IdentifierName>(for_var_binding->name);
   ASSERT_TRUE(for_var_name.has_value());
 }
 
 TEST_F(TypedNodeTest, VerifyExtractTraceLibrary) {
-  auto* tree = &GetTree(R"carbon(
+  auto& tree = compile_helper_.GetTreeAndSubtrees(R"carbon(
     impl library default;
   )carbon");
-  auto file = tree->ExtractFile();
+  auto file = tree.ExtractFile();
 
   ASSERT_EQ(file.decls.size(), 1);
   ErrorBuilder trace;
-  auto library = TypedNodesTestPeer::VerifyExtractAs<LibraryDecl>(
-      tree, file.decls[0], &trace);
+  auto library =
+      Peer::VerifyExtractAs<LibraryDecl>(tree, file.decls[0], &trace);
   EXPECT_TRUE(library.has_value());
   Error err = trace;
   // Use Regex matching to avoid hard-coding the result of `typeinfo(T).name()`.
@@ -191,15 +160,14 @@ Aggregate [^:]*: success
 }
 
 TEST_F(TypedNodeTest, VerifyExtractTraceVarNoInit) {
-  auto* tree = &GetTree(R"carbon(
+  auto& tree = compile_helper_.GetTreeAndSubtrees(R"carbon(
     var x: bool;
   )carbon");
-  auto file = tree->ExtractFile();
+  auto file = tree.ExtractFile();
 
   ASSERT_EQ(file.decls.size(), 1);
   ErrorBuilder trace;
-  auto var = TypedNodesTestPeer::VerifyExtractAs<VariableDecl>(
-      tree, file.decls[0], &trace);
+  auto var = Peer::VerifyExtractAs<VariableDecl>(tree, file.decls[0], &trace);
   ASSERT_TRUE(var.has_value());
   Error err = trace;
   // Use Regex matching to avoid hard-coding the result of `typeinfo(T).name()`.
@@ -223,15 +191,14 @@ Aggregate [^:]*: success
 }
 
 TEST_F(TypedNodeTest, VerifyExtractTraceExpression) {
-  auto* tree = &GetTree(R"carbon(
+  auto& tree = compile_helper_.GetTreeAndSubtrees(R"carbon(
     var x: i32 = p->q.r;
   )carbon");
-  auto file = tree->ExtractFile();
+  auto file = tree.ExtractFile();
 
   ASSERT_EQ(file.decls.size(), 1);
   ErrorBuilder trace1;
-  auto var = TypedNodesTestPeer::VerifyExtractAs<VariableDecl>(
-      tree, file.decls[0], &trace1);
+  auto var = Peer::VerifyExtractAs<VariableDecl>(tree, file.decls[0], &trace1);
   ASSERT_TRUE(var.has_value());
   Error err1 = trace1;
   // Use Regex matching to avoid hard-coding the result of `typeinfo(T).name()`.
@@ -256,7 +223,7 @@ Aggregate [^:]*: success
 
   ASSERT_TRUE(var->initializer.has_value());
   ErrorBuilder trace2;
-  auto value = TypedNodesTestPeer::VerifyExtractAs<MemberAccessExpr>(
+  auto value = Peer::VerifyExtractAs<MemberAccessExpr>(
       tree, var->initializer->value, &trace2);
   ASSERT_TRUE(value.has_value());
   Error err2 = trace2;
@@ -270,15 +237,15 @@ Aggregate [^:]*: success
 }
 
 TEST_F(TypedNodeTest, VerifyExtractTraceClassDecl) {
-  auto* tree = &GetTree(R"carbon(
+  auto& tree = compile_helper_.GetTreeAndSubtrees(R"carbon(
     private abstract class N.C(T:! type);
   )carbon");
-  auto file = tree->ExtractFile();
+  auto file = tree.ExtractFile();
 
   ASSERT_EQ(file.decls.size(), 1);
   ErrorBuilder trace;
-  auto class_decl = TypedNodesTestPeer::VerifyExtractAs<ClassDecl>(
-      tree, file.decls[0], &trace);
+  auto class_decl =
+      Peer::VerifyExtractAs<ClassDecl>(tree, file.decls[0], &trace);
   EXPECT_TRUE(class_decl.has_value());
   Error err = trace;
   // Use Regex matching to avoid hard-coding the result of `typeinfo(T).name()`.
@@ -310,50 +277,50 @@ Aggregate [^:]*: success
 }
 
 TEST_F(TypedNodeTest, Token) {
-  auto [tokens, tree] = GetTokenizedBufferAndTree(R"carbon(
+  auto [tokens, tree] =
+      compile_helper_.GetTokenizedBufferWithTreeAndSubtrees(R"carbon(
     var n: i32 = 0;
   )carbon");
-  auto file = tree->ExtractFile();
+  auto file = tree.ExtractFile();
 
   ASSERT_EQ(file.decls.size(), 1);
 
-  auto n_var = tree->ExtractAs<VariableDecl>(file.decls[0]);
+  auto n_var = tree.ExtractAs<VariableDecl>(file.decls[0]);
   ASSERT_TRUE(n_var.has_value());
-  EXPECT_EQ(tokens->GetKind(n_var->token), Lex::TokenKind::Semi);
+  EXPECT_EQ(tokens.GetKind(n_var->token), Lex::TokenKind::Semi);
 
-  auto n_intro = tree->ExtractAs<VariableIntroducer>(n_var->introducer);
+  auto n_intro = tree.ExtractAs<VariableIntroducer>(n_var->introducer);
   ASSERT_TRUE(n_intro.has_value());
-  EXPECT_EQ(tokens->GetKind(n_intro->token), Lex::TokenKind::Var);
+  EXPECT_EQ(tokens.GetKind(n_intro->token), Lex::TokenKind::Var);
 
-  auto n_patt = tree->ExtractAs<BindingPattern>(n_var->pattern);
+  auto n_patt = tree.ExtractAs<BindingPattern>(n_var->pattern);
   ASSERT_TRUE(n_patt.has_value());
-  EXPECT_EQ(tokens->GetKind(n_patt->token), Lex::TokenKind::Colon);
+  EXPECT_EQ(tokens.GetKind(n_patt->token), Lex::TokenKind::Colon);
 }
 
 TEST_F(TypedNodeTest, VerifyInvalid) {
-  auto* tree = &GetTree(R"carbon(
+  auto& tree = compile_helper_.GetTreeAndSubtrees(R"carbon(
     fn F() -> i32 { return 0; }
   )carbon");
 
-  auto file = tree->ExtractFile();
+  auto file = tree.ExtractFile();
   ASSERT_EQ(file.decls.size(), 1);
 
-  auto f_fn = tree->ExtractAs<FunctionDefinition>(file.decls[0]);
+  auto f_fn = tree.ExtractAs<FunctionDefinition>(file.decls[0]);
   ASSERT_TRUE(f_fn.has_value());
-  auto f_sig = tree->ExtractAs<FunctionDefinitionStart>(f_fn->signature);
+  auto f_sig = tree.ExtractAs<FunctionDefinitionStart>(f_fn->signature);
   ASSERT_TRUE(f_sig.has_value());
-  auto f_intro = tree->ExtractAs<FunctionIntroducer>(f_sig->introducer);
+  auto f_intro = tree.ExtractAs<FunctionIntroducer>(f_sig->introducer);
   ASSERT_TRUE(f_intro.has_value());
 
   // Change the kind of the introducer and check we get a good trace log.
-  TypedNodesTestPeer::SetNodeKind(&tree_storage_.front(), f_sig->introducer,
-                                  NodeKind::ClassIntroducer);
+  Peer::SetNodeKind(tree.tree(), f_sig->introducer, NodeKind::ClassIntroducer);
 
   // The introducer should not extract as a FunctionIntroducer any more because
   // the kind is wrong.
   {
     ErrorBuilder trace;
-    EXPECT_FALSE(TypedNodesTestPeer::VerifyExtractAs<FunctionIntroducer>(
+    EXPECT_FALSE(Peer::VerifyExtractAs<FunctionIntroducer>(
         tree, f_sig->introducer, &trace));
 
     Error err = trace;
@@ -366,8 +333,8 @@ TEST_F(TypedNodeTest, VerifyInvalid) {
   // token kind is wrong.
   {
     ErrorBuilder trace;
-    EXPECT_FALSE(TypedNodesTestPeer::VerifyExtractAs<ClassIntroducer>(
-        tree, f_sig->introducer, &trace));
+    EXPECT_FALSE(Peer::VerifyExtractAs<ClassIntroducer>(tree, f_sig->introducer,
+                                                        &trace));
 
     Error err = trace;
     EXPECT_THAT(err.message(),
@@ -379,7 +346,7 @@ TEST_F(TypedNodeTest, VerifyInvalid) {
   // kind for the introducer is wrong.
   {
     ErrorBuilder trace;
-    EXPECT_FALSE(TypedNodesTestPeer::VerifyExtractAs<FunctionDefinitionStart>(
+    EXPECT_FALSE(Peer::VerifyExtractAs<FunctionDefinitionStart>(
         tree, f_fn->signature, &trace));
 
     Error err = trace;

+ 15 - 0
toolchain/testing/BUILD

@@ -8,6 +8,21 @@ load("//testing/file_test:rules.bzl", "file_test")
 
 package(default_visibility = ["//visibility:public"])
 
+cc_library(
+    name = "compile_helper",
+    testonly = 1,
+    srcs = ["compile_helper.cpp"],
+    hdrs = ["compile_helper.h"],
+    deps = [
+        "//toolchain/diagnostics:diagnostic_emitter",
+        "//toolchain/lex",
+        "//toolchain/parse",
+        "//toolchain/parse:tree",
+        "//toolchain/source:source_buffer",
+        "@llvm-project//llvm:Support",
+    ],
+)
+
 file_test(
     name = "file_test",
     size = "small",

+ 56 - 0
toolchain/testing/compile_helper.cpp

@@ -0,0 +1,56 @@
+// Part of the Carbon Language project, under the Apache License v2.0 with LLVM
+// Exceptions. See /LICENSE for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+
+#include "toolchain/testing/compile_helper.h"
+
+namespace Carbon::Testing {
+
+auto CompileHelper::GetTokenizedBuffer(llvm::StringRef text,
+                                       DiagnosticConsumer* consumer)
+    -> Lex::TokenizedBuffer& {
+  auto& source = GetSourceBuffer(text);
+
+  value_store_storage_.emplace_front();
+  token_storage_.push_front(Lex::Lex(value_store_storage_.front(), source,
+                                     consumer ? *consumer : consumer_));
+  return token_storage_.front();
+}
+
+auto CompileHelper::GetTokenizedBufferWithSharedValueStore(llvm::StringRef text)
+    -> std::pair<Lex::TokenizedBuffer&, SharedValueStores&> {
+  auto& tokens = GetTokenizedBuffer(text);
+  return {tokens, value_store_storage_.front()};
+}
+
+auto CompileHelper::GetTree(llvm::StringRef text) -> Parse::Tree& {
+  auto& tokens = GetTokenizedBuffer(text);
+  tree_storage_.push_front(Parse::Parse(tokens, consumer_,
+                                        /*vlog_stream=*/nullptr));
+  return tree_storage_.front();
+}
+
+auto CompileHelper::GetTreeAndSubtrees(llvm::StringRef text)
+    -> Parse::TreeAndSubtrees& {
+  auto& tree = GetTree(text);
+  tree_and_subtrees_storage_.push_front(
+      Parse::TreeAndSubtrees(token_storage_.front(), tree));
+  return tree_and_subtrees_storage_.front();
+}
+
+auto CompileHelper::GetTokenizedBufferWithTreeAndSubtrees(llvm::StringRef text)
+    -> std::pair<Lex::TokenizedBuffer&, Parse::TreeAndSubtrees&> {
+  auto& tree_and_subtrees = GetTreeAndSubtrees(text);
+  return {token_storage_.front(), tree_and_subtrees};
+}
+
+auto CompileHelper::GetSourceBuffer(llvm::StringRef text) -> SourceBuffer& {
+  std::string filename = llvm::formatv("test{0}.carbon", ++file_index_);
+  CARBON_CHECK(fs_.addFile(filename, /*ModificationTime=*/0,
+                           llvm::MemoryBuffer::getMemBuffer(text)));
+  source_storage_.push_front(
+      std::move(*SourceBuffer::MakeFromFile(fs_, filename, consumer_)));
+  return source_storage_.front();
+}
+
+}  // namespace Carbon::Testing

+ 64 - 0
toolchain/testing/compile_helper.h

@@ -0,0 +1,64 @@
+// Part of the Carbon Language project, under the Apache License v2.0 with LLVM
+// Exceptions. See /LICENSE for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+
+#ifndef CARBON_TOOLCHAIN_TESTING_COMPILE_HELPER_H_
+#define CARBON_TOOLCHAIN_TESTING_COMPILE_HELPER_H_
+
+#include <forward_list>
+
+#include "llvm/Support/VirtualFileSystem.h"
+#include "toolchain/diagnostics/diagnostic_consumer.h"
+#include "toolchain/lex/lex.h"
+#include "toolchain/parse/parse.h"
+#include "toolchain/parse/tree_and_subtrees.h"
+#include "toolchain/source/source_buffer.h"
+
+namespace Carbon::Testing {
+
+// A test helper for compile-related functionality.
+class CompileHelper {
+ public:
+  // Returns the result of lex.
+  auto GetTokenizedBuffer(llvm::StringRef text,
+                          DiagnosticConsumer* consumer = nullptr)
+      -> Lex::TokenizedBuffer&;
+
+  // Returns the result of lex along with the per-file shared value store.
+  auto GetTokenizedBufferWithSharedValueStore(llvm::StringRef text)
+      -> std::pair<Lex::TokenizedBuffer&, SharedValueStores&>;
+
+  // Returns the result of parse.
+  auto GetTree(llvm::StringRef text) -> Parse::Tree&;
+
+  // Returns the result of parse (with extra subtree information).
+  auto GetTreeAndSubtrees(llvm::StringRef text) -> Parse::TreeAndSubtrees&;
+
+  // Returns the results of both lex and parse (with extra subtree information).
+  auto GetTokenizedBufferWithTreeAndSubtrees(llvm::StringRef text)
+      -> std::pair<Lex::TokenizedBuffer&, Parse::TreeAndSubtrees&>;
+
+ private:
+  // Produces a source buffer for the input text.
+  auto GetSourceBuffer(llvm::StringRef text) -> SourceBuffer&;
+
+  // Diagnostics will be printed to console.
+  DiagnosticConsumer& consumer_ = ConsoleDiagnosticConsumer();
+
+  // An index to generate unique filenames.
+  int file_index_ = 0;
+
+  // A filesystem for storing test files.
+  llvm::vfs::InMemoryFileSystem fs_;
+
+  // Storage for compile artifacts; forward_list keeps returned references stable.
+  std::forward_list<SourceBuffer> source_storage_;
+  std::forward_list<SharedValueStores> value_store_storage_;
+  std::forward_list<Lex::TokenizedBuffer> token_storage_;
+  std::forward_list<Parse::Tree> tree_storage_;
+  std::forward_list<Parse::TreeAndSubtrees> tree_and_subtrees_storage_;
+};
+
+}  // namespace Carbon::Testing
+
+#endif  // CARBON_TOOLCHAIN_TESTING_COMPILE_HELPER_H_