| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682 |
- // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
- // Exceptions. See /LICENSE for license information.
- // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#include "lexer/tokenized_buffer.h"

#include <forward_list>
#include <iterator>

#include "diagnostics/diagnostic_emitter.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "lexer/tokenized_buffer_test_helpers.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Sequence.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/Twine.h"
#include "llvm/Support/SourceMgr.h"
#include "llvm/Support/YAMLParser.h"
#include "llvm/Support/raw_ostream.h"
- namespace Carbon {
- namespace {
- using ::Carbon::Testing::ExpectedToken;
- using ::Carbon::Testing::HasTokens;
- using ::Carbon::Testing::IsKeyValueScalars;
- using ::testing::Eq;
- using ::testing::NotNull;
- using ::testing::StrEq;
- struct LexerTest : ::testing::Test {
- llvm::SmallVector<SourceBuffer, 16> source_storage;
- auto GetSourceBuffer(llvm::Twine text) -> SourceBuffer& {
- source_storage.push_back(SourceBuffer::CreateFromText(text.str()));
- return source_storage.back();
- }
- auto Lex(llvm::Twine text) -> TokenizedBuffer {
- // TODO: build a full mock for this.
- return TokenizedBuffer::Lex(GetSourceBuffer(text), NullDiagnosticEmitter());
- }
- };
- TEST_F(LexerTest, HandlesEmptyBuffer) {
- auto buffer = Lex("");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_EQ(buffer.Tokens().begin(), buffer.Tokens().end());
- }
- TEST_F(LexerTest, TracksLinesAndColumns) {
- auto buffer = Lex("\n ;;\n ;;;\n");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {.kind = TokenKind::Semi(),
- .line = 2,
- .column = 3,
- .indent_column = 3},
- {.kind = TokenKind::Semi(),
- .line = 2,
- .column = 4,
- .indent_column = 3},
- {.kind = TokenKind::Semi(),
- .line = 3,
- .column = 4,
- .indent_column = 4},
- {.kind = TokenKind::Semi(),
- .line = 3,
- .column = 5,
- .indent_column = 4},
- {.kind = TokenKind::Semi(),
- .line = 3,
- .column = 6,
- .indent_column = 4},
- }));
- }
- TEST_F(LexerTest, HandlesIntegerLiteral) {
- auto buffer = Lex("12-578\n 1 2");
- EXPECT_FALSE(buffer.HasErrors());
- ASSERT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {.kind = TokenKind::IntegerLiteral(),
- .line = 1,
- .column = 1,
- .indent_column = 1,
- .text = "12"},
- {.kind = TokenKind::Minus(),
- .line = 1,
- .column = 3,
- .indent_column = 1},
- {.kind = TokenKind::IntegerLiteral(),
- .line = 1,
- .column = 4,
- .indent_column = 1,
- .text = "578"},
- {.kind = TokenKind::IntegerLiteral(),
- .line = 2,
- .column = 3,
- .indent_column = 3,
- .text = "1"},
- {.kind = TokenKind::IntegerLiteral(),
- .line = 2,
- .column = 6,
- .indent_column = 3,
- .text = "2"},
- }));
- auto token_12 = buffer.Tokens().begin();
- EXPECT_EQ(buffer.GetIntegerLiteral(*token_12), 12);
- auto token_578 = buffer.Tokens().begin() + 2;
- EXPECT_EQ(buffer.GetIntegerLiteral(*token_578), 578);
- auto token_1 = buffer.Tokens().begin() + 3;
- EXPECT_EQ(buffer.GetIntegerLiteral(*token_1), 1);
- auto token_2 = buffer.Tokens().begin() + 4;
- EXPECT_EQ(buffer.GetIntegerLiteral(*token_2), 2);
- }
TEST_F(LexerTest, HandlesGarbageCharacters) {
  // The buffer mixes valid tokens with characters the lexer rejects,
  // including a multi-byte UTF-8 emoji and an embedded NUL byte. Passing an
  // explicit length (sizeof - 1 to drop the array's trailing NUL) keeps the
  // embedded NUL inside the lexed text rather than terminating it.
  constexpr char GarbageText[] = "$$💩-$\n$\0$12$";
  auto buffer = Lex(llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          // Explicit length: two '$'s plus the four-byte UTF-8 emoji.
          {.kind = TokenKind::Error(),
           .line = 1,
           .column = 1,
           .text = llvm::StringRef("$$💩", 6)},
          // 💩 takes 4 bytes, and we count column as bytes offset.
          {.kind = TokenKind::Minus(), .line = 1, .column = 7},
          {.kind = TokenKind::Error(), .line = 1, .column = 8, .text = "$"},
          // newline
          // Explicit length so the embedded NUL is part of the error text.
          {.kind = TokenKind::Error(),
           .line = 2,
           .column = 1,
           .text = llvm::StringRef("$\0$", 3)},
          {.kind = TokenKind::IntegerLiteral(),
           .line = 2,
           .column = 4,
           .text = "12"},
          {.kind = TokenKind::Error(), .line = 2, .column = 6, .text = "$"},
      }));
}
- TEST_F(LexerTest, Symbols) {
- // We don't need to exhaustively test symbols here as they're handled with
- // common code, but we want to check specific patterns to verify things like
- // max-munch rule and handling of interesting symbols.
- auto buffer = Lex("<<<");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {TokenKind::LessLess()},
- {TokenKind::Less()},
- }));
- buffer = Lex("<<=>>");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {TokenKind::LessLessEqual()},
- {TokenKind::GreaterGreater()},
- }));
- buffer = Lex("< <=> >");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {TokenKind::Less()},
- {TokenKind::LessEqualGreater()},
- {TokenKind::Greater()},
- }));
- buffer = Lex("\\/?#@&^!");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {TokenKind::Backslash()},
- {TokenKind::Slash()},
- {TokenKind::Question()},
- {TokenKind::Hash()},
- {TokenKind::At()},
- {TokenKind::Amp()},
- {TokenKind::Caret()},
- {TokenKind::Exclaim()},
- }));
- }
- TEST_F(LexerTest, Parens) {
- auto buffer = Lex("()");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {TokenKind::OpenParen()},
- {TokenKind::CloseParen()},
- }));
- buffer = Lex("((()()))");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {TokenKind::OpenParen()},
- {TokenKind::OpenParen()},
- {TokenKind::OpenParen()},
- {TokenKind::CloseParen()},
- {TokenKind::OpenParen()},
- {TokenKind::CloseParen()},
- {TokenKind::CloseParen()},
- {TokenKind::CloseParen()},
- }));
- }
- TEST_F(LexerTest, CurlyBraces) {
- auto buffer = Lex("{}");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {TokenKind::OpenCurlyBrace()},
- {TokenKind::CloseCurlyBrace()},
- }));
- buffer = Lex("{{{}{}}}");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {TokenKind::OpenCurlyBrace()},
- {TokenKind::OpenCurlyBrace()},
- {TokenKind::OpenCurlyBrace()},
- {TokenKind::CloseCurlyBrace()},
- {TokenKind::OpenCurlyBrace()},
- {TokenKind::CloseCurlyBrace()},
- {TokenKind::CloseCurlyBrace()},
- {TokenKind::CloseCurlyBrace()},
- }));
- }
TEST_F(LexerTest, MatchingGroups) {
  // Simple case: one paren group followed by one curly group, "(){}".
  {
    TokenizedBuffer buffer = Lex("(){}");
    ASSERT_FALSE(buffer.HasErrors());
    auto it = buffer.Tokens().begin();
    auto open_paren_token = *it++;
    auto close_paren_token = *it++;
    // Matching is bidirectional: open -> close and close -> open.
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    auto open_curly_token = *it++;
    auto close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(buffer.Tokens().end(), it);
  }
  // Mixed nesting of parens and curlies, with identifiers inside the groups.
  {
    TokenizedBuffer buffer = Lex("({x}){(y)} {{((z))}}");
    ASSERT_FALSE(buffer.HasErrors());
    auto it = buffer.Tokens().begin();
    // First group: "({x})" -- a curly group nested inside a paren group.
    auto open_paren_token = *it++;
    auto open_curly_token = *it++;
    ASSERT_EQ("x", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    auto close_curly_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    // Second group: "{(y)}" -- the opposite nesting order.
    open_curly_token = *it++;
    open_paren_token = *it++;
    ASSERT_EQ("y", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    close_paren_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    // Third group: "{{((z))}}" -- two levels of each kind; each nesting level
    // must match independently.
    open_curly_token = *it++;
    auto inner_open_curly_token = *it++;
    open_paren_token = *it++;
    auto inner_open_paren_token = *it++;
    ASSERT_EQ("z", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    auto inner_close_paren_token = *it++;
    close_paren_token = *it++;
    auto inner_close_curly_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(inner_close_curly_token,
              buffer.GetMatchedClosingToken(inner_open_curly_token));
    EXPECT_EQ(inner_open_curly_token,
              buffer.GetMatchedOpeningToken(inner_close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(inner_close_paren_token,
              buffer.GetMatchedClosingToken(inner_open_paren_token));
    EXPECT_EQ(inner_open_paren_token,
              buffer.GetMatchedOpeningToken(inner_close_paren_token));
    EXPECT_EQ(buffer.Tokens().end(), it);
  }
}
TEST_F(LexerTest, MismatchedGroups) {
  // An unclosed open brace: the lexer reports an error and synthesizes a
  // matching close brace (marked `.recovery = true`).
  auto buffer = Lex("{");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {TokenKind::OpenCurlyBrace()},
                  {.kind = TokenKind::CloseCurlyBrace(), .recovery = true},
              }));
  // A close brace with no opener becomes an error token, not a brace.
  buffer = Lex("}");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Error(), .text = "}"},
                      }));
  // "{(}" -- the paren is unclosed when the brace closes; a recovery close
  // paren is inserted at the close brace's position (column 3).
  buffer = Lex("{(}");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::OpenCurlyBrace(), .column = 1},
          {.kind = TokenKind::OpenParen(), .column = 2},
          {.kind = TokenKind::CloseParen(), .column = 3, .recovery = true},
          {.kind = TokenKind::CloseCurlyBrace(), .column = 3},
      }));
  // ")({)" -- a stray leading close paren becomes an error; the unclosed
  // brace gets a recovery close brace before the final close paren.
  buffer = Lex(")({)");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::Error(), .column = 1, .text = ")"},
          {.kind = TokenKind::OpenParen(), .column = 2},
          {.kind = TokenKind::OpenCurlyBrace(), .column = 3},
          {.kind = TokenKind::CloseCurlyBrace(), .column = 4, .recovery = true},
          {.kind = TokenKind::CloseParen(), .column = 4},
      }));
}
- TEST_F(LexerTest, Keywords) {
- auto buffer = Lex(" fn");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(
- buffer,
- HasTokens(llvm::ArrayRef<ExpectedToken>{
- {.kind = TokenKind::FnKeyword(), .column = 4, .indent_column = 4},
- }));
- buffer = Lex("and or not if else for loop return var break continue _");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {TokenKind::AndKeyword()},
- {TokenKind::OrKeyword()},
- {TokenKind::NotKeyword()},
- {TokenKind::IfKeyword()},
- {TokenKind::ElseKeyword()},
- {TokenKind::ForKeyword()},
- {TokenKind::LoopKeyword()},
- {TokenKind::ReturnKeyword()},
- {TokenKind::VarKeyword()},
- {TokenKind::BreakKeyword()},
- {TokenKind::ContinueKeyword()},
- {TokenKind::UnderscoreKeyword()},
- }));
- }
- TEST_F(LexerTest, Comments) {
- auto buffer = Lex(" ;\n // foo\n ;");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {.kind = TokenKind::Semi(),
- .line = 1,
- .column = 2,
- .indent_column = 2},
- {.kind = TokenKind::Semi(),
- .line = 3,
- .column = 3,
- .indent_column = 3},
- }));
- buffer = Lex("// foo\n//\n// bar");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{}));
- // Make sure weird characters aren't a problem.
- buffer = Lex(" //foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{}));
- }
- TEST_F(LexerTest, DocComments) {
- auto buffer = Lex(" /// foo");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {.kind = TokenKind::DocComment(),
- .line = 1,
- .column = 3,
- .indent_column = 3,
- .text = "/// foo"},
- }));
- buffer = Lex("/// foo\n//\n/// bar");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {.kind = TokenKind::DocComment(),
- .line = 1,
- .column = 1,
- .indent_column = 1,
- .text = "/// foo"},
- {.kind = TokenKind::DocComment(),
- .line = 3,
- .column = 1,
- .indent_column = 1,
- .text = "/// bar"},
- }));
- buffer = Lex("/// foo\n///\n/// bar");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {.kind = TokenKind::DocComment(),
- .line = 1,
- .column = 1,
- .indent_column = 1,
- .text = "/// foo"},
- {.kind = TokenKind::DocComment(),
- .line = 2,
- .column = 1,
- .indent_column = 1,
- .text = "///"},
- {.kind = TokenKind::DocComment(),
- .line = 3,
- .column = 1,
- .indent_column = 1,
- .text = "/// bar"},
- }));
- // Make sure weird characters aren't a problem.
- buffer = Lex(" ///foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {.kind = TokenKind::DocComment(),
- .line = 1,
- .column = 3,
- .indent_column = 3,
- .text = "///foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]"},
- }));
- }
- TEST_F(LexerTest, Identifiers) {
- auto buffer = Lex(" foobar");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {.kind = TokenKind::Identifier(),
- .column = 4,
- .indent_column = 4,
- .text = "foobar"},
- }));
- // Check different kinds of identifier character sequences.
- buffer = Lex("_foo_bar");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {.kind = TokenKind::Identifier(), .text = "_foo_bar"},
- }));
- buffer = Lex("foo2bar00");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer,
- HasTokens(llvm::ArrayRef<ExpectedToken>{
- {.kind = TokenKind::Identifier(), .text = "foo2bar00"},
- }));
- // Check that we can parse identifiers that start with a keyword.
- buffer = Lex("fnord");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {.kind = TokenKind::Identifier(), .text = "fnord"},
- }));
- // Check multiple identifiers with indent and interning.
- buffer = Lex(" foo;bar\nbar \n foo\tfoo");
- EXPECT_FALSE(buffer.HasErrors());
- EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
- {.kind = TokenKind::Identifier(),
- .line = 1,
- .column = 4,
- .indent_column = 4,
- .text = "foo"},
- {.kind = TokenKind::Semi()},
- {.kind = TokenKind::Identifier(),
- .line = 1,
- .column = 8,
- .indent_column = 4,
- .text = "bar"},
- {.kind = TokenKind::Identifier(),
- .line = 2,
- .column = 1,
- .indent_column = 1,
- .text = "bar"},
- {.kind = TokenKind::Identifier(),
- .line = 3,
- .column = 3,
- .indent_column = 3,
- .text = "foo"},
- {.kind = TokenKind::Identifier(),
- .line = 3,
- .column = 7,
- .indent_column = 3,
- .text = "foo"},
- }));
- }
- auto GetAndDropLine(llvm::StringRef& text) -> std::string {
- auto newline_offset = text.find_first_of('\n');
- llvm::StringRef line = text.slice(0, newline_offset);
- if (newline_offset != llvm::StringRef::npos) {
- text = text.substr(newline_offset + 1);
- } else {
- text = "";
- }
- return line.str();
- }
- TEST_F(LexerTest, Printing) {
- auto buffer = Lex(";");
- ASSERT_FALSE(buffer.HasErrors());
- std::string print_storage;
- llvm::raw_string_ostream print_stream(print_storage);
- buffer.Print(print_stream);
- llvm::StringRef print = print_stream.str();
- EXPECT_THAT(GetAndDropLine(print),
- StrEq("token: { index: 0, kind: 'Semi', line: 1, column: 1, "
- "indent: 1, spelling: ';' }"));
- EXPECT_TRUE(print.empty()) << print;
- // Test kind padding.
- buffer = Lex("(;foo;)");
- ASSERT_FALSE(buffer.HasErrors());
- print_storage.clear();
- buffer.Print(print_stream);
- print = print_stream.str();
- EXPECT_THAT(GetAndDropLine(print),
- StrEq("token: { index: 0, kind: 'OpenParen', line: 1, column: "
- "1, indent: 1, spelling: '(', closing_token: 4 }"));
- EXPECT_THAT(GetAndDropLine(print),
- StrEq("token: { index: 1, kind: 'Semi', line: 1, column: "
- "2, indent: 1, spelling: ';' }"));
- EXPECT_THAT(GetAndDropLine(print),
- StrEq("token: { index: 2, kind: 'Identifier', line: 1, column: "
- "3, indent: 1, spelling: 'foo', identifier: 0 }"));
- EXPECT_THAT(GetAndDropLine(print),
- StrEq("token: { index: 3, kind: 'Semi', line: 1, column: "
- "6, indent: 1, spelling: ';' }"));
- EXPECT_THAT(GetAndDropLine(print),
- StrEq("token: { index: 4, kind: 'CloseParen', line: 1, column: "
- "7, indent: 1, spelling: ')', opening_token: 0 }"));
- EXPECT_TRUE(print.empty()) << print;
- // Test digit padding with max values of 9, 10, and 11.
- buffer = Lex(";\n\n\n\n\n\n\n\n\n\n ;;");
- ASSERT_FALSE(buffer.HasErrors());
- print_storage.clear();
- buffer.Print(print_stream);
- print = print_stream.str();
- EXPECT_THAT(GetAndDropLine(print),
- StrEq("token: { index: 0, kind: 'Semi', line: 1, column: 1, "
- "indent: 1, spelling: ';' }"));
- EXPECT_THAT(GetAndDropLine(print),
- StrEq("token: { index: 1, kind: 'Semi', line: 11, column: 9, "
- "indent: 9, spelling: ';' }"));
- EXPECT_THAT(GetAndDropLine(print),
- StrEq("token: { index: 2, kind: 'Semi', line: 11, column: 10, "
- "indent: 9, spelling: ';' }"));
- EXPECT_TRUE(print.empty()) << print;
- }
TEST_F(LexerTest, PrintingAsYaml) {
  // Test that we can parse this into YAML and verify line and indent data.
  auto buffer = Lex("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n\n");
  ASSERT_FALSE(buffer.HasErrors());
  std::string print_output;
  llvm::raw_string_ostream print_stream(print_output);
  buffer.Print(print_stream);
  print_stream.flush();
  // Parse the output into a YAML stream. This will print errors to stderr.
  llvm::SourceMgr source_manager;
  llvm::yaml::Stream yaml_stream(print_output, source_manager);
  auto yaml_it = yaml_stream.begin();
  auto* root_node = llvm::dyn_cast<llvm::yaml::MappingNode>(yaml_it->getRoot());
  ASSERT_THAT(root_node, NotNull());
  // Walk the top-level mapping of tokens, dig out the sub-mapping of data for
  // each taken, and then verify those entries.
  auto mapping_it = llvm::cast<llvm::yaml::MappingNode>(root_node)->begin();
  // First token: the ';' on line 2, indented by one space (column/indent 2).
  auto* token_node = llvm::dyn_cast<llvm::yaml::KeyValueNode>(&*mapping_it);
  ASSERT_THAT(token_node, NotNull());
  auto* token_key_node =
      llvm::dyn_cast<llvm::yaml::ScalarNode>(token_node->getKey());
  ASSERT_THAT(token_key_node, NotNull());
  EXPECT_THAT(token_key_node->getRawValue(), StrEq("token"));
  auto* token_value_node =
      llvm::dyn_cast<llvm::yaml::MappingNode>(token_node->getValue());
  ASSERT_THAT(token_value_node, NotNull());
  auto token_it = token_value_node->begin();
  EXPECT_THAT(&*token_it, IsKeyValueScalars("index", "0"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("kind", "Semi"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("line", "2"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("column", "2"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("indent", "2"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("spelling", ";"));
  EXPECT_THAT(++token_it, Eq(token_value_node->end()));
  ++mapping_it;
  // Second token: the first ';' on line 5, at column 1.
  token_node = llvm::dyn_cast<llvm::yaml::KeyValueNode>(&*mapping_it);
  ASSERT_THAT(token_node, NotNull());
  token_key_node = llvm::dyn_cast<llvm::yaml::ScalarNode>(token_node->getKey());
  ASSERT_THAT(token_key_node, NotNull());
  EXPECT_THAT(token_key_node->getRawValue(), StrEq("token"));
  token_value_node =
      llvm::dyn_cast<llvm::yaml::MappingNode>(token_node->getValue());
  ASSERT_THAT(token_value_node, NotNull());
  token_it = token_value_node->begin();
  EXPECT_THAT(&*token_it, IsKeyValueScalars("index", "1"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("kind", "Semi"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("line", "5"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("column", "1"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("indent", "1"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("spelling", ";"));
  EXPECT_THAT(++token_it, Eq(token_value_node->end()));
  ++mapping_it;
  // Third token: the second ';' on line 5, at column 3 (line indent stays 1).
  token_node = llvm::dyn_cast<llvm::yaml::KeyValueNode>(&*mapping_it);
  ASSERT_THAT(token_node, NotNull());
  token_key_node = llvm::dyn_cast<llvm::yaml::ScalarNode>(token_node->getKey());
  ASSERT_THAT(token_key_node, NotNull());
  EXPECT_THAT(token_key_node->getRawValue(), StrEq("token"));
  token_value_node =
      llvm::dyn_cast<llvm::yaml::MappingNode>(token_node->getValue());
  ASSERT_THAT(token_value_node, NotNull());
  token_it = token_value_node->begin();
  EXPECT_THAT(&*token_it, IsKeyValueScalars("index", "2"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("kind", "Semi"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("line", "5"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("column", "3"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("indent", "1"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("spelling", ";"));
  EXPECT_THAT(++token_it, Eq(token_value_node->end()));
  // No further tokens, and no further YAML documents in the stream.
  ASSERT_THAT(++mapping_it, Eq(root_node->end()));
  ASSERT_THAT(++yaml_it, Eq(yaml_stream.end()));
}
- } // namespace
- } // namespace Carbon
|