// tokenized_buffer_test.cpp
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#include "lexer/tokenized_buffer.h"

#include <forward_list>
#include <iterator>

#include "diagnostics/diagnostic_emitter.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "lexer/tokenized_buffer_test_helpers.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Sequence.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/Twine.h"
#include "llvm/Support/SourceMgr.h"
#include "llvm/Support/YAMLParser.h"
#include "llvm/Support/raw_ostream.h"
  18. namespace Carbon {
  19. namespace {
  20. using ::Carbon::Testing::ExpectedToken;
  21. using ::Carbon::Testing::HasTokens;
  22. using ::Carbon::Testing::IsKeyValueScalars;
  23. using ::testing::Eq;
  24. using ::testing::NotNull;
  25. using ::testing::StrEq;
  26. struct LexerTest : ::testing::Test {
  27. llvm::SmallVector<SourceBuffer, 16> source_storage;
  28. auto GetSourceBuffer(llvm::Twine text) -> SourceBuffer& {
  29. source_storage.push_back(SourceBuffer::CreateFromText(text.str()));
  30. return source_storage.back();
  31. }
  32. auto Lex(llvm::Twine text) -> TokenizedBuffer {
  33. // TODO: build a full mock for this.
  34. return TokenizedBuffer::Lex(GetSourceBuffer(text), NullDiagnosticEmitter());
  35. }
  36. };
  37. TEST_F(LexerTest, HandlesEmptyBuffer) {
  38. auto buffer = Lex("");
  39. EXPECT_FALSE(buffer.HasErrors());
  40. EXPECT_EQ(buffer.Tokens().begin(), buffer.Tokens().end());
  41. }
  42. TEST_F(LexerTest, TracksLinesAndColumns) {
  43. auto buffer = Lex("\n ;;\n ;;;\n");
  44. EXPECT_FALSE(buffer.HasErrors());
  45. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  46. {.kind = TokenKind::Semi(),
  47. .line = 2,
  48. .column = 3,
  49. .indent_column = 3},
  50. {.kind = TokenKind::Semi(),
  51. .line = 2,
  52. .column = 4,
  53. .indent_column = 3},
  54. {.kind = TokenKind::Semi(),
  55. .line = 3,
  56. .column = 4,
  57. .indent_column = 4},
  58. {.kind = TokenKind::Semi(),
  59. .line = 3,
  60. .column = 5,
  61. .indent_column = 4},
  62. {.kind = TokenKind::Semi(),
  63. .line = 3,
  64. .column = 6,
  65. .indent_column = 4},
  66. }));
  67. }
  68. TEST_F(LexerTest, HandlesIntegerLiteral) {
  69. auto buffer = Lex("12-578\n 1 2");
  70. EXPECT_FALSE(buffer.HasErrors());
  71. ASSERT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  72. {.kind = TokenKind::IntegerLiteral(),
  73. .line = 1,
  74. .column = 1,
  75. .indent_column = 1,
  76. .text = "12"},
  77. {.kind = TokenKind::Minus(),
  78. .line = 1,
  79. .column = 3,
  80. .indent_column = 1},
  81. {.kind = TokenKind::IntegerLiteral(),
  82. .line = 1,
  83. .column = 4,
  84. .indent_column = 1,
  85. .text = "578"},
  86. {.kind = TokenKind::IntegerLiteral(),
  87. .line = 2,
  88. .column = 3,
  89. .indent_column = 3,
  90. .text = "1"},
  91. {.kind = TokenKind::IntegerLiteral(),
  92. .line = 2,
  93. .column = 6,
  94. .indent_column = 3,
  95. .text = "2"},
  96. }));
  97. auto token_12 = buffer.Tokens().begin();
  98. EXPECT_EQ(buffer.GetIntegerLiteral(*token_12), 12);
  99. auto token_578 = buffer.Tokens().begin() + 2;
  100. EXPECT_EQ(buffer.GetIntegerLiteral(*token_578), 578);
  101. auto token_1 = buffer.Tokens().begin() + 3;
  102. EXPECT_EQ(buffer.GetIntegerLiteral(*token_1), 1);
  103. auto token_2 = buffer.Tokens().begin() + 4;
  104. EXPECT_EQ(buffer.GetIntegerLiteral(*token_2), 2);
  105. }
TEST_F(LexerTest, HandlesGarbageCharacters) {
  // Mixes a multi-byte UTF-8 sequence (💩), an embedded NUL, and digits to
  // check that unlexable bytes become maximal Error tokens while valid tokens
  // are still recovered between them.
  constexpr char GarbageText[] = "$$💩-$\n$\0$12$";
  // sizeof - 1 drops the array's implicit trailing NUL but keeps the embedded
  // one as part of the lexed text.
  auto buffer = Lex(llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          // The two `$`s and the emoji group into one contiguous error token;
          // explicit length covers the 4-byte emoji.
          {.kind = TokenKind::Error(),
           .line = 1,
           .column = 1,
           .text = llvm::StringRef("$$💩", 6)},
          // 💩 takes 4 bytes, and we count column as bytes offset.
          {.kind = TokenKind::Minus(), .line = 1, .column = 7},
          {.kind = TokenKind::Error(), .line = 1, .column = 8, .text = "$"},
          // newline
          // Explicit length keeps the embedded NUL inside the token text.
          {.kind = TokenKind::Error(),
           .line = 2,
           .column = 1,
           .text = llvm::StringRef("$\0$", 3)},
          {.kind = TokenKind::IntegerLiteral(),
           .line = 2,
           .column = 4,
           .text = "12"},
          {.kind = TokenKind::Error(), .line = 2, .column = 6, .text = "$"},
      }));
}
  132. TEST_F(LexerTest, Symbols) {
  133. // We don't need to exhaustively test symbols here as they're handled with
  134. // common code, but we want to check specific patterns to verify things like
  135. // max-munch rule and handling of interesting symbols.
  136. auto buffer = Lex("<<<");
  137. EXPECT_FALSE(buffer.HasErrors());
  138. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  139. {TokenKind::LessLess()},
  140. {TokenKind::Less()},
  141. }));
  142. buffer = Lex("<<=>>");
  143. EXPECT_FALSE(buffer.HasErrors());
  144. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  145. {TokenKind::LessLessEqual()},
  146. {TokenKind::GreaterGreater()},
  147. }));
  148. buffer = Lex("< <=> >");
  149. EXPECT_FALSE(buffer.HasErrors());
  150. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  151. {TokenKind::Less()},
  152. {TokenKind::LessEqualGreater()},
  153. {TokenKind::Greater()},
  154. }));
  155. buffer = Lex("\\/?#@&^!");
  156. EXPECT_FALSE(buffer.HasErrors());
  157. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  158. {TokenKind::Backslash()},
  159. {TokenKind::Slash()},
  160. {TokenKind::Question()},
  161. {TokenKind::Hash()},
  162. {TokenKind::At()},
  163. {TokenKind::Amp()},
  164. {TokenKind::Caret()},
  165. {TokenKind::Exclaim()},
  166. }));
  167. }
  168. TEST_F(LexerTest, Parens) {
  169. auto buffer = Lex("()");
  170. EXPECT_FALSE(buffer.HasErrors());
  171. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  172. {TokenKind::OpenParen()},
  173. {TokenKind::CloseParen()},
  174. }));
  175. buffer = Lex("((()()))");
  176. EXPECT_FALSE(buffer.HasErrors());
  177. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  178. {TokenKind::OpenParen()},
  179. {TokenKind::OpenParen()},
  180. {TokenKind::OpenParen()},
  181. {TokenKind::CloseParen()},
  182. {TokenKind::OpenParen()},
  183. {TokenKind::CloseParen()},
  184. {TokenKind::CloseParen()},
  185. {TokenKind::CloseParen()},
  186. }));
  187. }
  188. TEST_F(LexerTest, CurlyBraces) {
  189. auto buffer = Lex("{}");
  190. EXPECT_FALSE(buffer.HasErrors());
  191. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  192. {TokenKind::OpenCurlyBrace()},
  193. {TokenKind::CloseCurlyBrace()},
  194. }));
  195. buffer = Lex("{{{}{}}}");
  196. EXPECT_FALSE(buffer.HasErrors());
  197. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  198. {TokenKind::OpenCurlyBrace()},
  199. {TokenKind::OpenCurlyBrace()},
  200. {TokenKind::OpenCurlyBrace()},
  201. {TokenKind::CloseCurlyBrace()},
  202. {TokenKind::OpenCurlyBrace()},
  203. {TokenKind::CloseCurlyBrace()},
  204. {TokenKind::CloseCurlyBrace()},
  205. {TokenKind::CloseCurlyBrace()},
  206. }));
  207. }
TEST_F(LexerTest, MatchingGroups) {
  {
    // Simple case: `()` and `{}` as sibling groups.
    TokenizedBuffer buffer = Lex("(){}");
    ASSERT_FALSE(buffer.HasErrors());
    auto it = buffer.Tokens().begin();
    auto open_paren_token = *it++;
    auto close_paren_token = *it++;
    // Opening and closing tokens must point at each other.
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    auto open_curly_token = *it++;
    auto close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(buffer.Tokens().end(), it);
  }
  {
    // Nested and mixed groups, with identifiers inside to make sure matching
    // skips over interior tokens.
    TokenizedBuffer buffer = Lex("({x}){(y)} {{((z))}}");
    ASSERT_FALSE(buffer.HasErrors());
    auto it = buffer.Tokens().begin();
    // First group: `({x})`.
    auto open_paren_token = *it++;
    auto open_curly_token = *it++;
    ASSERT_EQ("x", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    auto close_curly_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    // Second group: `{(y)}`, with brace and paren roles swapped.
    open_curly_token = *it++;
    open_paren_token = *it++;
    ASSERT_EQ("y", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    close_paren_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    // Third group: `{{((z))}}` — two nesting levels of each bracket kind.
    open_curly_token = *it++;
    auto inner_open_curly_token = *it++;
    open_paren_token = *it++;
    auto inner_open_paren_token = *it++;
    ASSERT_EQ("z", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    auto inner_close_paren_token = *it++;
    close_paren_token = *it++;
    auto inner_close_curly_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(inner_close_curly_token,
              buffer.GetMatchedClosingToken(inner_open_curly_token));
    EXPECT_EQ(inner_open_curly_token,
              buffer.GetMatchedOpeningToken(inner_close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(inner_close_paren_token,
              buffer.GetMatchedClosingToken(inner_open_paren_token));
    EXPECT_EQ(inner_open_paren_token,
              buffer.GetMatchedOpeningToken(inner_close_paren_token));
    EXPECT_EQ(buffer.Tokens().end(), it);
  }
}
TEST_F(LexerTest, MismatchedGroups) {
  // An unclosed `{` is repaired with a synthesized (recovery) closing brace.
  auto buffer = Lex("{");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {TokenKind::OpenCurlyBrace()},
                  {.kind = TokenKind::CloseCurlyBrace(), .recovery = true},
              }));
  // A closing brace with no opener becomes an error token, not a recovery.
  buffer = Lex("}");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Error(), .text = "}"},
                      }));
  // The unclosed `(` gets a recovery `)` inserted before the real `}` so the
  // groups remain properly nested.
  buffer = Lex("{(}");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::OpenCurlyBrace(), .column = 1},
          {.kind = TokenKind::OpenParen(), .column = 2},
          {.kind = TokenKind::CloseParen(), .column = 3, .recovery = true},
          {.kind = TokenKind::CloseCurlyBrace(), .column = 3},
      }));
  // A leading stray `)` is an error; the trailing `)` forces a recovery `}`
  // to close the still-open brace first.
  buffer = Lex(")({)");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::Error(), .column = 1, .text = ")"},
          {.kind = TokenKind::OpenParen(), .column = 2},
          {.kind = TokenKind::OpenCurlyBrace(), .column = 3},
          {.kind = TokenKind::CloseCurlyBrace(), .column = 4, .recovery = true},
          {.kind = TokenKind::CloseParen(), .column = 4},
      }));
}
  320. TEST_F(LexerTest, Keywords) {
  321. auto buffer = Lex(" fn");
  322. EXPECT_FALSE(buffer.HasErrors());
  323. EXPECT_THAT(
  324. buffer,
  325. HasTokens(llvm::ArrayRef<ExpectedToken>{
  326. {.kind = TokenKind::FnKeyword(), .column = 4, .indent_column = 4},
  327. }));
  328. buffer = Lex("and or not if else for loop return var break continue _");
  329. EXPECT_FALSE(buffer.HasErrors());
  330. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  331. {TokenKind::AndKeyword()},
  332. {TokenKind::OrKeyword()},
  333. {TokenKind::NotKeyword()},
  334. {TokenKind::IfKeyword()},
  335. {TokenKind::ElseKeyword()},
  336. {TokenKind::ForKeyword()},
  337. {TokenKind::LoopKeyword()},
  338. {TokenKind::ReturnKeyword()},
  339. {TokenKind::VarKeyword()},
  340. {TokenKind::BreakKeyword()},
  341. {TokenKind::ContinueKeyword()},
  342. {TokenKind::UnderscoreKeyword()},
  343. }));
  344. }
  345. TEST_F(LexerTest, Comments) {
  346. auto buffer = Lex(" ;\n // foo\n ;");
  347. EXPECT_FALSE(buffer.HasErrors());
  348. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  349. {.kind = TokenKind::Semi(),
  350. .line = 1,
  351. .column = 2,
  352. .indent_column = 2},
  353. {.kind = TokenKind::Semi(),
  354. .line = 3,
  355. .column = 3,
  356. .indent_column = 3},
  357. }));
  358. buffer = Lex("// foo\n//\n// bar");
  359. EXPECT_FALSE(buffer.HasErrors());
  360. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{}));
  361. // Make sure weird characters aren't a problem.
  362. buffer = Lex(" //foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
  363. EXPECT_FALSE(buffer.HasErrors());
  364. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{}));
  365. }
  366. TEST_F(LexerTest, DocComments) {
  367. auto buffer = Lex(" /// foo");
  368. EXPECT_FALSE(buffer.HasErrors());
  369. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  370. {.kind = TokenKind::DocComment(),
  371. .line = 1,
  372. .column = 3,
  373. .indent_column = 3,
  374. .text = "/// foo"},
  375. }));
  376. buffer = Lex("/// foo\n//\n/// bar");
  377. EXPECT_FALSE(buffer.HasErrors());
  378. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  379. {.kind = TokenKind::DocComment(),
  380. .line = 1,
  381. .column = 1,
  382. .indent_column = 1,
  383. .text = "/// foo"},
  384. {.kind = TokenKind::DocComment(),
  385. .line = 3,
  386. .column = 1,
  387. .indent_column = 1,
  388. .text = "/// bar"},
  389. }));
  390. buffer = Lex("/// foo\n///\n/// bar");
  391. EXPECT_FALSE(buffer.HasErrors());
  392. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  393. {.kind = TokenKind::DocComment(),
  394. .line = 1,
  395. .column = 1,
  396. .indent_column = 1,
  397. .text = "/// foo"},
  398. {.kind = TokenKind::DocComment(),
  399. .line = 2,
  400. .column = 1,
  401. .indent_column = 1,
  402. .text = "///"},
  403. {.kind = TokenKind::DocComment(),
  404. .line = 3,
  405. .column = 1,
  406. .indent_column = 1,
  407. .text = "/// bar"},
  408. }));
  409. // Make sure weird characters aren't a problem.
  410. buffer = Lex(" ///foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
  411. EXPECT_FALSE(buffer.HasErrors());
  412. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  413. {.kind = TokenKind::DocComment(),
  414. .line = 1,
  415. .column = 3,
  416. .indent_column = 3,
  417. .text = "///foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]"},
  418. }));
  419. }
  420. TEST_F(LexerTest, Identifiers) {
  421. auto buffer = Lex(" foobar");
  422. EXPECT_FALSE(buffer.HasErrors());
  423. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  424. {.kind = TokenKind::Identifier(),
  425. .column = 4,
  426. .indent_column = 4,
  427. .text = "foobar"},
  428. }));
  429. // Check different kinds of identifier character sequences.
  430. buffer = Lex("_foo_bar");
  431. EXPECT_FALSE(buffer.HasErrors());
  432. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  433. {.kind = TokenKind::Identifier(), .text = "_foo_bar"},
  434. }));
  435. buffer = Lex("foo2bar00");
  436. EXPECT_FALSE(buffer.HasErrors());
  437. EXPECT_THAT(buffer,
  438. HasTokens(llvm::ArrayRef<ExpectedToken>{
  439. {.kind = TokenKind::Identifier(), .text = "foo2bar00"},
  440. }));
  441. // Check that we can parse identifiers that start with a keyword.
  442. buffer = Lex("fnord");
  443. EXPECT_FALSE(buffer.HasErrors());
  444. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  445. {.kind = TokenKind::Identifier(), .text = "fnord"},
  446. }));
  447. // Check multiple identifiers with indent and interning.
  448. buffer = Lex(" foo;bar\nbar \n foo\tfoo");
  449. EXPECT_FALSE(buffer.HasErrors());
  450. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  451. {.kind = TokenKind::Identifier(),
  452. .line = 1,
  453. .column = 4,
  454. .indent_column = 4,
  455. .text = "foo"},
  456. {.kind = TokenKind::Semi()},
  457. {.kind = TokenKind::Identifier(),
  458. .line = 1,
  459. .column = 8,
  460. .indent_column = 4,
  461. .text = "bar"},
  462. {.kind = TokenKind::Identifier(),
  463. .line = 2,
  464. .column = 1,
  465. .indent_column = 1,
  466. .text = "bar"},
  467. {.kind = TokenKind::Identifier(),
  468. .line = 3,
  469. .column = 3,
  470. .indent_column = 3,
  471. .text = "foo"},
  472. {.kind = TokenKind::Identifier(),
  473. .line = 3,
  474. .column = 7,
  475. .indent_column = 3,
  476. .text = "foo"},
  477. }));
  478. }
  479. auto GetAndDropLine(llvm::StringRef& text) -> std::string {
  480. auto newline_offset = text.find_first_of('\n');
  481. llvm::StringRef line = text.slice(0, newline_offset);
  482. if (newline_offset != llvm::StringRef::npos) {
  483. text = text.substr(newline_offset + 1);
  484. } else {
  485. text = "";
  486. }
  487. return line.str();
  488. }
  489. TEST_F(LexerTest, Printing) {
  490. auto buffer = Lex(";");
  491. ASSERT_FALSE(buffer.HasErrors());
  492. std::string print_storage;
  493. llvm::raw_string_ostream print_stream(print_storage);
  494. buffer.Print(print_stream);
  495. llvm::StringRef print = print_stream.str();
  496. EXPECT_THAT(GetAndDropLine(print),
  497. StrEq("token: { index: 0, kind: 'Semi', line: 1, column: 1, "
  498. "indent: 1, spelling: ';' }"));
  499. EXPECT_TRUE(print.empty()) << print;
  500. // Test kind padding.
  501. buffer = Lex("(;foo;)");
  502. ASSERT_FALSE(buffer.HasErrors());
  503. print_storage.clear();
  504. buffer.Print(print_stream);
  505. print = print_stream.str();
  506. EXPECT_THAT(GetAndDropLine(print),
  507. StrEq("token: { index: 0, kind: 'OpenParen', line: 1, column: "
  508. "1, indent: 1, spelling: '(', closing_token: 4 }"));
  509. EXPECT_THAT(GetAndDropLine(print),
  510. StrEq("token: { index: 1, kind: 'Semi', line: 1, column: "
  511. "2, indent: 1, spelling: ';' }"));
  512. EXPECT_THAT(GetAndDropLine(print),
  513. StrEq("token: { index: 2, kind: 'Identifier', line: 1, column: "
  514. "3, indent: 1, spelling: 'foo', identifier: 0 }"));
  515. EXPECT_THAT(GetAndDropLine(print),
  516. StrEq("token: { index: 3, kind: 'Semi', line: 1, column: "
  517. "6, indent: 1, spelling: ';' }"));
  518. EXPECT_THAT(GetAndDropLine(print),
  519. StrEq("token: { index: 4, kind: 'CloseParen', line: 1, column: "
  520. "7, indent: 1, spelling: ')', opening_token: 0 }"));
  521. EXPECT_TRUE(print.empty()) << print;
  522. // Test digit padding with max values of 9, 10, and 11.
  523. buffer = Lex(";\n\n\n\n\n\n\n\n\n\n ;;");
  524. ASSERT_FALSE(buffer.HasErrors());
  525. print_storage.clear();
  526. buffer.Print(print_stream);
  527. print = print_stream.str();
  528. EXPECT_THAT(GetAndDropLine(print),
  529. StrEq("token: { index: 0, kind: 'Semi', line: 1, column: 1, "
  530. "indent: 1, spelling: ';' }"));
  531. EXPECT_THAT(GetAndDropLine(print),
  532. StrEq("token: { index: 1, kind: 'Semi', line: 11, column: 9, "
  533. "indent: 9, spelling: ';' }"));
  534. EXPECT_THAT(GetAndDropLine(print),
  535. StrEq("token: { index: 2, kind: 'Semi', line: 11, column: 10, "
  536. "indent: 9, spelling: ';' }"));
  537. EXPECT_TRUE(print.empty()) << print;
  538. }
TEST_F(LexerTest, PrintingAsYaml) {
  // Test that we can parse this into YAML and verify line and indent data.
  auto buffer = Lex("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n\n");
  ASSERT_FALSE(buffer.HasErrors());
  std::string print_output;
  llvm::raw_string_ostream print_stream(print_output);
  buffer.Print(print_stream);
  print_stream.flush();
  // Parse the output into a YAML stream. This will print errors to stderr.
  llvm::SourceMgr source_manager;
  llvm::yaml::Stream yaml_stream(print_output, source_manager);
  auto yaml_it = yaml_stream.begin();
  auto* root_node = llvm::dyn_cast<llvm::yaml::MappingNode>(yaml_it->getRoot());
  ASSERT_THAT(root_node, NotNull());
  // Walk the top-level mapping of tokens, dig out the sub-mapping of data for
  // each token, and then verify those entries.
  // First token: the `;` on line 2, behind one space of indent.
  auto mapping_it = llvm::cast<llvm::yaml::MappingNode>(root_node)->begin();
  auto* token_node = llvm::dyn_cast<llvm::yaml::KeyValueNode>(&*mapping_it);
  ASSERT_THAT(token_node, NotNull());
  auto* token_key_node =
      llvm::dyn_cast<llvm::yaml::ScalarNode>(token_node->getKey());
  ASSERT_THAT(token_key_node, NotNull());
  EXPECT_THAT(token_key_node->getRawValue(), StrEq("token"));
  auto* token_value_node =
      llvm::dyn_cast<llvm::yaml::MappingNode>(token_node->getValue());
  ASSERT_THAT(token_value_node, NotNull());
  auto token_it = token_value_node->begin();
  EXPECT_THAT(&*token_it, IsKeyValueScalars("index", "0"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("kind", "Semi"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("line", "2"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("column", "2"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("indent", "2"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("spelling", ";"));
  EXPECT_THAT(++token_it, Eq(token_value_node->end()));
  // Second token: the first `;` on line 5, at column 1.
  ++mapping_it;
  token_node = llvm::dyn_cast<llvm::yaml::KeyValueNode>(&*mapping_it);
  ASSERT_THAT(token_node, NotNull());
  token_key_node = llvm::dyn_cast<llvm::yaml::ScalarNode>(token_node->getKey());
  ASSERT_THAT(token_key_node, NotNull());
  EXPECT_THAT(token_key_node->getRawValue(), StrEq("token"));
  token_value_node =
      llvm::dyn_cast<llvm::yaml::MappingNode>(token_node->getValue());
  ASSERT_THAT(token_value_node, NotNull());
  token_it = token_value_node->begin();
  EXPECT_THAT(&*token_it, IsKeyValueScalars("index", "1"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("kind", "Semi"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("line", "5"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("column", "1"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("indent", "1"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("spelling", ";"));
  EXPECT_THAT(++token_it, Eq(token_value_node->end()));
  // Third token: the second `;` on line 5, at column 3 (line indent stays 1).
  ++mapping_it;
  token_node = llvm::dyn_cast<llvm::yaml::KeyValueNode>(&*mapping_it);
  ASSERT_THAT(token_node, NotNull());
  token_key_node = llvm::dyn_cast<llvm::yaml::ScalarNode>(token_node->getKey());
  ASSERT_THAT(token_key_node, NotNull());
  EXPECT_THAT(token_key_node->getRawValue(), StrEq("token"));
  token_value_node =
      llvm::dyn_cast<llvm::yaml::MappingNode>(token_node->getValue());
  ASSERT_THAT(token_value_node, NotNull());
  token_it = token_value_node->begin();
  EXPECT_THAT(&*token_it, IsKeyValueScalars("index", "2"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("kind", "Semi"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("line", "5"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("column", "3"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("indent", "1"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("spelling", ";"));
  EXPECT_THAT(++token_it, Eq(token_value_node->end()));
  // No further tokens or YAML documents should remain.
  ASSERT_THAT(++mapping_it, Eq(root_node->end()));
  ASSERT_THAT(++yaml_it, Eq(yaml_stream.end()));
}
  625. } // namespace
  626. } // namespace Carbon