  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. #include "toolchain/lexer/tokenized_buffer.h"
  5. #include <gmock/gmock.h>
  6. #include <gtest/gtest.h>
  7. #include <iterator>
  8. #include "llvm/ADT/ArrayRef.h"
  9. #include "llvm/ADT/None.h"
  10. #include "llvm/ADT/Sequence.h"
  11. #include "llvm/ADT/SmallString.h"
  12. #include "llvm/ADT/Twine.h"
  13. #include "llvm/Support/SourceMgr.h"
  14. #include "llvm/Support/raw_ostream.h"
  15. #include "toolchain/common/yaml_test_helpers.h"
  16. #include "toolchain/diagnostics/diagnostic_emitter.h"
  17. #include "toolchain/diagnostics/mocks.h"
  18. #include "toolchain/lexer/tokenized_buffer_test_helpers.h"
  19. namespace Carbon::Testing {
  20. namespace {
  21. using ::testing::ElementsAre;
  22. using ::testing::Eq;
  23. using ::testing::HasSubstr;
  24. using ::testing::StrEq;
  25. class LexerTest : public ::testing::Test {
  26. protected:
  27. auto GetSourceBuffer(llvm::Twine text) -> SourceBuffer& {
  28. source_storage.push_back(SourceBuffer::CreateFromText(text.str()));
  29. return source_storage.back();
  30. }
  31. auto Lex(llvm::Twine text,
  32. DiagnosticConsumer& consumer = ConsoleDiagnosticConsumer())
  33. -> TokenizedBuffer {
  34. return TokenizedBuffer::Lex(GetSourceBuffer(text), consumer);
  35. }
  36. llvm::SmallVector<SourceBuffer, 16> source_storage;
  37. };
  38. TEST_F(LexerTest, HandlesEmptyBuffer) {
  39. auto buffer = Lex("");
  40. EXPECT_FALSE(buffer.HasErrors());
  41. EXPECT_THAT(
  42. buffer,
  43. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile()}}));
  44. }
  45. TEST_F(LexerTest, TracksLinesAndColumns) {
  46. auto buffer = Lex("\n ;;\n ;;;\n x\"foo\" \"\"\"baz\n a\n \"\"\" y");
  47. EXPECT_FALSE(buffer.HasErrors());
  48. EXPECT_THAT(buffer,
  49. HasTokens(llvm::ArrayRef<ExpectedToken>{
  50. {.kind = TokenKind::Semi(),
  51. .line = 2,
  52. .column = 3,
  53. .indent_column = 3},
  54. {.kind = TokenKind::Semi(),
  55. .line = 2,
  56. .column = 4,
  57. .indent_column = 3},
  58. {.kind = TokenKind::Semi(),
  59. .line = 3,
  60. .column = 4,
  61. .indent_column = 4},
  62. {.kind = TokenKind::Semi(),
  63. .line = 3,
  64. .column = 5,
  65. .indent_column = 4},
  66. {.kind = TokenKind::Semi(),
  67. .line = 3,
  68. .column = 6,
  69. .indent_column = 4},
  70. {.kind = TokenKind::Identifier(),
  71. .line = 4,
  72. .column = 4,
  73. .indent_column = 4,
  74. .text = "x"},
  75. {.kind = TokenKind::StringLiteral(),
  76. .line = 4,
  77. .column = 5,
  78. .indent_column = 4},
  79. {.kind = TokenKind::StringLiteral(),
  80. .line = 4,
  81. .column = 11,
  82. .indent_column = 4},
  83. {.kind = TokenKind::Identifier(),
  84. .line = 6,
  85. .column = 6,
  86. .indent_column = 11,
  87. .text = "y"},
  88. {.kind = TokenKind::EndOfFile(), .line = 6, .column = 7},
  89. }));
  90. }
  91. TEST_F(LexerTest, HandlesNumericLiteral) {
  92. auto buffer = Lex("12-578\n 1 2\n0x12_3ABC\n0b10_10_11\n1_234_567\n1.5e9");
  93. EXPECT_FALSE(buffer.HasErrors());
  94. ASSERT_THAT(buffer,
  95. HasTokens(llvm::ArrayRef<ExpectedToken>{
  96. {.kind = TokenKind::IntegerLiteral(),
  97. .line = 1,
  98. .column = 1,
  99. .indent_column = 1,
  100. .text = "12"},
  101. {.kind = TokenKind::Minus(),
  102. .line = 1,
  103. .column = 3,
  104. .indent_column = 1},
  105. {.kind = TokenKind::IntegerLiteral(),
  106. .line = 1,
  107. .column = 4,
  108. .indent_column = 1,
  109. .text = "578"},
  110. {.kind = TokenKind::IntegerLiteral(),
  111. .line = 2,
  112. .column = 3,
  113. .indent_column = 3,
  114. .text = "1"},
  115. {.kind = TokenKind::IntegerLiteral(),
  116. .line = 2,
  117. .column = 6,
  118. .indent_column = 3,
  119. .text = "2"},
  120. {.kind = TokenKind::IntegerLiteral(),
  121. .line = 3,
  122. .column = 1,
  123. .indent_column = 1,
  124. .text = "0x12_3ABC"},
  125. {.kind = TokenKind::IntegerLiteral(),
  126. .line = 4,
  127. .column = 1,
  128. .indent_column = 1,
  129. .text = "0b10_10_11"},
  130. {.kind = TokenKind::IntegerLiteral(),
  131. .line = 5,
  132. .column = 1,
  133. .indent_column = 1,
  134. .text = "1_234_567"},
  135. {.kind = TokenKind::RealLiteral(),
  136. .line = 6,
  137. .column = 1,
  138. .indent_column = 1,
  139. .text = "1.5e9"},
  140. {.kind = TokenKind::EndOfFile(), .line = 6, .column = 6},
  141. }));
  142. auto token_12 = buffer.Tokens().begin();
  143. EXPECT_EQ(buffer.GetIntegerLiteral(*token_12), 12);
  144. auto token_578 = buffer.Tokens().begin() + 2;
  145. EXPECT_EQ(buffer.GetIntegerLiteral(*token_578), 578);
  146. auto token_1 = buffer.Tokens().begin() + 3;
  147. EXPECT_EQ(buffer.GetIntegerLiteral(*token_1), 1);
  148. auto token_2 = buffer.Tokens().begin() + 4;
  149. EXPECT_EQ(buffer.GetIntegerLiteral(*token_2), 2);
  150. auto token_0x12_3abc = buffer.Tokens().begin() + 5;
  151. EXPECT_EQ(buffer.GetIntegerLiteral(*token_0x12_3abc), 0x12'3abc);
  152. auto token_0b10_10_11 = buffer.Tokens().begin() + 6;
  153. EXPECT_EQ(buffer.GetIntegerLiteral(*token_0b10_10_11), 0b10'10'11);
  154. auto token_1_234_567 = buffer.Tokens().begin() + 7;
  155. EXPECT_EQ(buffer.GetIntegerLiteral(*token_1_234_567), 1'234'567);
  156. auto token_1_5e9 = buffer.Tokens().begin() + 8;
  157. auto value_1_5e9 = buffer.GetRealLiteral(*token_1_5e9);
  158. EXPECT_EQ(value_1_5e9.Mantissa().getZExtValue(), 15);
  159. EXPECT_EQ(value_1_5e9.Exponent().getSExtValue(), 8);
  160. EXPECT_EQ(value_1_5e9.IsDecimal(), true);
  161. }
  162. TEST_F(LexerTest, HandlesInvalidNumericLiterals) {
  163. auto buffer = Lex("14x 15_49 0x3.5q 0x3_4.5_6 0ops");
  164. EXPECT_TRUE(buffer.HasErrors());
  165. ASSERT_THAT(buffer,
  166. HasTokens(llvm::ArrayRef<ExpectedToken>{
  167. {.kind = TokenKind::Error(),
  168. .line = 1,
  169. .column = 1,
  170. .indent_column = 1,
  171. .text = "14x"},
  172. {.kind = TokenKind::IntegerLiteral(),
  173. .line = 1,
  174. .column = 5,
  175. .indent_column = 1,
  176. .text = "15_49"},
  177. {.kind = TokenKind::Error(),
  178. .line = 1,
  179. .column = 11,
  180. .indent_column = 1,
  181. .text = "0x3.5q"},
  182. {.kind = TokenKind::RealLiteral(),
  183. .line = 1,
  184. .column = 18,
  185. .indent_column = 1,
  186. .text = "0x3_4.5_6"},
  187. {.kind = TokenKind::Error(),
  188. .line = 1,
  189. .column = 28,
  190. .indent_column = 1,
  191. .text = "0ops"},
  192. {.kind = TokenKind::EndOfFile(), .line = 1, .column = 32},
  193. }));
  194. }
  195. TEST_F(LexerTest, SplitsNumericLiteralsProperly) {
  196. llvm::StringLiteral source_text = R"(
  197. 1.
  198. .2
  199. 3.+foo
  200. 4.0-bar
  201. 5.0e+123+456
  202. 6.0e+1e+2
  203. 1e7
  204. 8..10
  205. 9.0.9.5
  206. 10.foo
  207. 11.0.foo
  208. 12e+1
  209. 13._
  210. )";
  211. auto buffer = Lex(source_text);
  212. EXPECT_TRUE(buffer.HasErrors());
  213. EXPECT_THAT(buffer,
  214. HasTokens(llvm::ArrayRef<ExpectedToken>{
  215. {.kind = TokenKind::IntegerLiteral(), .text = "1"},
  216. {.kind = TokenKind::Period()},
  217. // newline
  218. {.kind = TokenKind::Period()},
  219. {.kind = TokenKind::IntegerLiteral(), .text = "2"},
  220. // newline
  221. {.kind = TokenKind::IntegerLiteral(), .text = "3"},
  222. {.kind = TokenKind::Period()},
  223. {.kind = TokenKind::Plus()},
  224. {.kind = TokenKind::Identifier(), .text = "foo"},
  225. // newline
  226. {.kind = TokenKind::RealLiteral(), .text = "4.0"},
  227. {.kind = TokenKind::Minus()},
  228. {.kind = TokenKind::Identifier(), .text = "bar"},
  229. // newline
  230. {.kind = TokenKind::RealLiteral(), .text = "5.0e+123"},
  231. {.kind = TokenKind::Plus()},
  232. {.kind = TokenKind::IntegerLiteral(), .text = "456"},
  233. // newline
  234. {.kind = TokenKind::Error(), .text = "6.0e+1e"},
  235. {.kind = TokenKind::Plus()},
  236. {.kind = TokenKind::IntegerLiteral(), .text = "2"},
  237. // newline
  238. {.kind = TokenKind::Error(), .text = "1e7"},
  239. // newline
  240. {.kind = TokenKind::IntegerLiteral(), .text = "8"},
  241. {.kind = TokenKind::Period()},
  242. {.kind = TokenKind::Period()},
  243. {.kind = TokenKind::IntegerLiteral(), .text = "10"},
  244. // newline
  245. {.kind = TokenKind::RealLiteral(), .text = "9.0"},
  246. {.kind = TokenKind::Period()},
  247. {.kind = TokenKind::RealLiteral(), .text = "9.5"},
  248. // newline
  249. {.kind = TokenKind::Error(), .text = "10.foo"},
  250. // newline
  251. {.kind = TokenKind::RealLiteral(), .text = "11.0"},
  252. {.kind = TokenKind::Period()},
  253. {.kind = TokenKind::Identifier(), .text = "foo"},
  254. // newline
  255. {.kind = TokenKind::Error(), .text = "12e"},
  256. {.kind = TokenKind::Plus()},
  257. {.kind = TokenKind::IntegerLiteral(), .text = "1"},
  258. // newline
  259. {.kind = TokenKind::IntegerLiteral(), .text = "13"},
  260. {.kind = TokenKind::Period()},
  261. {.kind = TokenKind::UnderscoreKeyword()},
  262. // newline
  263. {.kind = TokenKind::EndOfFile()},
  264. }));
  265. }
  266. TEST_F(LexerTest, HandlesGarbageCharacters) {
  267. constexpr char GarbageText[] = "$$💩-$\n$\0$12$\n\"\n\"\\";
  268. auto buffer = Lex(llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
  269. EXPECT_TRUE(buffer.HasErrors());
  270. EXPECT_THAT(
  271. buffer,
  272. HasTokens(llvm::ArrayRef<ExpectedToken>{
  273. {.kind = TokenKind::Error(),
  274. .line = 1,
  275. .column = 1,
  276. .text = llvm::StringRef("$$💩", 6)},
  277. // 💩 takes 4 bytes, and we count column as bytes offset.
  278. {.kind = TokenKind::Minus(), .line = 1, .column = 7},
  279. {.kind = TokenKind::Error(), .line = 1, .column = 8, .text = "$"},
  280. // newline
  281. {.kind = TokenKind::Error(),
  282. .line = 2,
  283. .column = 1,
  284. .text = llvm::StringRef("$\0$", 3)},
  285. {.kind = TokenKind::IntegerLiteral(),
  286. .line = 2,
  287. .column = 4,
  288. .text = "12"},
  289. {.kind = TokenKind::Error(), .line = 2, .column = 6, .text = "$"},
  290. // newline
  291. {.kind = TokenKind::Error(),
  292. .line = 3,
  293. .column = 1,
  294. .text = llvm::StringRef("\"", 1)},
  295. // newline
  296. {.kind = TokenKind::Error(),
  297. .line = 4,
  298. .column = 1,
  299. .text = llvm::StringRef("\"", 1)},
  300. {.kind = TokenKind::Backslash(),
  301. .line = 4,
  302. .column = 2,
  303. .text = llvm::StringRef("\\", 1)},
  304. {.kind = TokenKind::EndOfFile(), .line = 4, .column = 3},
  305. }));
  306. }
  307. TEST_F(LexerTest, Symbols) {
  308. // We don't need to exhaustively test symbols here as they're handled with
  309. // common code, but we want to check specific patterns to verify things like
  310. // max-munch rule and handling of interesting symbols.
  311. auto buffer = Lex("<<<");
  312. EXPECT_FALSE(buffer.HasErrors());
  313. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  314. {TokenKind::LessLess()},
  315. {TokenKind::Less()},
  316. {TokenKind::EndOfFile()},
  317. }));
  318. buffer = Lex("<<=>>");
  319. EXPECT_FALSE(buffer.HasErrors());
  320. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  321. {TokenKind::LessLessEqual()},
  322. {TokenKind::GreaterGreater()},
  323. {TokenKind::EndOfFile()},
  324. }));
  325. buffer = Lex("< <=> >");
  326. EXPECT_FALSE(buffer.HasErrors());
  327. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  328. {TokenKind::Less()},
  329. {TokenKind::LessEqualGreater()},
  330. {TokenKind::Greater()},
  331. {TokenKind::EndOfFile()},
  332. }));
  333. buffer = Lex("\\/?@&^!");
  334. EXPECT_FALSE(buffer.HasErrors());
  335. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  336. {TokenKind::Backslash()},
  337. {TokenKind::Slash()},
  338. {TokenKind::Question()},
  339. {TokenKind::At()},
  340. {TokenKind::Amp()},
  341. {TokenKind::Caret()},
  342. {TokenKind::Exclaim()},
  343. {TokenKind::EndOfFile()},
  344. }));
  345. }
  346. TEST_F(LexerTest, Parens) {
  347. auto buffer = Lex("()");
  348. EXPECT_FALSE(buffer.HasErrors());
  349. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  350. {TokenKind::OpenParen()},
  351. {TokenKind::CloseParen()},
  352. {TokenKind::EndOfFile()},
  353. }));
  354. buffer = Lex("((()()))");
  355. EXPECT_FALSE(buffer.HasErrors());
  356. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  357. {TokenKind::OpenParen()},
  358. {TokenKind::OpenParen()},
  359. {TokenKind::OpenParen()},
  360. {TokenKind::CloseParen()},
  361. {TokenKind::OpenParen()},
  362. {TokenKind::CloseParen()},
  363. {TokenKind::CloseParen()},
  364. {TokenKind::CloseParen()},
  365. {TokenKind::EndOfFile()},
  366. }));
  367. }
  368. TEST_F(LexerTest, CurlyBraces) {
  369. auto buffer = Lex("{}");
  370. EXPECT_FALSE(buffer.HasErrors());
  371. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  372. {TokenKind::OpenCurlyBrace()},
  373. {TokenKind::CloseCurlyBrace()},
  374. {TokenKind::EndOfFile()},
  375. }));
  376. buffer = Lex("{{{}{}}}");
  377. EXPECT_FALSE(buffer.HasErrors());
  378. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  379. {TokenKind::OpenCurlyBrace()},
  380. {TokenKind::OpenCurlyBrace()},
  381. {TokenKind::OpenCurlyBrace()},
  382. {TokenKind::CloseCurlyBrace()},
  383. {TokenKind::OpenCurlyBrace()},
  384. {TokenKind::CloseCurlyBrace()},
  385. {TokenKind::CloseCurlyBrace()},
  386. {TokenKind::CloseCurlyBrace()},
  387. {TokenKind::EndOfFile()},
  388. }));
  389. }
  390. TEST_F(LexerTest, MatchingGroups) {
  391. {
  392. TokenizedBuffer buffer = Lex("(){}");
  393. ASSERT_FALSE(buffer.HasErrors());
  394. auto it = buffer.Tokens().begin();
  395. auto open_paren_token = *it++;
  396. auto close_paren_token = *it++;
  397. EXPECT_EQ(close_paren_token,
  398. buffer.GetMatchedClosingToken(open_paren_token));
  399. EXPECT_EQ(open_paren_token,
  400. buffer.GetMatchedOpeningToken(close_paren_token));
  401. auto open_curly_token = *it++;
  402. auto close_curly_token = *it++;
  403. EXPECT_EQ(close_curly_token,
  404. buffer.GetMatchedClosingToken(open_curly_token));
  405. EXPECT_EQ(open_curly_token,
  406. buffer.GetMatchedOpeningToken(close_curly_token));
  407. auto eof_token = *it++;
  408. EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::EndOfFile());
  409. EXPECT_EQ(buffer.Tokens().end(), it);
  410. }
  411. {
  412. TokenizedBuffer buffer = Lex("({x}){(y)} {{((z))}}");
  413. ASSERT_FALSE(buffer.HasErrors());
  414. auto it = buffer.Tokens().begin();
  415. auto open_paren_token = *it++;
  416. auto open_curly_token = *it++;
  417. ASSERT_EQ("x", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
  418. auto close_curly_token = *it++;
  419. auto close_paren_token = *it++;
  420. EXPECT_EQ(close_paren_token,
  421. buffer.GetMatchedClosingToken(open_paren_token));
  422. EXPECT_EQ(open_paren_token,
  423. buffer.GetMatchedOpeningToken(close_paren_token));
  424. EXPECT_EQ(close_curly_token,
  425. buffer.GetMatchedClosingToken(open_curly_token));
  426. EXPECT_EQ(open_curly_token,
  427. buffer.GetMatchedOpeningToken(close_curly_token));
  428. open_curly_token = *it++;
  429. open_paren_token = *it++;
  430. ASSERT_EQ("y", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
  431. close_paren_token = *it++;
  432. close_curly_token = *it++;
  433. EXPECT_EQ(close_curly_token,
  434. buffer.GetMatchedClosingToken(open_curly_token));
  435. EXPECT_EQ(open_curly_token,
  436. buffer.GetMatchedOpeningToken(close_curly_token));
  437. EXPECT_EQ(close_paren_token,
  438. buffer.GetMatchedClosingToken(open_paren_token));
  439. EXPECT_EQ(open_paren_token,
  440. buffer.GetMatchedOpeningToken(close_paren_token));
  441. open_curly_token = *it++;
  442. auto inner_open_curly_token = *it++;
  443. open_paren_token = *it++;
  444. auto inner_open_paren_token = *it++;
  445. ASSERT_EQ("z", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
  446. auto inner_close_paren_token = *it++;
  447. close_paren_token = *it++;
  448. auto inner_close_curly_token = *it++;
  449. close_curly_token = *it++;
  450. EXPECT_EQ(close_curly_token,
  451. buffer.GetMatchedClosingToken(open_curly_token));
  452. EXPECT_EQ(open_curly_token,
  453. buffer.GetMatchedOpeningToken(close_curly_token));
  454. EXPECT_EQ(inner_close_curly_token,
  455. buffer.GetMatchedClosingToken(inner_open_curly_token));
  456. EXPECT_EQ(inner_open_curly_token,
  457. buffer.GetMatchedOpeningToken(inner_close_curly_token));
  458. EXPECT_EQ(close_paren_token,
  459. buffer.GetMatchedClosingToken(open_paren_token));
  460. EXPECT_EQ(open_paren_token,
  461. buffer.GetMatchedOpeningToken(close_paren_token));
  462. EXPECT_EQ(inner_close_paren_token,
  463. buffer.GetMatchedClosingToken(inner_open_paren_token));
  464. EXPECT_EQ(inner_open_paren_token,
  465. buffer.GetMatchedOpeningToken(inner_close_paren_token));
  466. auto eof_token = *it++;
  467. EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::EndOfFile());
  468. EXPECT_EQ(buffer.Tokens().end(), it);
  469. }
  470. }
  471. TEST_F(LexerTest, MismatchedGroups) {
  472. auto buffer = Lex("{");
  473. EXPECT_TRUE(buffer.HasErrors());
  474. EXPECT_THAT(buffer,
  475. HasTokens(llvm::ArrayRef<ExpectedToken>{
  476. {TokenKind::OpenCurlyBrace()},
  477. {.kind = TokenKind::CloseCurlyBrace(), .recovery = true},
  478. {TokenKind::EndOfFile()},
  479. }));
  480. buffer = Lex("}");
  481. EXPECT_TRUE(buffer.HasErrors());
  482. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  483. {.kind = TokenKind::Error(), .text = "}"},
  484. {TokenKind::EndOfFile()},
  485. }));
  486. buffer = Lex("{(}");
  487. EXPECT_TRUE(buffer.HasErrors());
  488. EXPECT_THAT(
  489. buffer,
  490. HasTokens(llvm::ArrayRef<ExpectedToken>{
  491. {.kind = TokenKind::OpenCurlyBrace(), .column = 1},
  492. {.kind = TokenKind::OpenParen(), .column = 2},
  493. {.kind = TokenKind::CloseParen(), .column = 3, .recovery = true},
  494. {.kind = TokenKind::CloseCurlyBrace(), .column = 3},
  495. {TokenKind::EndOfFile()},
  496. }));
  497. buffer = Lex(")({)");
  498. EXPECT_TRUE(buffer.HasErrors());
  499. EXPECT_THAT(
  500. buffer,
  501. HasTokens(llvm::ArrayRef<ExpectedToken>{
  502. {.kind = TokenKind::Error(), .column = 1, .text = ")"},
  503. {.kind = TokenKind::OpenParen(), .column = 2},
  504. {.kind = TokenKind::OpenCurlyBrace(), .column = 3},
  505. {.kind = TokenKind::CloseCurlyBrace(), .column = 4, .recovery = true},
  506. {.kind = TokenKind::CloseParen(), .column = 4},
  507. {TokenKind::EndOfFile()},
  508. }));
  509. }
  510. TEST_F(LexerTest, Whitespace) {
  511. auto buffer = Lex("{( } {(");
  512. // Whether there should be whitespace before/after each token.
  513. bool space[] = {true,
  514. // {
  515. false,
  516. // (
  517. true,
  518. // inserted )
  519. true,
  520. // }
  521. true,
  522. // {
  523. false,
  524. // (
  525. true,
  526. // inserted )
  527. true,
  528. // inserted }
  529. true,
  530. // EOF
  531. false};
  532. int pos = 0;
  533. for (TokenizedBuffer::Token token : buffer.Tokens()) {
  534. ASSERT_LT(pos, std::size(space));
  535. EXPECT_THAT(buffer.HasLeadingWhitespace(token), Eq(space[pos]));
  536. ++pos;
  537. ASSERT_LT(pos, std::size(space));
  538. EXPECT_THAT(buffer.HasTrailingWhitespace(token), Eq(space[pos]));
  539. }
  540. ASSERT_EQ(pos + 1, std::size(space));
  541. }
  542. TEST_F(LexerTest, Keywords) {
  543. auto buffer = Lex(" fn");
  544. EXPECT_FALSE(buffer.HasErrors());
  545. EXPECT_THAT(
  546. buffer,
  547. HasTokens(llvm::ArrayRef<ExpectedToken>{
  548. {.kind = TokenKind::FnKeyword(), .column = 4, .indent_column = 4},
  549. {TokenKind::EndOfFile()},
  550. }));
  551. buffer = Lex("and or not if else for loop return var break continue _");
  552. EXPECT_FALSE(buffer.HasErrors());
  553. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  554. {TokenKind::AndKeyword()},
  555. {TokenKind::OrKeyword()},
  556. {TokenKind::NotKeyword()},
  557. {TokenKind::IfKeyword()},
  558. {TokenKind::ElseKeyword()},
  559. {TokenKind::ForKeyword()},
  560. {TokenKind::LoopKeyword()},
  561. {TokenKind::ReturnKeyword()},
  562. {TokenKind::VarKeyword()},
  563. {TokenKind::BreakKeyword()},
  564. {TokenKind::ContinueKeyword()},
  565. {TokenKind::UnderscoreKeyword()},
  566. {TokenKind::EndOfFile()},
  567. }));
  568. }
  569. TEST_F(LexerTest, Comments) {
  570. auto buffer = Lex(" ;\n // foo\n ;\n");
  571. EXPECT_FALSE(buffer.HasErrors());
  572. EXPECT_THAT(buffer,
  573. HasTokens(llvm::ArrayRef<ExpectedToken>{
  574. {.kind = TokenKind::Semi(),
  575. .line = 1,
  576. .column = 2,
  577. .indent_column = 2},
  578. {.kind = TokenKind::Semi(),
  579. .line = 3,
  580. .column = 3,
  581. .indent_column = 3},
  582. {.kind = TokenKind::EndOfFile(), .line = 3, .column = 4},
  583. }));
  584. buffer = Lex("// foo\n//\n// bar");
  585. EXPECT_FALSE(buffer.HasErrors());
  586. EXPECT_THAT(
  587. buffer,
  588. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile()}}));
  589. // Make sure weird characters aren't a problem.
  590. buffer = Lex(" // foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
  591. EXPECT_FALSE(buffer.HasErrors());
  592. EXPECT_THAT(
  593. buffer,
  594. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile()}}));
  595. // Make sure we can lex a comment at the end of the input.
  596. buffer = Lex("//");
  597. EXPECT_FALSE(buffer.HasErrors());
  598. EXPECT_THAT(
  599. buffer,
  600. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile()}}));
  601. }
  602. TEST_F(LexerTest, InvalidComments) {
  603. llvm::StringLiteral testcases[] = {
  604. " /// foo\n",
  605. "foo // bar\n",
  606. "//! hello",
  607. " //world",
  608. };
  609. for (llvm::StringLiteral testcase : testcases) {
  610. auto buffer = Lex(testcase);
  611. EXPECT_TRUE(buffer.HasErrors());
  612. }
  613. }
  614. TEST_F(LexerTest, Identifiers) {
  615. auto buffer = Lex(" foobar");
  616. EXPECT_FALSE(buffer.HasErrors());
  617. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  618. {.kind = TokenKind::Identifier(),
  619. .column = 4,
  620. .indent_column = 4,
  621. .text = "foobar"},
  622. {TokenKind::EndOfFile()},
  623. }));
  624. // Check different kinds of identifier character sequences.
  625. buffer = Lex("_foo_bar");
  626. EXPECT_FALSE(buffer.HasErrors());
  627. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  628. {.kind = TokenKind::Identifier(), .text = "_foo_bar"},
  629. {TokenKind::EndOfFile()},
  630. }));
  631. buffer = Lex("foo2bar00");
  632. EXPECT_FALSE(buffer.HasErrors());
  633. EXPECT_THAT(buffer,
  634. HasTokens(llvm::ArrayRef<ExpectedToken>{
  635. {.kind = TokenKind::Identifier(), .text = "foo2bar00"},
  636. {TokenKind::EndOfFile()},
  637. }));
  638. // Check that we can parse identifiers that start with a keyword.
  639. buffer = Lex("fnord");
  640. EXPECT_FALSE(buffer.HasErrors());
  641. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  642. {.kind = TokenKind::Identifier(), .text = "fnord"},
  643. {TokenKind::EndOfFile()},
  644. }));
  645. // Check multiple identifiers with indent and interning.
  646. buffer = Lex(" foo;bar\nbar \n foo\tfoo");
  647. EXPECT_FALSE(buffer.HasErrors());
  648. EXPECT_THAT(buffer,
  649. HasTokens(llvm::ArrayRef<ExpectedToken>{
  650. {.kind = TokenKind::Identifier(),
  651. .line = 1,
  652. .column = 4,
  653. .indent_column = 4,
  654. .text = "foo"},
  655. {.kind = TokenKind::Semi()},
  656. {.kind = TokenKind::Identifier(),
  657. .line = 1,
  658. .column = 8,
  659. .indent_column = 4,
  660. .text = "bar"},
  661. {.kind = TokenKind::Identifier(),
  662. .line = 2,
  663. .column = 1,
  664. .indent_column = 1,
  665. .text = "bar"},
  666. {.kind = TokenKind::Identifier(),
  667. .line = 3,
  668. .column = 3,
  669. .indent_column = 3,
  670. .text = "foo"},
  671. {.kind = TokenKind::Identifier(),
  672. .line = 3,
  673. .column = 7,
  674. .indent_column = 3,
  675. .text = "foo"},
  676. {.kind = TokenKind::EndOfFile(), .line = 3, .column = 10},
  677. }));
  678. }
  679. TEST_F(LexerTest, StringLiterals) {
  680. llvm::StringLiteral testcase = R"(
  681. "hello world\n"
  682. """foo
  683. test \
  684. \xAB
  685. """ trailing
  686. #"""#
  687. "\0"
  688. #"\0"foo"\1"#
  689. """x"""
  690. )";
  691. auto buffer = Lex(testcase);
  692. EXPECT_FALSE(buffer.HasErrors());
  693. EXPECT_THAT(buffer,
  694. HasTokens(llvm::ArrayRef<ExpectedToken>{
  695. {.kind = TokenKind::StringLiteral(),
  696. .line = 2,
  697. .column = 5,
  698. .indent_column = 5,
  699. .string_contents = {"hello world\n"}},
  700. {.kind = TokenKind::StringLiteral(),
  701. .line = 4,
  702. .column = 5,
  703. .indent_column = 5,
  704. .string_contents = {" test \xAB\n"}},
  705. {.kind = TokenKind::Identifier(),
  706. .line = 7,
  707. .column = 10,
  708. .indent_column = 5,
  709. .text = "trailing"},
  710. {.kind = TokenKind::StringLiteral(),
  711. .line = 9,
  712. .column = 7,
  713. .indent_column = 7,
  714. .string_contents = {"\""}},
  715. {.kind = TokenKind::StringLiteral(),
  716. .line = 11,
  717. .column = 5,
  718. .indent_column = 5,
  719. .string_contents = llvm::StringLiteral::withInnerNUL("\0")},
  720. {.kind = TokenKind::StringLiteral(),
  721. .line = 13,
  722. .column = 5,
  723. .indent_column = 5,
  724. .string_contents = {"\\0\"foo\"\\1"}},
  725. // """x""" is three string literals, not one.
  726. {.kind = TokenKind::StringLiteral(),
  727. .line = 15,
  728. .column = 5,
  729. .indent_column = 5,
  730. .string_contents = {""}},
  731. {.kind = TokenKind::StringLiteral(),
  732. .line = 15,
  733. .column = 7,
  734. .indent_column = 5,
  735. .string_contents = {"x"}},
  736. {.kind = TokenKind::StringLiteral(),
  737. .line = 15,
  738. .column = 10,
  739. .indent_column = 5,
  740. .string_contents = {""}},
  741. {.kind = TokenKind::EndOfFile(), .line = 16, .column = 3},
  742. }));
  743. }
  744. TEST_F(LexerTest, InvalidStringLiterals) {
  745. llvm::StringLiteral invalid[] = {
  746. R"(")",
  747. R"("""
  748. "")", //
  749. R"("\)", //
  750. R"("\")", //
  751. R"("\\)", //
  752. R"("\\\")", //
  753. R"(""")",
  754. R"("""
  755. )", //
  756. R"("""\)",
  757. R"(#"""
  758. """)",
  759. };
  760. for (llvm::StringLiteral test : invalid) {
  761. auto buffer = Lex(test);
  762. EXPECT_TRUE(buffer.HasErrors()) << "`" << test << "`";
  763. // We should have formed at least one error token.
  764. bool found_error = false;
  765. for (TokenizedBuffer::Token token : buffer.Tokens()) {
  766. if (buffer.GetKind(token) == TokenKind::Error()) {
  767. found_error = true;
  768. break;
  769. }
  770. }
  771. EXPECT_TRUE(found_error) << "`" << test << "`";
  772. }
  773. }
// Checks which `iN` / `uN` / `fN` spellings lex as type literals versus plain
// identifiers, and that the bit width of each type literal is exposed via
// `GetTypeLiteralSize`. The expected-token table below is the authority on
// which spellings count as type literals.
TEST_F(LexerTest, TypeLiterals) {
  llvm::StringLiteral testcase = R"(
    i0 i1 i20 i999999999999 i0x1
    u0 u1 u64 u64b
    f32 f80 f1 fi
    s1
  )";

  auto buffer = Lex(testcase);
  EXPECT_FALSE(buffer.HasErrors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // `i0` lexes as a plain identifier, not a type literal.
                  {.kind = TokenKind::Identifier(),
                   .line = 2,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"i0"}},
                  {.kind = TokenKind::IntegerTypeLiteral(),
                   .line = 2,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"i1"}},
                  {.kind = TokenKind::IntegerTypeLiteral(),
                   .line = 2,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"i20"}},
                  // Very large widths (beyond 64 bits) still lex as type
                  // literals.
                  {.kind = TokenKind::IntegerTypeLiteral(),
                   .line = 2,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"i999999999999"}},
                  // A hexadecimal-looking suffix is not a type literal.
                  {.kind = TokenKind::Identifier(),
                   .line = 2,
                   .column = 29,
                   .indent_column = 5,
                   .text = {"i0x1"}},
                  {.kind = TokenKind::Identifier(),
                   .line = 3,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"u0"}},
                  {.kind = TokenKind::UnsignedIntegerTypeLiteral(),
                   .line = 3,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"u1"}},
                  {.kind = TokenKind::UnsignedIntegerTypeLiteral(),
                   .line = 3,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"u64"}},
                  // A non-digit trailing character makes this an identifier.
                  {.kind = TokenKind::Identifier(),
                   .line = 3,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"u64b"}},
                  {.kind = TokenKind::FloatingPointTypeLiteral(),
                   .line = 4,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"f32"}},
                  {.kind = TokenKind::FloatingPointTypeLiteral(),
                   .line = 4,
                   .column = 9,
                   .indent_column = 5,
                   .text = {"f80"}},
                  {.kind = TokenKind::FloatingPointTypeLiteral(),
                   .line = 4,
                   .column = 13,
                   .indent_column = 5,
                   .text = {"f1"}},
                  {.kind = TokenKind::Identifier(),
                   .line = 4,
                   .column = 16,
                   .indent_column = 5,
                   .text = {"fi"}},
                  // `s` is not a recognized type-literal prefix.
                  {.kind = TokenKind::Identifier(),
                   .line = 5,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"s1"}},
                  {.kind = TokenKind::EndOfFile(), .line = 6, .column = 3},
              }));

  // Spot-check the extracted bit widths. Token indexes below refer to the
  // expected-token table above.
  auto token_i1 = buffer.Tokens().begin() + 1;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i1), 1);
  auto token_i20 = buffer.Tokens().begin() + 2;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i20), 20);
  auto token_i999999999999 = buffer.Tokens().begin() + 3;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i999999999999), 999999999999ull);
  auto token_u1 = buffer.Tokens().begin() + 6;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u1), 1);
  auto token_u64 = buffer.Tokens().begin() + 7;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u64), 64);
  auto token_f32 = buffer.Tokens().begin() + 9;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f32), 32);
  auto token_f80 = buffer.Tokens().begin() + 10;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f80), 80);
  auto token_f1 = buffer.Tokens().begin() + 11;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f1), 1);
}
// Verifies that each kind of lexing problem is reported to the diagnostic
// consumer with the expected (line, column) location and message substring.
TEST_F(LexerTest, Diagnostics) {
  llvm::StringLiteral testcase = R"(
    // Hello!
    var String x; // trailing comment
    //no space after comment
    "hello\bworld\xab"
    0x123abc
    #"
  )";

  Testing::MockDiagnosticConsumer consumer;
  // Line 3: comments are not allowed to trail other tokens.
  EXPECT_CALL(consumer, HandleDiagnostic(AllOf(
                            DiagnosticAt(3, 19),
                            DiagnosticMessage(HasSubstr("Trailing comment")))));
  // Line 4: `//no` — comment introducer must be followed by whitespace.
  EXPECT_CALL(consumer,
              HandleDiagnostic(AllOf(
                  DiagnosticAt(4, 7),
                  DiagnosticMessage(HasSubstr("Whitespace is required")))));
  // Line 5: `\b` is not a recognized escape sequence.
  EXPECT_CALL(
      consumer,
      HandleDiagnostic(AllOf(
          DiagnosticAt(5, 12),
          DiagnosticMessage(HasSubstr("Unrecognized escape sequence `b`")))));
  // Line 5: `\xab` — hex escape digits must be uppercase.
  EXPECT_CALL(
      consumer,
      HandleDiagnostic(AllOf(
          DiagnosticAt(5, 20),
          DiagnosticMessage(HasSubstr("two uppercase hexadecimal digits")))));
  // Line 6: `0x123abc` — lowercase hex digits are rejected here.
  EXPECT_CALL(
      consumer,
      HandleDiagnostic(AllOf(
          DiagnosticAt(6, 10),
          DiagnosticMessage(HasSubstr("Invalid digit 'a' in hexadecimal")))));
  // Line 7: `#"` does not start any valid token.
  EXPECT_CALL(consumer,
              HandleDiagnostic(AllOf(
                  DiagnosticAt(7, 5),
                  DiagnosticMessage(HasSubstr("unrecognized character")))));

  Lex(testcase, consumer);
}
  912. auto GetAndDropLine(llvm::StringRef& text) -> std::string {
  913. auto newline_offset = text.find_first_of('\n');
  914. llvm::StringRef line = text.slice(0, newline_offset);
  915. if (newline_offset != llvm::StringRef::npos) {
  916. text = text.substr(newline_offset + 1);
  917. } else {
  918. text = "";
  919. }
  920. return line.str();
  921. }
// Exercises TokenizedBuffer::Print's textual output, including the padding of
// kind names and of line/column/indent digits.
TEST_F(LexerTest, Printing) {
  auto buffer = Lex(";");
  ASSERT_FALSE(buffer.HasErrors());
  std::string print_storage;
  llvm::raw_string_ostream print_stream(print_storage);
  buffer.Print(print_stream);
  llvm::StringRef print = print_stream.str();
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 0, kind: 'Semi', line: 1, column: 1, "
                    "indent: 1, spelling: ';', has_trailing_space: true }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 1, kind: 'EndOfFile', line: 1, column: 2, "
                    "indent: 1, spelling: '' }"));
  // All printed lines should have been consumed.
  EXPECT_TRUE(print.empty()) << print;

  // Test kind padding.
  buffer = Lex("(;foo;)");
  ASSERT_FALSE(buffer.HasErrors());
  print_storage.clear();
  buffer.Print(print_stream);
  print = print_stream.str();
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 0, kind: 'OpenParen', line: 1, column: "
                    "1, indent: 1, spelling: '(', closing_token: 4 }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 1, kind: 'Semi', line: 1, column: "
                    "2, indent: 1, spelling: ';' }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 2, kind: 'Identifier', line: 1, column: "
                    "3, indent: 1, spelling: 'foo', identifier: 0 }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 3, kind: 'Semi', line: 1, column: "
                    "6, indent: 1, spelling: ';' }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 4, kind: 'CloseParen', line: 1, column: "
                    "7, indent: 1, spelling: ')', opening_token: 0, "
                    "has_trailing_space: true }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 5, kind: 'EndOfFile', line: 1, column: "
                    "8, indent: 1, spelling: '' }"));
  EXPECT_TRUE(print.empty()) << print;

  // Test digit padding with max values of 9, 10, and 11.
  // Ten newlines put the second semicolon on line 11; eight spaces of
  // indentation put it at column 9.
  buffer = Lex(";\n\n\n\n\n\n\n\n\n\n        ;;");
  ASSERT_FALSE(buffer.HasErrors());
  print_storage.clear();
  buffer.Print(print_stream);
  print = print_stream.str();
  EXPECT_THAT(
      GetAndDropLine(print),
      StrEq("token: { index: 0, kind: 'Semi', line: 1, column: 1, "
            "indent: 1, spelling: ';', has_trailing_space: true }"));
  EXPECT_THAT(
      GetAndDropLine(print),
      StrEq("token: { index: 1, kind: 'Semi', line: 11, column: 9, "
            "indent: 9, spelling: ';' }"));
  EXPECT_THAT(
      GetAndDropLine(print),
      StrEq("token: { index: 2, kind: 'Semi', line: 11, column: 10, "
            "indent: 9, spelling: ';', has_trailing_space: true }"));
  EXPECT_THAT(
      GetAndDropLine(print),
      StrEq("token: { index: 3, kind: 'EndOfFile', line: 11, column: 11, "
            "indent: 9, spelling: '' }"));
  EXPECT_TRUE(print.empty()) << print;
}
// Prints the buffer and re-parses the output as YAML, verifying the
// structured line/column/indent data for each token.
TEST_F(LexerTest, PrintingAsYaml) {
  // Test that we can parse this into YAML and verify line and indent data.
  auto buffer = Lex("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n\n");
  ASSERT_FALSE(buffer.HasErrors());
  std::string print_output;
  llvm::raw_string_ostream print_stream(print_output);
  buffer.Print(print_stream);
  print_stream.flush();

  // The output is one mapping with a `token` entry per lexed token.
  EXPECT_THAT(Yaml::Value::FromText(print_output),
              ElementsAre(Yaml::MappingValue{
                  {"token", Yaml::MappingValue{{"index", "0"},
                                               {"kind", "Semi"},
                                               {"line", "2"},
                                               {"column", "2"},
                                               {"indent", "2"},
                                               {"spelling", ";"},
                                               {"has_trailing_space", "true"}}},
                  {"token", Yaml::MappingValue{{"index", "1"},
                                               {"kind", "Semi"},
                                               {"line", "5"},
                                               {"column", "1"},
                                               {"indent", "1"},
                                               {"spelling", ";"},
                                               {"has_trailing_space", "true"}}},
                  {"token", Yaml::MappingValue{{"index", "2"},
                                               {"kind", "Semi"},
                                               {"line", "5"},
                                               {"column", "3"},
                                               {"indent", "1"},
                                               {"spelling", ";"},
                                               {"has_trailing_space", "true"}}},
                  {"token", Yaml::MappingValue{{"index", "3"},
                                               {"kind", "EndOfFile"},
                                               {"line", "15"},
                                               {"column", "1"},
                                               {"indent", "1"},
                                               {"spelling", ""}}}}));
}
  1024. } // namespace
  1025. } // namespace Carbon::Testing