tokenized_buffer_test.cpp 41 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#include "toolchain/lex/tokenized_buffer.h"

#include <gmock/gmock.h>
#include <gtest/gtest.h>

#include <algorithm>
#include <forward_list>
#include <iterator>

#include "llvm/ADT/ArrayRef.h"
#include "testing/base/test_raw_ostream.h"
#include "toolchain/base/value_store.h"
#include "toolchain/diagnostics/diagnostic_emitter.h"
#include "toolchain/diagnostics/mocks.h"
#include "toolchain/lex/lex.h"
#include "toolchain/lex/tokenized_buffer_test_helpers.h"
#include "toolchain/testing/yaml_test_helpers.h"
  17. namespace Carbon::Lex {
  18. namespace {
  19. using ::Carbon::Testing::ExpectedToken;
  20. using ::Carbon::Testing::IsDiagnostic;
  21. using ::Carbon::Testing::TestRawOstream;
  22. using ::testing::_;
  23. using ::testing::ElementsAre;
  24. using ::testing::Eq;
  25. using ::testing::HasSubstr;
  26. using ::testing::Pair;
  27. namespace Yaml = ::Carbon::Testing::Yaml;
  28. class LexerTest : public ::testing::Test {
  29. protected:
  30. auto GetSourceBuffer(llvm::StringRef text) -> SourceBuffer& {
  31. std::string filename = llvm::formatv("test{0}.carbon", ++file_index_);
  32. CARBON_CHECK(fs_.addFile(filename, /*ModificationTime=*/0,
  33. llvm::MemoryBuffer::getMemBuffer(text)));
  34. source_storage_.push_front(std::move(*SourceBuffer::CreateFromFile(
  35. fs_, filename, ConsoleDiagnosticConsumer())));
  36. return source_storage_.front();
  37. }
  38. auto Lex(llvm::StringRef text,
  39. DiagnosticConsumer& consumer = ConsoleDiagnosticConsumer())
  40. -> TokenizedBuffer {
  41. return Lex::Lex(value_stores_, GetSourceBuffer(text), consumer);
  42. }
  43. SharedValueStores value_stores_;
  44. llvm::vfs::InMemoryFileSystem fs_;
  45. int file_index_ = 0;
  46. std::forward_list<SourceBuffer> source_storage_;
  47. };
  48. TEST_F(LexerTest, HandlesEmptyBuffer) {
  49. auto buffer = Lex("");
  50. EXPECT_FALSE(buffer.has_errors());
  51. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  52. {TokenKind::FileStart}, {TokenKind::FileEnd}}));
  53. }
  54. TEST_F(LexerTest, TracksLinesAndColumns) {
  55. auto buffer = Lex("\n ;;\n ;;;\n x\"foo\" '''baz\n a\n ''' y");
  56. EXPECT_FALSE(buffer.has_errors());
  57. EXPECT_THAT(
  58. buffer,
  59. HasTokens(llvm::ArrayRef<ExpectedToken>{
  60. {.kind = TokenKind::FileStart,
  61. .line = 1,
  62. .column = 1,
  63. .indent_column = 1},
  64. {.kind = TokenKind::Semi, .line = 2, .column = 3, .indent_column = 3},
  65. {.kind = TokenKind::Semi, .line = 2, .column = 4, .indent_column = 3},
  66. {.kind = TokenKind::Semi, .line = 3, .column = 4, .indent_column = 4},
  67. {.kind = TokenKind::Semi, .line = 3, .column = 5, .indent_column = 4},
  68. {.kind = TokenKind::Semi, .line = 3, .column = 6, .indent_column = 4},
  69. {.kind = TokenKind::Identifier,
  70. .line = 4,
  71. .column = 4,
  72. .indent_column = 4,
  73. .text = "x"},
  74. {.kind = TokenKind::StringLiteral,
  75. .line = 4,
  76. .column = 5,
  77. .indent_column = 4},
  78. {.kind = TokenKind::StringLiteral,
  79. .line = 4,
  80. .column = 11,
  81. .indent_column = 4},
  82. {.kind = TokenKind::Identifier,
  83. .line = 6,
  84. .column = 6,
  85. .indent_column = 11,
  86. .text = "y"},
  87. {.kind = TokenKind::FileEnd, .line = 6, .column = 7},
  88. }));
  89. }
  90. TEST_F(LexerTest, HandlesNumericLiteral) {
  91. auto buffer = Lex("12-578\n 1 2\n0x12_3ABC\n0b10_10_11\n1_234_567\n1.5e9");
  92. EXPECT_FALSE(buffer.has_errors());
  93. ASSERT_THAT(buffer,
  94. HasTokens(llvm::ArrayRef<ExpectedToken>{
  95. {.kind = TokenKind::FileStart, .line = 1, .column = 1},
  96. {.kind = TokenKind::IntLiteral,
  97. .line = 1,
  98. .column = 1,
  99. .indent_column = 1,
  100. .text = "12"},
  101. {.kind = TokenKind::Minus,
  102. .line = 1,
  103. .column = 3,
  104. .indent_column = 1},
  105. {.kind = TokenKind::IntLiteral,
  106. .line = 1,
  107. .column = 4,
  108. .indent_column = 1,
  109. .text = "578"},
  110. {.kind = TokenKind::IntLiteral,
  111. .line = 2,
  112. .column = 3,
  113. .indent_column = 3,
  114. .text = "1"},
  115. {.kind = TokenKind::IntLiteral,
  116. .line = 2,
  117. .column = 6,
  118. .indent_column = 3,
  119. .text = "2"},
  120. {.kind = TokenKind::IntLiteral,
  121. .line = 3,
  122. .column = 1,
  123. .indent_column = 1,
  124. .text = "0x12_3ABC"},
  125. {.kind = TokenKind::IntLiteral,
  126. .line = 4,
  127. .column = 1,
  128. .indent_column = 1,
  129. .text = "0b10_10_11"},
  130. {.kind = TokenKind::IntLiteral,
  131. .line = 5,
  132. .column = 1,
  133. .indent_column = 1,
  134. .text = "1_234_567"},
  135. {.kind = TokenKind::RealLiteral,
  136. .line = 6,
  137. .column = 1,
  138. .indent_column = 1,
  139. .text = "1.5e9"},
  140. {.kind = TokenKind::FileEnd, .line = 6, .column = 6},
  141. }));
  142. auto token_start = buffer.tokens().begin();
  143. auto token_12 = token_start + 1;
  144. EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_12)), 12);
  145. auto token_578 = token_12 + 2;
  146. EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_578)), 578);
  147. auto token_1 = token_578 + 1;
  148. EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_1)), 1);
  149. auto token_2 = token_1 + 1;
  150. EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_2)), 2);
  151. auto token_0x12_3abc = token_2 + 1;
  152. EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_0x12_3abc)),
  153. 0x12'3abc);
  154. auto token_0b10_10_11 = token_0x12_3abc + 1;
  155. EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_0b10_10_11)),
  156. 0b10'10'11);
  157. auto token_1_234_567 = token_0b10_10_11 + 1;
  158. EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_1_234_567)),
  159. 1'234'567);
  160. auto token_1_5e9 = token_1_234_567 + 1;
  161. auto value_1_5e9 =
  162. value_stores_.reals().Get(buffer.GetRealLiteral(*token_1_5e9));
  163. EXPECT_EQ(value_1_5e9.mantissa.getZExtValue(), 15);
  164. EXPECT_EQ(value_1_5e9.exponent.getSExtValue(), 8);
  165. EXPECT_EQ(value_1_5e9.is_decimal, true);
  166. }
// Inputs that look numeric but are malformed lex as Error tokens covering
// the whole bad spelling, while valid literals mixed in still lex normally.
TEST_F(LexerTest, HandlesInvalidNumericLiterals) {
  auto buffer = Lex("14x 15_49 0x3.5q 0x3_4.5_6 0ops");
  EXPECT_TRUE(buffer.has_errors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::FileStart, .line = 1, .column = 1},
                  // `14x` is a single error token, not `14` then `x`.
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 1,
                   .indent_column = 1,
                   .text = "14x"},
                  // `15_49` is a valid literal with a digit separator.
                  {.kind = TokenKind::IntLiteral,
                   .line = 1,
                   .column = 5,
                   .indent_column = 1,
                   .text = "15_49"},
                  // `0x3.5q` fails but still lexes as one token.
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 11,
                   .indent_column = 1,
                   .text = "0x3.5q"},
                  // `0x3_4.5_6` is a valid hex real literal.
                  {.kind = TokenKind::RealLiteral,
                   .line = 1,
                   .column = 18,
                   .indent_column = 1,
                   .text = "0x3_4.5_6"},
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 28,
                   .indent_column = 1,
                   .text = "0ops"},
                  {.kind = TokenKind::FileEnd, .line = 1, .column = 32},
              }));
}
// Checks where the lexer splits tokens when numeric literals abut periods,
// operators, underscores, or identifier characters.
TEST_F(LexerTest, SplitsNumericLiteralsProperly) {
  llvm::StringLiteral source_text = R"(
1.
.2
3.+foo
4.0-bar
5.0e+123+456
6.0e+1e+2
1e7
8..10
9.0.9.5
10.foo
11.0.foo
12e+1
13._
)";
  auto buffer = Lex(source_text);
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::FileStart},
                          // `1.` splits into an int literal and a period.
                          {.kind = TokenKind::IntLiteral, .text = "1"},
                          {.kind = TokenKind::Period},
                          // newline
                          // `.2` splits into a period and an int literal.
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::IntLiteral, .text = "2"},
                          // newline
                          {.kind = TokenKind::IntLiteral, .text = "3"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::Identifier, .text = "foo"},
                          // newline
                          {.kind = TokenKind::RealLiteral, .text = "4.0"},
                          {.kind = TokenKind::Minus},
                          {.kind = TokenKind::Identifier, .text = "bar"},
                          // newline
                          // A `+` after a complete exponent starts a new token.
                          {.kind = TokenKind::RealLiteral, .text = "5.0e+123"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntLiteral, .text = "456"},
                          // newline
                          // A second `e` inside the literal is an error.
                          {.kind = TokenKind::Error, .text = "6.0e+1e"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntLiteral, .text = "2"},
                          // newline
                          {.kind = TokenKind::Error, .text = "1e7"},
                          // newline
                          // `8..10` is int, period, period, int.
                          {.kind = TokenKind::IntLiteral, .text = "8"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::IntLiteral, .text = "10"},
                          // newline
                          {.kind = TokenKind::RealLiteral, .text = "9.0"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::RealLiteral, .text = "9.5"},
                          // newline
                          // A letter after the decimal point is an error.
                          {.kind = TokenKind::Error, .text = "10.foo"},
                          // newline
                          {.kind = TokenKind::RealLiteral, .text = "11.0"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Identifier, .text = "foo"},
                          // newline
                          {.kind = TokenKind::Error, .text = "12e"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntLiteral, .text = "1"},
                          // newline
                          {.kind = TokenKind::IntLiteral, .text = "13"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Underscore},
                          // newline
                          {.kind = TokenKind::FileEnd},
                      }));
}
// Characters with no token meaning are grouped into Error tokens, including
// multi-byte UTF-8 sequences and an embedded NUL byte.
TEST_F(LexerTest, HandlesGarbageCharacters) {
  constexpr char GarbageText[] = "$$💩-$\n$\0$12$\n\\\"\\\n\"x";
  // `sizeof - 1` keeps the embedded NUL while dropping the terminator.
  auto buffer = Lex(llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::FileStart, .line = 1, .column = 1},
          {.kind = TokenKind::Error,
           .line = 1,
           .column = 1,
           // 💩 takes 4 bytes, and we count column as bytes offset.
           .text = llvm::StringRef("$$💩", 6)},
          {.kind = TokenKind::Minus, .line = 1, .column = 7},
          {.kind = TokenKind::Error, .line = 1, .column = 8, .text = "$"},
          // newline
          {.kind = TokenKind::Error,
           .line = 2,
           .column = 1,
           // The explicit length keeps the embedded NUL in the expectation.
           .text = llvm::StringRef("$\0$", 3)},
          {.kind = TokenKind::IntLiteral, .line = 2, .column = 4, .text = "12"},
          {.kind = TokenKind::Error, .line = 2, .column = 6, .text = "$"},
          // newline
          {.kind = TokenKind::Backslash, .line = 3, .column = 1, .text = "\\"},
          {.kind = TokenKind::Error, .line = 3, .column = 2, .text = "\"\\"},
          // newline
          {.kind = TokenKind::Error, .line = 4, .column = 1, .text = "\"x"},
          {.kind = TokenKind::FileEnd, .line = 4, .column = 3},
      }));
}
  302. TEST_F(LexerTest, Symbols) {
  303. // We don't need to exhaustively test symbols here as they're handled with
  304. // common code, but we want to check specific patterns to verify things like
  305. // max-munch rule and handling of interesting symbols.
  306. auto buffer = Lex("<<<");
  307. EXPECT_FALSE(buffer.has_errors());
  308. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  309. {TokenKind::FileStart},
  310. {TokenKind::LessLess},
  311. {TokenKind::Less},
  312. {TokenKind::FileEnd},
  313. }));
  314. buffer = Lex("<<=>>");
  315. EXPECT_FALSE(buffer.has_errors());
  316. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  317. {TokenKind::FileStart},
  318. {TokenKind::LessLessEqual},
  319. {TokenKind::GreaterGreater},
  320. {TokenKind::FileEnd},
  321. }));
  322. buffer = Lex("< <=> >");
  323. EXPECT_FALSE(buffer.has_errors());
  324. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  325. {TokenKind::FileStart},
  326. {TokenKind::Less},
  327. {TokenKind::LessEqualGreater},
  328. {TokenKind::Greater},
  329. {TokenKind::FileEnd},
  330. }));
  331. buffer = Lex("\\/?@&^!");
  332. EXPECT_FALSE(buffer.has_errors());
  333. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  334. {TokenKind::FileStart},
  335. {TokenKind::Backslash},
  336. {TokenKind::Slash},
  337. {TokenKind::Question},
  338. {TokenKind::At},
  339. {TokenKind::Amp},
  340. {TokenKind::Caret},
  341. {TokenKind::Exclaim},
  342. {TokenKind::FileEnd},
  343. }));
  344. }
  345. TEST_F(LexerTest, Parens) {
  346. auto buffer = Lex("()");
  347. EXPECT_FALSE(buffer.has_errors());
  348. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  349. {TokenKind::FileStart},
  350. {TokenKind::OpenParen},
  351. {TokenKind::CloseParen},
  352. {TokenKind::FileEnd},
  353. }));
  354. buffer = Lex("((()()))");
  355. EXPECT_FALSE(buffer.has_errors());
  356. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  357. {TokenKind::FileStart},
  358. {TokenKind::OpenParen},
  359. {TokenKind::OpenParen},
  360. {TokenKind::OpenParen},
  361. {TokenKind::CloseParen},
  362. {TokenKind::OpenParen},
  363. {TokenKind::CloseParen},
  364. {TokenKind::CloseParen},
  365. {TokenKind::CloseParen},
  366. {TokenKind::FileEnd},
  367. }));
  368. }
  369. TEST_F(LexerTest, CurlyBraces) {
  370. auto buffer = Lex("{}");
  371. EXPECT_FALSE(buffer.has_errors());
  372. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  373. {TokenKind::FileStart},
  374. {TokenKind::OpenCurlyBrace},
  375. {TokenKind::CloseCurlyBrace},
  376. {TokenKind::FileEnd},
  377. }));
  378. buffer = Lex("{{{}{}}}");
  379. EXPECT_FALSE(buffer.has_errors());
  380. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  381. {TokenKind::FileStart},
  382. {TokenKind::OpenCurlyBrace},
  383. {TokenKind::OpenCurlyBrace},
  384. {TokenKind::OpenCurlyBrace},
  385. {TokenKind::CloseCurlyBrace},
  386. {TokenKind::OpenCurlyBrace},
  387. {TokenKind::CloseCurlyBrace},
  388. {TokenKind::CloseCurlyBrace},
  389. {TokenKind::CloseCurlyBrace},
  390. {TokenKind::FileEnd},
  391. }));
  392. }
// Checks that GetMatchedClosingToken/GetMatchedOpeningToken pair grouping
// tokens correctly, from simple sibling pairs to nested mixed groups.
TEST_F(LexerTest, MatchingGroups) {
  {
    TokenizedBuffer buffer = Lex("(){}");
    ASSERT_FALSE(buffer.has_errors());
    // Skip over the FileStart token.
    auto it = ++buffer.tokens().begin();
    auto open_paren_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    auto open_curly_token = *it++;
    auto close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    auto eof_token = *it++;
    EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::FileEnd);
    EXPECT_EQ(buffer.tokens().end(), it);
  }
  {
    TokenizedBuffer buffer = Lex("({x}){(y)} {{((z))}}");
    ASSERT_FALSE(buffer.has_errors());
    auto it = ++buffer.tokens().begin();
    // `({x})`: a curly group nested inside a paren group.
    auto open_paren_token = *it++;
    auto open_curly_token = *it++;
    ASSERT_EQ("x",
              value_stores_.identifiers().Get(buffer.GetIdentifier(*it++)));
    auto close_curly_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    // `{(y)}`: a paren group nested inside a curly group.
    open_curly_token = *it++;
    open_paren_token = *it++;
    ASSERT_EQ("y",
              value_stores_.identifiers().Get(buffer.GetIdentifier(*it++)));
    close_paren_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    // `{{((z))}}`: doubly nested groups of each kind; closers appear in
    // inner-paren, outer-paren, inner-curly, outer-curly order.
    open_curly_token = *it++;
    auto inner_open_curly_token = *it++;
    open_paren_token = *it++;
    auto inner_open_paren_token = *it++;
    ASSERT_EQ("z",
              value_stores_.identifiers().Get(buffer.GetIdentifier(*it++)));
    auto inner_close_paren_token = *it++;
    close_paren_token = *it++;
    auto inner_close_curly_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(inner_close_curly_token,
              buffer.GetMatchedClosingToken(inner_open_curly_token));
    EXPECT_EQ(inner_open_curly_token,
              buffer.GetMatchedOpeningToken(inner_close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(inner_close_paren_token,
              buffer.GetMatchedClosingToken(inner_open_paren_token));
    EXPECT_EQ(inner_open_paren_token,
              buffer.GetMatchedOpeningToken(inner_close_paren_token));
    auto eof_token = *it++;
    EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::FileEnd);
    EXPECT_EQ(buffer.tokens().end(), it);
  }
}
// Unbalanced grouping symbols: missing closers are repaired with inserted
// tokens (marked `.recovery = true`), and closers with no matching opener
// lex as Error tokens.
TEST_F(LexerTest, MismatchedGroups) {
  // An unclosed `{` gets a recovery `}` before the file end.
  auto buffer = Lex("{");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {TokenKind::FileStart},
                  {TokenKind::OpenCurlyBrace},
                  {.kind = TokenKind::CloseCurlyBrace, .recovery = true},
                  {TokenKind::FileEnd},
              }));
  // A `}` with no opener becomes an Error token.
  buffer = Lex("}");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::FileStart},
                          {.kind = TokenKind::Error, .text = "}"},
                          {TokenKind::FileEnd},
                      }));
  // The unclosed `(` gets a recovery `)` just before the `}`, sharing its
  // column.
  buffer = Lex("{(}");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {TokenKind::FileStart},
          {.kind = TokenKind::OpenCurlyBrace, .column = 1},
          {.kind = TokenKind::OpenParen, .column = 2},
          {.kind = TokenKind::CloseParen, .column = 3, .recovery = true},
          {.kind = TokenKind::CloseCurlyBrace, .column = 3},
          {TokenKind::FileEnd},
      }));
  // A leading `)` is an error; the unclosed `{` gets a recovery `}` before
  // the final `)`.
  buffer = Lex(")({)");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {TokenKind::FileStart},
          {.kind = TokenKind::Error, .column = 1, .text = ")"},
          {.kind = TokenKind::OpenParen, .column = 2},
          {.kind = TokenKind::OpenCurlyBrace, .column = 3},
          {.kind = TokenKind::CloseCurlyBrace, .column = 4, .recovery = true},
          {.kind = TokenKind::CloseParen, .column = 4},
          {TokenKind::FileEnd},
      }));
}
  520. TEST_F(LexerTest, Whitespace) {
  521. auto buffer = Lex("{( } {(");
  522. // Whether there should be whitespace before/after each token.
  523. bool space[] = {true,
  524. // start-of-file
  525. true,
  526. // {
  527. false,
  528. // (
  529. true,
  530. // inserted )
  531. true,
  532. // }
  533. true,
  534. // {
  535. false,
  536. // (
  537. true,
  538. // inserted )
  539. true,
  540. // inserted }
  541. true,
  542. // EOF
  543. false};
  544. int pos = 0;
  545. for (TokenIndex token : buffer.tokens()) {
  546. SCOPED_TRACE(
  547. llvm::formatv("Token #{0}: '{1}'", token, buffer.GetTokenText(token)));
  548. ASSERT_LT(pos, std::size(space));
  549. EXPECT_THAT(buffer.HasLeadingWhitespace(token), Eq(space[pos]));
  550. ++pos;
  551. ASSERT_LT(pos, std::size(space));
  552. EXPECT_THAT(buffer.HasTrailingWhitespace(token), Eq(space[pos]));
  553. }
  554. ASSERT_EQ(pos + 1, std::size(space));
  555. }
  556. TEST_F(LexerTest, Keywords) {
  557. auto buffer = Lex(" fn");
  558. EXPECT_FALSE(buffer.has_errors());
  559. EXPECT_THAT(buffer,
  560. HasTokens(llvm::ArrayRef<ExpectedToken>{
  561. {TokenKind::FileStart},
  562. {.kind = TokenKind::Fn, .column = 4, .indent_column = 4},
  563. {TokenKind::FileEnd},
  564. }));
  565. buffer = Lex("and or not if else for return var break continue _");
  566. EXPECT_FALSE(buffer.has_errors());
  567. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  568. {TokenKind::FileStart},
  569. {TokenKind::And},
  570. {TokenKind::Or},
  571. {TokenKind::Not},
  572. {TokenKind::If},
  573. {TokenKind::Else},
  574. {TokenKind::For},
  575. {TokenKind::Return},
  576. {TokenKind::Var},
  577. {TokenKind::Break},
  578. {TokenKind::Continue},
  579. {TokenKind::Underscore},
  580. {TokenKind::FileEnd},
  581. }));
  582. }
  583. TEST_F(LexerTest, Comments) {
  584. auto buffer = Lex(" ;\n // foo\n ;\n");
  585. EXPECT_FALSE(buffer.has_errors());
  586. EXPECT_THAT(
  587. buffer,
  588. HasTokens(llvm::ArrayRef<ExpectedToken>{
  589. {.kind = TokenKind::FileStart, .line = 1, .column = 1},
  590. {.kind = TokenKind::Semi, .line = 1, .column = 2, .indent_column = 2},
  591. {.kind = TokenKind::Semi, .line = 3, .column = 3, .indent_column = 3},
  592. {.kind = TokenKind::FileEnd, .line = 3, .column = 4},
  593. }));
  594. buffer = Lex("// foo\n//\n// bar");
  595. EXPECT_FALSE(buffer.has_errors());
  596. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  597. {TokenKind::FileStart}, {TokenKind::FileEnd}}));
  598. // Make sure weird characters aren't a problem.
  599. buffer = Lex(" // foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
  600. EXPECT_FALSE(buffer.has_errors());
  601. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  602. {TokenKind::FileStart}, {TokenKind::FileEnd}}));
  603. // Make sure we can lex a comment at the end of the input.
  604. buffer = Lex("//");
  605. EXPECT_FALSE(buffer.has_errors());
  606. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  607. {TokenKind::FileStart}, {TokenKind::FileEnd}}));
  608. }
  609. TEST_F(LexerTest, InvalidComments) {
  610. llvm::StringLiteral testcases[] = {
  611. " /// foo\n",
  612. "foo // bar\n",
  613. "//! hello",
  614. " //world",
  615. };
  616. for (llvm::StringLiteral testcase : testcases) {
  617. auto buffer = Lex(testcase);
  618. EXPECT_TRUE(buffer.has_errors());
  619. }
  620. }
  621. TEST_F(LexerTest, Identifiers) {
  622. auto buffer = Lex(" foobar");
  623. EXPECT_FALSE(buffer.has_errors());
  624. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  625. {TokenKind::FileStart},
  626. {.kind = TokenKind::Identifier,
  627. .column = 4,
  628. .indent_column = 4,
  629. .text = "foobar"},
  630. {TokenKind::FileEnd},
  631. }));
  632. // Check different kinds of identifier character sequences.
  633. buffer = Lex("_foo_bar");
  634. EXPECT_FALSE(buffer.has_errors());
  635. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  636. {TokenKind::FileStart},
  637. {.kind = TokenKind::Identifier, .text = "_foo_bar"},
  638. {TokenKind::FileEnd},
  639. }));
  640. buffer = Lex("foo2bar00");
  641. EXPECT_FALSE(buffer.has_errors());
  642. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  643. {TokenKind::FileStart},
  644. {.kind = TokenKind::Identifier, .text = "foo2bar00"},
  645. {TokenKind::FileEnd},
  646. }));
  647. // Check that we can parse identifiers that start with a keyword.
  648. buffer = Lex("fnord");
  649. EXPECT_FALSE(buffer.has_errors());
  650. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  651. {TokenKind::FileStart},
  652. {.kind = TokenKind::Identifier, .text = "fnord"},
  653. {TokenKind::FileEnd},
  654. }));
  655. // Check multiple identifiers with indent and interning.
  656. buffer = Lex(" foo;bar\nbar \n foo\tfoo");
  657. EXPECT_FALSE(buffer.has_errors());
  658. EXPECT_THAT(buffer,
  659. HasTokens(llvm::ArrayRef<ExpectedToken>{
  660. {.kind = TokenKind::FileStart, .line = 1, .column = 1},
  661. {.kind = TokenKind::Identifier,
  662. .line = 1,
  663. .column = 4,
  664. .indent_column = 4,
  665. .text = "foo"},
  666. {.kind = TokenKind::Semi},
  667. {.kind = TokenKind::Identifier,
  668. .line = 1,
  669. .column = 8,
  670. .indent_column = 4,
  671. .text = "bar"},
  672. {.kind = TokenKind::Identifier,
  673. .line = 2,
  674. .column = 1,
  675. .indent_column = 1,
  676. .text = "bar"},
  677. {.kind = TokenKind::Identifier,
  678. .line = 3,
  679. .column = 3,
  680. .indent_column = 3,
  681. .text = "foo"},
  682. {.kind = TokenKind::Identifier,
  683. .line = 3,
  684. .column = 7,
  685. .indent_column = 3,
  686. .text = "foo"},
  687. {.kind = TokenKind::FileEnd, .line = 3, .column = 10},
  688. }));
  689. }
  690. TEST_F(LexerTest, StringLiterals) {
  691. llvm::StringLiteral testcase = R"(
  692. "hello world\n"
  693. '''foo
  694. test \
  695. \xAB
  696. ''' trailing
  697. #"""#
  698. "\0"
  699. #"\0"foo"\1"#
  700. """x"""
  701. )";
  702. auto buffer = Lex(testcase);
  703. EXPECT_FALSE(buffer.has_errors());
  704. EXPECT_THAT(buffer,
  705. HasTokens(llvm::ArrayRef<ExpectedToken>{
  706. {.kind = TokenKind::FileStart, .line = 1, .column = 1},
  707. {.kind = TokenKind::StringLiteral,
  708. .line = 2,
  709. .column = 5,
  710. .indent_column = 5,
  711. .value_stores = &value_stores_,
  712. .string_contents = {"hello world\n"}},
  713. {.kind = TokenKind::StringLiteral,
  714. .line = 4,
  715. .column = 5,
  716. .indent_column = 5,
  717. .value_stores = &value_stores_,
  718. .string_contents = {" test \xAB\n"}},
  719. {.kind = TokenKind::Identifier,
  720. .line = 7,
  721. .column = 10,
  722. .indent_column = 5,
  723. .text = "trailing"},
  724. {.kind = TokenKind::StringLiteral,
  725. .line = 9,
  726. .column = 7,
  727. .indent_column = 7,
  728. .value_stores = &value_stores_,
  729. .string_contents = {"\""}},
  730. {.kind = TokenKind::StringLiteral,
  731. .line = 11,
  732. .column = 5,
  733. .indent_column = 5,
  734. .value_stores = &value_stores_,
  735. .string_contents = llvm::StringLiteral::withInnerNUL("\0")},
  736. {.kind = TokenKind::StringLiteral,
  737. .line = 13,
  738. .column = 5,
  739. .indent_column = 5,
  740. .value_stores = &value_stores_,
  741. .string_contents = {"\\0\"foo\"\\1"}},
  742. // """x""" is three string literals, not one invalid
  743. // attempt at a block string literal.
  744. {.kind = TokenKind::StringLiteral,
  745. .line = 15,
  746. .column = 5,
  747. .indent_column = 5,
  748. .value_stores = &value_stores_,
  749. .string_contents = {""}},
  750. {.kind = TokenKind::StringLiteral,
  751. .line = 15,
  752. .column = 7,
  753. .indent_column = 5,
  754. .value_stores = &value_stores_,
  755. .string_contents = {"x"}},
  756. {.kind = TokenKind::StringLiteral,
  757. .line = 15,
  758. .column = 10,
  759. .indent_column = 5,
  760. .value_stores = &value_stores_,
  761. .string_contents = {""}},
  762. {.kind = TokenKind::FileEnd, .line = 16, .column = 3},
  763. }));
  764. }
  765. TEST_F(LexerTest, InvalidStringLiterals) {
  766. llvm::StringLiteral invalid[] = {
  767. // clang-format off
  768. R"(")",
  769. R"('''
  770. '')",
  771. R"("\)",
  772. R"("\")",
  773. R"("\\)",
  774. R"("\\\")",
  775. R"(''')",
  776. R"('''
  777. )",
  778. R"('''\)",
  779. R"(#'''
  780. ''')",
  781. // clang-format on
  782. };
  783. for (llvm::StringLiteral test : invalid) {
  784. SCOPED_TRACE(test);
  785. auto buffer = Lex(test);
  786. EXPECT_TRUE(buffer.has_errors());
  787. // We should have formed at least one error token.
  788. bool found_error = false;
  789. for (TokenIndex token : buffer.tokens()) {
  790. if (buffer.GetKind(token) == TokenKind::Error) {
  791. found_error = true;
  792. break;
  793. }
  794. }
  795. EXPECT_TRUE(found_error);
  796. }
  797. }
TEST_F(LexerTest, TypeLiterals) {
  // Exercises `iN`, `uN`, and `fN` type literals alongside lookalike
  // spellings that the table below expects to lex as plain identifiers
  // instead (`i0`, `i0x1`, `u0`, `u64b`, `fi`, `s1`).
  llvm::StringLiteral testcase = R"(
    i0 i1 i20 i999999999999 i0x1
    u0 u1 u64 u64b
    f32 f80 f1 fi
    s1
  )";

  auto buffer = Lex(testcase);
  EXPECT_FALSE(buffer.has_errors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::FileStart, .line = 1, .column = 1},
                  // `i0` lexes as an identifier, not a type literal.
                  {.kind = TokenKind::Identifier,
                   .line = 2,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"i0"}},
                  {.kind = TokenKind::IntTypeLiteral,
                   .line = 2,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"i1"}},
                  {.kind = TokenKind::IntTypeLiteral,
                   .line = 2,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"i20"}},
                  // Very large widths still lex as int type literals.
                  {.kind = TokenKind::IntTypeLiteral,
                   .line = 2,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"i999999999999"}},
                  // `i0x1` lexes as an identifier, not a type literal.
                  {.kind = TokenKind::Identifier,
                   .line = 2,
                   .column = 29,
                   .indent_column = 5,
                   .text = {"i0x1"}},
                  // `u0` lexes as an identifier, not a type literal.
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"u0"}},
                  {.kind = TokenKind::UnsignedIntTypeLiteral,
                   .line = 3,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"u1"}},
                  {.kind = TokenKind::UnsignedIntTypeLiteral,
                   .line = 3,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"u64"}},
                  // `u64b` (trailing non-digit) lexes as an identifier.
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"u64b"}},
                  {.kind = TokenKind::FloatTypeLiteral,
                   .line = 4,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"f32"}},
                  {.kind = TokenKind::FloatTypeLiteral,
                   .line = 4,
                   .column = 9,
                   .indent_column = 5,
                   .text = {"f80"}},
                  {.kind = TokenKind::FloatTypeLiteral,
                   .line = 4,
                   .column = 13,
                   .indent_column = 5,
                   .text = {"f1"}},
                  // `fi` (no digits after the prefix) is an identifier.
                  {.kind = TokenKind::Identifier,
                   .line = 4,
                   .column = 16,
                   .indent_column = 5,
                   .text = {"fi"}},
                  // `s1` (`s` is not a type-literal prefix) is an identifier.
                  {.kind = TokenKind::Identifier,
                   .line = 5,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"s1"}},
                  {.kind = TokenKind::FileEnd, .line = 6, .column = 3},
              }));

  // Verify the lexed bit widths. Offsets index into the token list asserted
  // above, counting FileStart as token 0.
  auto token_i1 = buffer.tokens().begin() + 2;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i1), 1);
  auto token_i20 = buffer.tokens().begin() + 3;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i20), 20);
  // Widths beyond 64 bits are preserved exactly.
  auto token_i999999999999 = buffer.tokens().begin() + 4;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i999999999999), 999999999999ULL);
  auto token_u1 = buffer.tokens().begin() + 7;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u1), 1);
  auto token_u64 = buffer.tokens().begin() + 8;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u64), 64);
  auto token_f32 = buffer.tokens().begin() + 10;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f32), 32);
  auto token_f80 = buffer.tokens().begin() + 11;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f80), 80);
  auto token_f1 = buffer.tokens().begin() + 12;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f1), 1);
}
  899. TEST_F(LexerTest, TypeLiteralTooManyDigits) {
  900. std::string code = "i";
  901. constexpr int Count = 10000;
  902. code.append(Count, '9');
  903. Testing::MockDiagnosticConsumer consumer;
  904. EXPECT_CALL(consumer,
  905. HandleDiagnostic(IsDiagnostic(
  906. DiagnosticKind::TooManyDigits, DiagnosticLevel::Error, 1, 2,
  907. HasSubstr(llvm::formatv(" {0} ", Count)))));
  908. auto buffer = Lex(code, consumer);
  909. EXPECT_TRUE(buffer.has_errors());
  910. ASSERT_THAT(buffer,
  911. HasTokens(llvm::ArrayRef<ExpectedToken>{
  912. {.kind = TokenKind::FileStart, .line = 1, .column = 1},
  913. {.kind = TokenKind::Error,
  914. .line = 1,
  915. .column = 1,
  916. .indent_column = 1,
  917. .text = {code}},
  918. {.kind = TokenKind::FileEnd, .line = 1, .column = Count + 2},
  919. }));
  920. }
  921. TEST_F(LexerTest, DiagnosticTrailingComment) {
  922. llvm::StringLiteral testcase = R"(
  923. // Hello!
  924. var String x; // trailing comment
  925. )";
  926. Testing::MockDiagnosticConsumer consumer;
  927. EXPECT_CALL(consumer,
  928. HandleDiagnostic(IsDiagnostic(DiagnosticKind::TrailingComment,
  929. DiagnosticLevel::Error, 3, 19, _)));
  930. Lex(testcase, consumer);
  931. }
  932. TEST_F(LexerTest, DiagnosticWhitespace) {
  933. Testing::MockDiagnosticConsumer consumer;
  934. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  935. DiagnosticKind::NoWhitespaceAfterCommentIntroducer,
  936. DiagnosticLevel::Error, 1, 3, _)));
  937. Lex("//no space after comment", consumer);
  938. }
  939. TEST_F(LexerTest, DiagnosticUnrecognizedEscape) {
  940. Testing::MockDiagnosticConsumer consumer;
  941. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  942. DiagnosticKind::UnknownEscapeSequence,
  943. DiagnosticLevel::Error, 1, 8, HasSubstr("`b`"))));
  944. Lex(R"("hello\bworld")", consumer);
  945. }
  946. TEST_F(LexerTest, DiagnosticBadHex) {
  947. Testing::MockDiagnosticConsumer consumer;
  948. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  949. DiagnosticKind::HexadecimalEscapeMissingDigits,
  950. DiagnosticLevel::Error, 1, 9, _)));
  951. Lex(R"("hello\xabworld")", consumer);
  952. }
  953. TEST_F(LexerTest, DiagnosticInvalidDigit) {
  954. Testing::MockDiagnosticConsumer consumer;
  955. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  956. DiagnosticKind::InvalidDigit,
  957. DiagnosticLevel::Error, 1, 6, HasSubstr("'a'"))));
  958. Lex("0x123abc", consumer);
  959. }
  960. TEST_F(LexerTest, DiagnosticMissingTerminator) {
  961. Testing::MockDiagnosticConsumer consumer;
  962. EXPECT_CALL(consumer,
  963. HandleDiagnostic(IsDiagnostic(DiagnosticKind::UnterminatedString,
  964. DiagnosticLevel::Error, 1, 1, _)));
  965. Lex(R"(#" ")", consumer);
  966. }
  967. TEST_F(LexerTest, DiagnosticUnrecognizedChar) {
  968. Testing::MockDiagnosticConsumer consumer;
  969. EXPECT_CALL(consumer, HandleDiagnostic(
  970. IsDiagnostic(DiagnosticKind::UnrecognizedCharacters,
  971. DiagnosticLevel::Error, 1, 1, _)));
  972. Lex("\b", consumer);
  973. }
TEST_F(LexerTest, PrintingOutputYaml) {
  // Test that we can parse this into YAML and verify line and indent data.
  // The input places `;` tokens at varying columns and ends with a long run
  // of blank lines before the end of the file.
  auto buffer = Lex("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n\n");
  ASSERT_FALSE(buffer.has_errors());
  TestRawOstream print_stream;
  buffer.Print(print_stream);
  EXPECT_THAT(
      Yaml::Value::FromText(print_stream.TakeStr()),
      IsYaml(ElementsAre(Yaml::Sequence(ElementsAre(Yaml::Mapping(ElementsAre(
          Pair("filename", source_storage_.front().filename().str()),
          Pair("tokens",
               Yaml::Sequence(ElementsAre(
                   // Token 0: start-of-file marker with an empty spelling.
                   Yaml::Mapping(ElementsAre(
                       Pair("index", "0"), Pair("kind", "FileStart"),
                       Pair("line", "1"), Pair("column", "1"),
                       Pair("indent", "1"), Pair("spelling", ""),
                       Pair("has_trailing_space", "true"))),
                   // Token 1: the ` ;` on line 2, indented by one space.
                   Yaml::Mapping(
                       ElementsAre(Pair("index", "1"), Pair("kind", "Semi"),
                                   Pair("line", "2"), Pair("column", "2"),
                                   Pair("indent", "2"), Pair("spelling", ";"),
                                   Pair("has_trailing_space", "true"))),
                   // Tokens 2 and 3: `; ;` on line 5, sharing indent 1 but at
                   // different columns.
                   Yaml::Mapping(
                       ElementsAre(Pair("index", "2"), Pair("kind", "Semi"),
                                   Pair("line", "5"), Pair("column", "1"),
                                   Pair("indent", "1"), Pair("spelling", ";"),
                                   Pair("has_trailing_space", "true"))),
                   Yaml::Mapping(
                       ElementsAre(Pair("index", "3"), Pair("kind", "Semi"),
                                   Pair("line", "5"), Pair("column", "3"),
                                   Pair("indent", "1"), Pair("spelling", ";"),
                                   Pair("has_trailing_space", "true"))),
                   // Token 4: end-of-file marker on line 15, after the
                   // trailing blank lines; no trailing-space entry.
                   Yaml::Mapping(ElementsAre(
                       Pair("index", "4"), Pair("kind", "FileEnd"),
                       Pair("line", "15"), Pair("column", "1"),
                       Pair("indent", "1"), Pair("spelling", "")))))))))))));
}
  1011. } // namespace
  1012. } // namespace Carbon::Lex