tokenized_buffer_test.cpp 41 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#include "toolchain/lexer/tokenized_buffer.h"

#include <gmock/gmock.h>
#include <gtest/gtest.h>

#include <algorithm>
#include <forward_list>
#include <iterator>

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Sequence.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/Twine.h"
#include "llvm/Support/SourceMgr.h"
#include "llvm/Support/raw_ostream.h"
#include "toolchain/common/yaml_test_helpers.h"
#include "toolchain/diagnostics/diagnostic_emitter.h"
#include "toolchain/diagnostics/mocks.h"
#include "toolchain/lexer/tokenized_buffer_test_helpers.h"
  19. namespace Carbon {
  20. namespace {
  21. using ::Carbon::Testing::DiagnosticAt;
  22. using ::Carbon::Testing::DiagnosticMessage;
  23. using ::Carbon::Testing::ExpectedToken;
  24. using ::Carbon::Testing::HasTokens;
  25. using ::testing::ElementsAre;
  26. using ::testing::Eq;
  27. using ::testing::HasSubstr;
  28. using ::testing::StrEq;
  29. namespace Yaml = Carbon::Testing::Yaml;
  30. struct LexerTest : ::testing::Test {
  31. llvm::SmallVector<SourceBuffer, 16> source_storage;
  32. auto GetSourceBuffer(llvm::Twine text) -> SourceBuffer& {
  33. source_storage.push_back(SourceBuffer::CreateFromText(text.str()));
  34. return source_storage.back();
  35. }
  36. auto Lex(llvm::Twine text,
  37. DiagnosticConsumer& consumer = ConsoleDiagnosticConsumer())
  38. -> TokenizedBuffer {
  39. return TokenizedBuffer::Lex(GetSourceBuffer(text), consumer);
  40. }
  41. };
  42. TEST_F(LexerTest, HandlesEmptyBuffer) {
  43. auto buffer = Lex("");
  44. EXPECT_FALSE(buffer.HasErrors());
  45. EXPECT_THAT(
  46. buffer,
  47. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile()}}));
  48. }
// Verifies that the lexer records line, column, and indent-column positions
// across newlines, adjacent tokens, and a multi-line block string literal.
// NOTE(review): the leading whitespace inside the `Lex` input below looks
// collapsed relative to the asserted columns (e.g. `.column = 3` implies two
// spaces before the first `;;`) — verify the literal against the upstream
// source.
TEST_F(LexerTest, TracksLinesAndColumns) {
  auto buffer = Lex("\n ;;\n ;;;\n x\"foo\" \"\"\"baz\n a\n \"\"\" y");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::Semi(),
                   .line = 2,
                   .column = 3,
                   .indent_column = 3},
                  {.kind = TokenKind::Semi(),
                   .line = 2,
                   .column = 4,
                   .indent_column = 3},
                  {.kind = TokenKind::Semi(),
                   .line = 3,
                   .column = 4,
                   .indent_column = 4},
                  {.kind = TokenKind::Semi(),
                   .line = 3,
                   .column = 5,
                   .indent_column = 4},
                  {.kind = TokenKind::Semi(),
                   .line = 3,
                   .column = 6,
                   .indent_column = 4},
                  {.kind = TokenKind::Identifier(),
                   .line = 4,
                   .column = 4,
                   .indent_column = 4,
                   .text = "x"},
                  {.kind = TokenKind::StringLiteral(),
                   .line = 4,
                   .column = 5,
                   .indent_column = 4},
                  // The second literal is a block string; its token is
                  // positioned at the opening `"""` even though its contents
                  // span multiple lines.
                  {.kind = TokenKind::StringLiteral(),
                   .line = 4,
                   .column = 11,
                   .indent_column = 4},
                  {.kind = TokenKind::Identifier(),
                   .line = 6,
                   .column = 6,
                   .indent_column = 11,
                   .text = "y"},
                  {.kind = TokenKind::EndOfFile(), .line = 6, .column = 7},
              }));
}
// Checks tokenization and value computation for valid numeric literals:
// decimal, hex, and binary integers (with digit separators) plus a decimal
// real literal.
// NOTE(review): leading spaces inside the `Lex` input appear collapsed
// relative to the asserted columns (`1` at column 3 implies two leading
// spaces) — verify against the upstream source.
TEST_F(LexerTest, HandlesNumericLiteral) {
  auto buffer = Lex("12-578\n 1 2\n0x12_3ABC\n0b10_10_11\n1_234_567\n1.5e9");
  EXPECT_FALSE(buffer.HasErrors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // `12-578` splits into integer, minus, integer.
                  {.kind = TokenKind::IntegerLiteral(),
                   .line = 1,
                   .column = 1,
                   .indent_column = 1,
                   .text = "12"},
                  {.kind = TokenKind::Minus(),
                   .line = 1,
                   .column = 3,
                   .indent_column = 1},
                  {.kind = TokenKind::IntegerLiteral(),
                   .line = 1,
                   .column = 4,
                   .indent_column = 1,
                   .text = "578"},
                  {.kind = TokenKind::IntegerLiteral(),
                   .line = 2,
                   .column = 3,
                   .indent_column = 3,
                   .text = "1"},
                  {.kind = TokenKind::IntegerLiteral(),
                   .line = 2,
                   .column = 6,
                   .indent_column = 3,
                   .text = "2"},
                  {.kind = TokenKind::IntegerLiteral(),
                   .line = 3,
                   .column = 1,
                   .indent_column = 1,
                   .text = "0x12_3ABC"},
                  {.kind = TokenKind::IntegerLiteral(),
                   .line = 4,
                   .column = 1,
                   .indent_column = 1,
                   .text = "0b10_10_11"},
                  {.kind = TokenKind::IntegerLiteral(),
                   .line = 5,
                   .column = 1,
                   .indent_column = 1,
                   .text = "1_234_567"},
                  {.kind = TokenKind::RealLiteral(),
                   .line = 6,
                   .column = 1,
                   .indent_column = 1,
                   .text = "1.5e9"},
                  {.kind = TokenKind::EndOfFile(), .line = 6, .column = 6},
              }));
  // Now verify the computed values, not just the token spellings. Digit
  // separators (`_`) must be ignored when forming the value.
  auto token_12 = buffer.Tokens().begin();
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_12), 12);
  auto token_578 = buffer.Tokens().begin() + 2;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_578), 578);
  auto token_1 = buffer.Tokens().begin() + 3;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_1), 1);
  auto token_2 = buffer.Tokens().begin() + 4;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_2), 2);
  auto token_0x12_3abc = buffer.Tokens().begin() + 5;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_0x12_3abc), 0x12'3abc);
  auto token_0b10_10_11 = buffer.Tokens().begin() + 6;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_0b10_10_11), 0b10'10'11);
  auto token_1_234_567 = buffer.Tokens().begin() + 7;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_1_234_567), 1'234'567);
  // `1.5e9` is stored as mantissa 15 with decimal exponent 8 (15 * 10^8).
  auto token_1_5e9 = buffer.Tokens().begin() + 8;
  auto value_1_5e9 = buffer.GetRealLiteral(*token_1_5e9);
  EXPECT_EQ(value_1_5e9.Mantissa().getZExtValue(), 15);
  EXPECT_EQ(value_1_5e9.Exponent().getSExtValue(), 8);
  EXPECT_EQ(value_1_5e9.IsDecimal(), true);
}
// Malformed numeric literals must lex as single `Error` tokens covering the
// whole malformed spelling, while adjacent valid literals still lex normally.
TEST_F(LexerTest, HandlesInvalidNumericLiterals) {
  auto buffer = Lex("14x 15_49 0x3.5q 0x3_4.5_6 0ops");
  EXPECT_TRUE(buffer.HasErrors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // Trailing alphabetic garbage makes the literal an error.
                  {.kind = TokenKind::Error(),
                   .line = 1,
                   .column = 1,
                   .indent_column = 1,
                   .text = "14x"},
                  {.kind = TokenKind::IntegerLiteral(),
                   .line = 1,
                   .column = 5,
                   .indent_column = 1,
                   .text = "15_49"},
                  {.kind = TokenKind::Error(),
                   .line = 1,
                   .column = 11,
                   .indent_column = 1,
                   .text = "0x3.5q"},
                  // A hex real with digit separators is still valid.
                  {.kind = TokenKind::RealLiteral(),
                   .line = 1,
                   .column = 18,
                   .indent_column = 1,
                   .text = "0x3_4.5_6"},
                  {.kind = TokenKind::Error(),
                   .line = 1,
                   .column = 28,
                   .indent_column = 1,
                   .text = "0ops"},
                  {.kind = TokenKind::EndOfFile(), .line = 1, .column = 32},
              }));
}
// Exercises the boundary rules for where a numeric literal ends: when a `.`,
// sign, or exponent belongs to the literal versus starting a new token.
TEST_F(LexerTest, SplitsNumericLiteralsProperly) {
  llvm::StringLiteral source_text = R"(
1.
.2
3.+foo
4.0-bar
5.0e+123+456
6.0e+1e+2
1e7
8..10
9.0.9.5
10.foo
11.0.foo
12e+1
13._
)";
  auto buffer = Lex(source_text);
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // `1.` — a trailing period is not part of the literal.
                  {.kind = TokenKind::IntegerLiteral(), .text = "1"},
                  {.kind = TokenKind::Period()},
                  // newline
                  // `.2` — a leading period is not part of the literal.
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::IntegerLiteral(), .text = "2"},
                  // newline
                  {.kind = TokenKind::IntegerLiteral(), .text = "3"},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::Plus()},
                  {.kind = TokenKind::Identifier(), .text = "foo"},
                  // newline
                  // `-` after a real literal starts a new token.
                  {.kind = TokenKind::RealLiteral(), .text = "4.0"},
                  {.kind = TokenKind::Minus()},
                  {.kind = TokenKind::Identifier(), .text = "bar"},
                  // newline
                  // `+` is consumed only as part of the exponent.
                  {.kind = TokenKind::RealLiteral(), .text = "5.0e+123"},
                  {.kind = TokenKind::Plus()},
                  {.kind = TokenKind::IntegerLiteral(), .text = "456"},
                  // newline
                  // A second exponent marker makes the literal an error.
                  {.kind = TokenKind::Error(), .text = "6.0e+1e"},
                  {.kind = TokenKind::Plus()},
                  {.kind = TokenKind::IntegerLiteral(), .text = "2"},
                  // newline
                  // `1e7` without `+` is not a valid literal spelling.
                  {.kind = TokenKind::Error(), .text = "1e7"},
                  // newline
                  // `8..10` — `..` splits into two period tokens.
                  {.kind = TokenKind::IntegerLiteral(), .text = "8"},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::IntegerLiteral(), .text = "10"},
                  // newline
                  {.kind = TokenKind::RealLiteral(), .text = "9.0"},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::RealLiteral(), .text = "9.5"},
                  // newline
                  // A period followed by letters inside a literal is an error.
                  {.kind = TokenKind::Error(), .text = "10.foo"},
                  // newline
                  {.kind = TokenKind::RealLiteral(), .text = "11.0"},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::Identifier(), .text = "foo"},
                  // newline
                  {.kind = TokenKind::Error(), .text = "12e"},
                  {.kind = TokenKind::Plus()},
                  {.kind = TokenKind::IntegerLiteral(), .text = "1"},
                  // newline
                  // `_` after the period lexes as the underscore keyword.
                  {.kind = TokenKind::IntegerLiteral(), .text = "13"},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::UnderscoreKeyword()},
                  // newline
                  {.kind = TokenKind::EndOfFile()},
              }));
}
// Unknown characters (including multi-byte UTF-8 and embedded NULs) must be
// grouped into `Error` tokens without derailing adjacent valid tokens. The
// explicit `sizeof - 1` StringRef keeps the embedded `\0` inside the input.
TEST_F(LexerTest, HandlesGarbageCharacters) {
  constexpr char GarbageText[] = "$$💩-$\n$\0$12$\n\"\n\"\\";
  auto buffer = Lex(llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          // Consecutive garbage characters coalesce into one error token.
          {.kind = TokenKind::Error(),
           .line = 1,
           .column = 1,
           .text = llvm::StringRef("$$💩", 6)},
          // 💩 takes 4 bytes, and we count column as bytes offset.
          {.kind = TokenKind::Minus(), .line = 1, .column = 7},
          {.kind = TokenKind::Error(), .line = 1, .column = 8, .text = "$"},
          // newline
          // The embedded NUL is treated as just another garbage byte.
          {.kind = TokenKind::Error(),
           .line = 2,
           .column = 1,
           .text = llvm::StringRef("$\0$", 3)},
          {.kind = TokenKind::IntegerLiteral(),
           .line = 2,
           .column = 4,
           .text = "12"},
          {.kind = TokenKind::Error(), .line = 2, .column = 6, .text = "$"},
          // newline
          // An unterminated `"` at end of line becomes an error token.
          {.kind = TokenKind::Error(),
           .line = 3,
           .column = 1,
           .text = llvm::StringRef("\"", 1)},
          // newline
          {.kind = TokenKind::Error(),
           .line = 4,
           .column = 1,
           .text = llvm::StringRef("\"", 1)},
          {.kind = TokenKind::Backslash(),
           .line = 4,
           .column = 2,
           .text = llvm::StringRef("\\", 1)},
          {.kind = TokenKind::EndOfFile(), .line = 4, .column = 3},
      }));
}
// Spot-checks symbol lexing: max-munch tokenization, whitespace-separated
// symbols, and a sampling of single-character symbols.
TEST_F(LexerTest, Symbols) {
  // We don't need to exhaustively test symbols here as they're handled with
  // common code, but we want to check specific patterns to verify things like
  // max-munch rule and handling of interesting symbols.
  auto buffer = Lex("<<<");
  EXPECT_FALSE(buffer.HasErrors());
  // Max-munch: `<<` is taken greedily, leaving a lone `<`.
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::LessLess()},
                          {TokenKind::Less()},
                          {TokenKind::EndOfFile()},
                      }));
  buffer = Lex("<<=>>");
  EXPECT_FALSE(buffer.HasErrors());
  // `<<=` munches three characters; the remainder is `>>`.
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::LessLessEqual()},
                          {TokenKind::GreaterGreater()},
                          {TokenKind::EndOfFile()},
                      }));
  buffer = Lex("< <=> >");
  EXPECT_FALSE(buffer.HasErrors());
  // Whitespace breaks munching, and `<=>` lexes as the spaceship symbol.
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::Less()},
                          {TokenKind::LessEqualGreater()},
                          {TokenKind::Greater()},
                          {TokenKind::EndOfFile()},
                      }));
  buffer = Lex("\\/?@&^!");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::Backslash()},
                          {TokenKind::Slash()},
                          {TokenKind::Question()},
                          {TokenKind::At()},
                          {TokenKind::Amp()},
                          {TokenKind::Caret()},
                          {TokenKind::Exclaim()},
                          {TokenKind::EndOfFile()},
                      }));
}
  350. TEST_F(LexerTest, Parens) {
  351. auto buffer = Lex("()");
  352. EXPECT_FALSE(buffer.HasErrors());
  353. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  354. {TokenKind::OpenParen()},
  355. {TokenKind::CloseParen()},
  356. {TokenKind::EndOfFile()},
  357. }));
  358. buffer = Lex("((()()))");
  359. EXPECT_FALSE(buffer.HasErrors());
  360. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  361. {TokenKind::OpenParen()},
  362. {TokenKind::OpenParen()},
  363. {TokenKind::OpenParen()},
  364. {TokenKind::CloseParen()},
  365. {TokenKind::OpenParen()},
  366. {TokenKind::CloseParen()},
  367. {TokenKind::CloseParen()},
  368. {TokenKind::CloseParen()},
  369. {TokenKind::EndOfFile()},
  370. }));
  371. }
  372. TEST_F(LexerTest, CurlyBraces) {
  373. auto buffer = Lex("{}");
  374. EXPECT_FALSE(buffer.HasErrors());
  375. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  376. {TokenKind::OpenCurlyBrace()},
  377. {TokenKind::CloseCurlyBrace()},
  378. {TokenKind::EndOfFile()},
  379. }));
  380. buffer = Lex("{{{}{}}}");
  381. EXPECT_FALSE(buffer.HasErrors());
  382. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  383. {TokenKind::OpenCurlyBrace()},
  384. {TokenKind::OpenCurlyBrace()},
  385. {TokenKind::OpenCurlyBrace()},
  386. {TokenKind::CloseCurlyBrace()},
  387. {TokenKind::OpenCurlyBrace()},
  388. {TokenKind::CloseCurlyBrace()},
  389. {TokenKind::CloseCurlyBrace()},
  390. {TokenKind::CloseCurlyBrace()},
  391. {TokenKind::EndOfFile()},
  392. }));
  393. }
// Verifies the opening/closing bracket cross-links: for every open token the
// buffer reports its matching close token and vice versa, at several nesting
// depths. The token-order walks below are deliberately sequential; each `*it++`
// consumes exactly one token in lexed order.
TEST_F(LexerTest, MatchingGroups) {
  {
    // Two sibling groups: `()` then `{}`.
    TokenizedBuffer buffer = Lex("(){}");
    ASSERT_FALSE(buffer.HasErrors());
    auto it = buffer.Tokens().begin();
    auto open_paren_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    auto open_curly_token = *it++;
    auto close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    // Only the EOF token should remain.
    auto eof_token = *it++;
    EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::EndOfFile());
    EXPECT_EQ(buffer.Tokens().end(), it);
  }
  {
    // Mixed nesting: `({x})`, `{(y)}`, and doubly nested `{{((z))}}`.
    TokenizedBuffer buffer = Lex("({x}){(y)} {{((z))}}");
    ASSERT_FALSE(buffer.HasErrors());
    auto it = buffer.Tokens().begin();
    // `({x})`: curly nested inside paren.
    auto open_paren_token = *it++;
    auto open_curly_token = *it++;
    ASSERT_EQ("x", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    auto close_curly_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    // `{(y)}`: paren nested inside curly.
    open_curly_token = *it++;
    open_paren_token = *it++;
    ASSERT_EQ("y", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    close_paren_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    // `{{((z))}}`: two levels of curly around two levels of paren.
    open_curly_token = *it++;
    auto inner_open_curly_token = *it++;
    open_paren_token = *it++;
    auto inner_open_paren_token = *it++;
    ASSERT_EQ("z", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    auto inner_close_paren_token = *it++;
    close_paren_token = *it++;
    auto inner_close_curly_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(inner_close_curly_token,
              buffer.GetMatchedClosingToken(inner_open_curly_token));
    EXPECT_EQ(inner_open_curly_token,
              buffer.GetMatchedOpeningToken(inner_close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(inner_close_paren_token,
              buffer.GetMatchedClosingToken(inner_open_paren_token));
    EXPECT_EQ(inner_open_paren_token,
              buffer.GetMatchedOpeningToken(inner_close_paren_token));
    auto eof_token = *it++;
    EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::EndOfFile());
    EXPECT_EQ(buffer.Tokens().end(), it);
  }
}
// Checks error recovery for unbalanced brackets: missing closers are
// synthesized as recovery tokens, while stray closers become `Error` tokens.
TEST_F(LexerTest, MismatchedGroups) {
  // An unclosed `{` gets a synthesized recovery `}` before EOF.
  auto buffer = Lex("{");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {TokenKind::OpenCurlyBrace()},
                  {.kind = TokenKind::CloseCurlyBrace(), .recovery = true},
                  {TokenKind::EndOfFile()},
              }));
  // A closer with no opener is an error token, not a recovery token.
  buffer = Lex("}");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Error(), .text = "}"},
                          {TokenKind::EndOfFile()},
                      }));
  // `{(}` closes the inner paren with a recovery `)` at the `}` position.
  buffer = Lex("{(}");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::OpenCurlyBrace(), .column = 1},
          {.kind = TokenKind::OpenParen(), .column = 2},
          {.kind = TokenKind::CloseParen(), .column = 3, .recovery = true},
          {.kind = TokenKind::CloseCurlyBrace(), .column = 3},
          {TokenKind::EndOfFile()},
      }));
  // `)({)` combines a stray closer with a recovery closer for the `{`.
  buffer = Lex(")({)");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::Error(), .column = 1, .text = ")"},
          {.kind = TokenKind::OpenParen(), .column = 2},
          {.kind = TokenKind::OpenCurlyBrace(), .column = 3},
          {.kind = TokenKind::CloseCurlyBrace(), .column = 4, .recovery = true},
          {.kind = TokenKind::CloseParen(), .column = 4},
          {TokenKind::EndOfFile()},
      }));
}
// Verifies leading/trailing whitespace flags, including around synthesized
// recovery tokens. The `space` array holds one entry per *boundary* between
// tokens (plus the start and end of file): entry `i` is both the trailing
// flag of token `i-1` and the leading flag of token `i`, which is why the
// loop reads `space[pos]`, increments, and the next iteration reuses the new
// `pos` for the following token's leading check.
TEST_F(LexerTest, Whitespace) {
  auto buffer = Lex("{( } {(");
  // Whether there should be whitespace before/after each token.
  bool space[] = {true,
                  // {
                  false,
                  // (
                  true,
                  // inserted )
                  true,
                  // }
                  true,
                  // {
                  false,
                  // (
                  true,
                  // inserted )
                  true,
                  // inserted }
                  true,
                  // EOF
                  false};
  int pos = 0;
  for (TokenizedBuffer::Token token : buffer.Tokens()) {
    ASSERT_LT(pos, std::size(space));
    EXPECT_THAT(buffer.HasLeadingWhitespace(token), Eq(space[pos]));
    ++pos;
    ASSERT_LT(pos, std::size(space));
    EXPECT_THAT(buffer.HasTrailingWhitespace(token), Eq(space[pos]));
  }
  // Every boundary (9 tokens -> 10 boundaries) must have been consumed.
  ASSERT_EQ(pos + 1, std::size(space));
}
// Keywords lex as their dedicated token kinds, with column/indent tracking.
// NOTE(review): `.column = 4` implies three spaces before `fn` in the first
// input; the literal's leading whitespace looks collapsed — verify against
// the upstream source.
TEST_F(LexerTest, Keywords) {
  auto buffer = Lex(" fn");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::FnKeyword(), .column = 4, .indent_column = 4},
          {TokenKind::EndOfFile()},
      }));
  // One token kind per keyword, in source order.
  buffer = Lex("and or not if else for loop return var break continue _");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::AndKeyword()},
                          {TokenKind::OrKeyword()},
                          {TokenKind::NotKeyword()},
                          {TokenKind::IfKeyword()},
                          {TokenKind::ElseKeyword()},
                          {TokenKind::ForKeyword()},
                          {TokenKind::LoopKeyword()},
                          {TokenKind::ReturnKeyword()},
                          {TokenKind::VarKeyword()},
                          {TokenKind::BreakKeyword()},
                          {TokenKind::ContinueKeyword()},
                          {TokenKind::UnderscoreKeyword()},
                          {TokenKind::EndOfFile()},
                      }));
}
// Comments produce no tokens; surrounding tokens keep correct positions.
TEST_F(LexerTest, Comments) {
  auto buffer = Lex(" ;\n // foo\n ;\n");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::Semi(),
                   .line = 1,
                   .column = 2,
                   .indent_column = 2},
                  // The comment line contributes no tokens at all.
                  {.kind = TokenKind::Semi(),
                   .line = 3,
                   .column = 3,
                   .indent_column = 3},
                  {.kind = TokenKind::EndOfFile(), .line = 3, .column = 4},
              }));
  buffer = Lex("// foo\n//\n// bar");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile()}}));
  // Make sure weird characters aren't a problem.
  buffer = Lex(" // foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile()}}));
  // Make sure we can lex a comment at the end of the input.
  buffer = Lex("//");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile()}}));
}
  606. TEST_F(LexerTest, InvalidComments) {
  607. llvm::StringLiteral testcases[] = {
  608. " /// foo\n",
  609. "foo // bar\n",
  610. "//! hello",
  611. " //world",
  612. };
  613. for (llvm::StringLiteral testcase : testcases) {
  614. auto buffer = Lex(testcase);
  615. EXPECT_TRUE(buffer.HasErrors());
  616. }
  617. }
// Identifier lexing: character-set coverage, keyword prefixes, indentation
// tracking, and (implicitly) interning of repeated spellings.
// NOTE(review): `.column = 4` implies three leading spaces in the first and
// last inputs; the literals' whitespace looks collapsed — verify against the
// upstream source.
TEST_F(LexerTest, Identifiers) {
  auto buffer = Lex(" foobar");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Identifier(),
                           .column = 4,
                           .indent_column = 4,
                           .text = "foobar"},
                          {TokenKind::EndOfFile()},
                      }));
  // Check different kinds of identifier character sequences.
  buffer = Lex("_foo_bar");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Identifier(), .text = "_foo_bar"},
                          {TokenKind::EndOfFile()},
                      }));
  buffer = Lex("foo2bar00");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::Identifier(), .text = "foo2bar00"},
                  {TokenKind::EndOfFile()},
              }));
  // Check that we can parse identifiers that start with a keyword.
  buffer = Lex("fnord");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Identifier(), .text = "fnord"},
                          {TokenKind::EndOfFile()},
                      }));
  // Check multiple identifiers with indent and interning.
  buffer = Lex(" foo;bar\nbar \n foo\tfoo");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::Identifier(),
                   .line = 1,
                   .column = 4,
                   .indent_column = 4,
                   .text = "foo"},
                  {.kind = TokenKind::Semi()},
                  {.kind = TokenKind::Identifier(),
                   .line = 1,
                   .column = 8,
                   .indent_column = 4,
                   .text = "bar"},
                  {.kind = TokenKind::Identifier(),
                   .line = 2,
                   .column = 1,
                   .indent_column = 1,
                   .text = "bar"},
                  {.kind = TokenKind::Identifier(),
                   .line = 3,
                   .column = 3,
                   .indent_column = 3,
                   .text = "foo"},
                  // Tab-separated identifier on the same line; columns are
                  // byte offsets, so the tab counts as one column.
                  {.kind = TokenKind::Identifier(),
                   .line = 3,
                   .column = 7,
                   .indent_column = 3,
                   .text = "foo"},
                  {.kind = TokenKind::EndOfFile(), .line = 3, .column = 10},
              }));
}
// Valid string literal forms: simple escaped strings, multi-line block
// strings (with line continuation and hex escapes), raw `#"..."#` strings,
// and the `"""x"""` adjacent-literal split.
// NOTE(review): the expected columns (e.g. 5) imply the raw-string testcase
// lines were indented in the original; that indentation looks collapsed here
// and also affects block-string content stripping — verify against the
// upstream source.
TEST_F(LexerTest, StringLiterals) {
  llvm::StringLiteral testcase = R"(
"hello world\n"
"""foo
test \
\xAB
""" trailing
#"""#
"\0"
#"\0"foo"\1"#
"""x"""
)";
  auto buffer = Lex(testcase);
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // `\n` escape is decoded in the contents.
                  {.kind = TokenKind::StringLiteral(),
                   .line = 2,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = {"hello world\n"}},
                  // Block string: `\` at end of line joins lines, `\xAB` is a
                  // hex byte escape.
                  {.kind = TokenKind::StringLiteral(),
                   .line = 4,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = {" test \xAB\n"}},
                  {.kind = TokenKind::Identifier(),
                   .line = 7,
                   .column = 10,
                   .indent_column = 5,
                   .text = "trailing"},
                  // `#"""#` is a raw string containing a single `"`.
                  {.kind = TokenKind::StringLiteral(),
                   .line = 9,
                   .column = 7,
                   .indent_column = 7,
                   .string_contents = {"\""}},
                  // `"\0"` decodes to a single NUL byte.
                  {.kind = TokenKind::StringLiteral(),
                   .line = 11,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = llvm::StringLiteral::withInnerNUL("\0")},
                  // Raw string: escapes are not decoded, inner quotes kept.
                  {.kind = TokenKind::StringLiteral(),
                   .line = 13,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = {"\\0\"foo\"\\1"}},
                  // """x""" is three string literals, not one.
                  {.kind = TokenKind::StringLiteral(),
                   .line = 15,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = {""}},
                  {.kind = TokenKind::StringLiteral(),
                   .line = 15,
                   .column = 7,
                   .indent_column = 5,
                   .string_contents = {"x"}},
                  {.kind = TokenKind::StringLiteral(),
                   .line = 15,
                   .column = 10,
                   .indent_column = 5,
                   .string_contents = {""}},
                  {.kind = TokenKind::EndOfFile(), .line = 16, .column = 3},
              }));
}
  748. TEST_F(LexerTest, InvalidStringLiterals) {
  749. llvm::StringLiteral invalid[] = {
  750. R"(")",
  751. R"("""
  752. "")", //
  753. R"("\)", //
  754. R"("\")", //
  755. R"("\\)", //
  756. R"("\\\")", //
  757. R"(""")",
  758. R"("""
  759. )", //
  760. R"("""\)",
  761. R"(#"""
  762. """)",
  763. };
  764. for (llvm::StringLiteral test : invalid) {
  765. auto buffer = Lex(test);
  766. EXPECT_TRUE(buffer.HasErrors()) << "`" << test << "`";
  767. // We should have formed at least one error token.
  768. bool found_error = false;
  769. for (TokenizedBuffer::Token token : buffer.Tokens()) {
  770. if (buffer.GetKind(token) == TokenKind::Error()) {
  771. found_error = true;
  772. break;
  773. }
  774. }
  775. EXPECT_TRUE(found_error) << "`" << test << "`";
  776. }
  777. }
// Type literals (`iN`, `uN`, `fN`): valid spellings get dedicated token
// kinds with the size recoverable via `GetTypeLiteralSize`; near-misses
// (leading zero, trailing letters, unknown prefix) lex as plain identifiers.
// NOTE(review): the expected columns (5, 8, ...) imply the raw-string lines
// were indented by four spaces originally; that indentation looks collapsed
// here — verify against the upstream source.
TEST_F(LexerTest, TypeLiterals) {
  llvm::StringLiteral testcase = R"(
i0 i1 i20 i999999999999 i0x1
u0 u1 u64 u64b
f32 f80 f1 fi
s1
)";
  auto buffer = Lex(testcase);
  EXPECT_FALSE(buffer.HasErrors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // `i0` has a leading zero, so it is not a type literal.
                  {.kind = TokenKind::Identifier(),
                   .line = 2,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"i0"}},
                  {.kind = TokenKind::IntegerTypeLiteral(),
                   .line = 2,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"i1"}},
                  {.kind = TokenKind::IntegerTypeLiteral(),
                   .line = 2,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"i20"}},
                  // Very large widths are still accepted as literals.
                  {.kind = TokenKind::IntegerTypeLiteral(),
                   .line = 2,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"i999999999999"}},
                  // A hex suffix disqualifies the spelling.
                  {.kind = TokenKind::Identifier(),
                   .line = 2,
                   .column = 29,
                   .indent_column = 5,
                   .text = {"i0x1"}},
                  {.kind = TokenKind::Identifier(),
                   .line = 3,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"u0"}},
                  {.kind = TokenKind::UnsignedIntegerTypeLiteral(),
                   .line = 3,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"u1"}},
                  {.kind = TokenKind::UnsignedIntegerTypeLiteral(),
                   .line = 3,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"u64"}},
                  // Trailing letters make it an identifier.
                  {.kind = TokenKind::Identifier(),
                   .line = 3,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"u64b"}},
                  {.kind = TokenKind::FloatingPointTypeLiteral(),
                   .line = 4,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"f32"}},
                  {.kind = TokenKind::FloatingPointTypeLiteral(),
                   .line = 4,
                   .column = 9,
                   .indent_column = 5,
                   .text = {"f80"}},
                  {.kind = TokenKind::FloatingPointTypeLiteral(),
                   .line = 4,
                   .column = 13,
                   .indent_column = 5,
                   .text = {"f1"}},
                  {.kind = TokenKind::Identifier(),
                   .line = 4,
                   .column = 16,
                   .indent_column = 5,
                   .text = {"fi"}},
                  // `s` is not a type-literal prefix.
                  {.kind = TokenKind::Identifier(),
                   .line = 5,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"s1"}},
                  {.kind = TokenKind::EndOfFile(), .line = 6, .column = 3},
              }));
  // Verify the decoded bit-width for each type-literal token; indices below
  // follow the token order asserted above.
  auto token_i1 = buffer.Tokens().begin() + 1;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i1), 1);
  auto token_i20 = buffer.Tokens().begin() + 2;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i20), 20);
  auto token_i999999999999 = buffer.Tokens().begin() + 3;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i999999999999), 999999999999ull);
  auto token_u1 = buffer.Tokens().begin() + 6;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u1), 1);
  auto token_u64 = buffer.Tokens().begin() + 7;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u64), 64);
  auto token_f32 = buffer.Tokens().begin() + 9;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f32), 32);
  auto token_f80 = buffer.Tokens().begin() + 10;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f80), 80);
  auto token_f1 = buffer.Tokens().begin() + 11;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f1), 1);
}
TEST_F(LexerTest, Diagnostics) {
  // One deliberately malformed construct per line; each EXPECT_CALL below is
  // keyed to the exact (line, column) of the offending character in this
  // raw string, so the 4-space indent is load-bearing.
  llvm::StringLiteral testcase = R"(
    // Hello!
    var String x; // trailing comment
    //no space after comment
    "hello\bworld\xab"
    0x123abc
    #"
  )";

  Testing::MockDiagnosticConsumer consumer;
  // Line 3: comments may not trail code on the same line.
  EXPECT_CALL(consumer, HandleDiagnostic(AllOf(
                            DiagnosticAt(3, 19),
                            DiagnosticMessage(HasSubstr("Trailing comment")))));
  // Line 4: `//` must be followed by whitespace.
  EXPECT_CALL(consumer,
              HandleDiagnostic(AllOf(
                  DiagnosticAt(4, 7),
                  DiagnosticMessage(HasSubstr("Whitespace is required")))));
  // Line 5: `\b` is not a recognized escape sequence.
  EXPECT_CALL(
      consumer,
      HandleDiagnostic(AllOf(
          DiagnosticAt(5, 12),
          DiagnosticMessage(HasSubstr("Unrecognized escape sequence `b`")))));
  // Line 5: `\xab` uses lowercase hex digits; uppercase is required.
  EXPECT_CALL(
      consumer,
      HandleDiagnostic(AllOf(
          DiagnosticAt(5, 20),
          DiagnosticMessage(HasSubstr("two uppercase hexadecimal digits")))));
  // Line 6: lowercase hex digits are invalid in a `0x` literal here.
  EXPECT_CALL(
      consumer,
      HandleDiagnostic(AllOf(
          DiagnosticAt(6, 10),
          DiagnosticMessage(HasSubstr("Invalid digit 'a' in hexadecimal")))));
  // Line 7: `#` begins no valid token.
  EXPECT_CALL(consumer,
              HandleDiagnostic(AllOf(
                  DiagnosticAt(7, 5),
                  DiagnosticMessage(HasSubstr("unrecognized character")))));
  Lex(testcase, consumer);
}
  916. auto GetAndDropLine(llvm::StringRef& text) -> std::string {
  917. auto newline_offset = text.find_first_of('\n');
  918. llvm::StringRef line = text.slice(0, newline_offset);
  919. if (newline_offset != llvm::StringRef::npos) {
  920. text = text.substr(newline_offset + 1);
  921. } else {
  922. text = "";
  923. }
  924. return line.str();
  925. }
TEST_F(LexerTest, Printing) {
  // Simplest case: one token plus EndOfFile, checked line by line against the
  // exact textual dump format.
  auto buffer = Lex(";");
  ASSERT_FALSE(buffer.HasErrors());
  std::string print_storage;
  llvm::raw_string_ostream print_stream(print_storage);
  buffer.Print(print_stream);
  llvm::StringRef print = print_stream.str();
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 0, kind: 'Semi', line: 1, column: 1, "
                    "indent: 1, spelling: ';', has_trailing_space: true }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 1, kind: 'EndOfFile', line: 1, column: 2, "
                    "indent: 1, spelling: '' }"));
  EXPECT_TRUE(print.empty()) << print;

  // Test kind padding: mixed token kinds exercise the optional
  // closing_token/opening_token/identifier fields in the dump.
  buffer = Lex("(;foo;)");
  ASSERT_FALSE(buffer.HasErrors());
  // Reuse the stream; clearing the backing string and re-flushing via str()
  // gives a fresh dump in `print_storage`.
  print_storage.clear();
  buffer.Print(print_stream);
  print = print_stream.str();
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 0, kind: 'OpenParen', line: 1, column: "
                    "1, indent: 1, spelling: '(', closing_token: 4 }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 1, kind: 'Semi', line: 1, column: "
                    "2, indent: 1, spelling: ';' }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 2, kind: 'Identifier', line: 1, column: "
                    "3, indent: 1, spelling: 'foo', identifier: 0 }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 3, kind: 'Semi', line: 1, column: "
                    "6, indent: 1, spelling: ';' }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 4, kind: 'CloseParen', line: 1, column: "
                    "7, indent: 1, spelling: ')', opening_token: 0, "
                    "has_trailing_space: true }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 5, kind: 'EndOfFile', line: 1, column: "
                    "8, indent: 1, spelling: '' }"));
  EXPECT_TRUE(print.empty()) << print;

  // Test digit padding with max values of 9, 10, and 11: ten newlines push
  // the last tokens to line 11, columns 9-11, covering 1- vs 2-digit widths.
  buffer = Lex(";\n\n\n\n\n\n\n\n\n\n        ;;");
  ASSERT_FALSE(buffer.HasErrors());
  print_storage.clear();
  buffer.Print(print_stream);
  print = print_stream.str();
  EXPECT_THAT(
      GetAndDropLine(print),
      StrEq("token: { index: 0, kind: 'Semi', line: 1, column: 1, "
            "indent: 1, spelling: ';', has_trailing_space: true }"));
  EXPECT_THAT(
      GetAndDropLine(print),
      StrEq("token: { index: 1, kind: 'Semi', line: 11, column: 9, "
            "indent: 9, spelling: ';' }"));
  EXPECT_THAT(
      GetAndDropLine(print),
      StrEq("token: { index: 2, kind: 'Semi', line: 11, column: 10, "
            "indent: 9, spelling: ';', has_trailing_space: true }"));
  EXPECT_THAT(
      GetAndDropLine(print),
      StrEq("token: { index: 3, kind: 'EndOfFile', line: 11, column: 11, "
            "indent: 9, spelling: '' }"));
  EXPECT_TRUE(print.empty()) << print;
}
TEST_F(LexerTest, PrintingAsYaml) {
  // Test that we can parse this into YAML and verify line and indent data.
  // The input mixes a leading blank line, a differently-indented first token,
  // two tokens sharing a line, and a long run of trailing newlines so that
  // EndOfFile lands well past the last real token.
  auto buffer = Lex("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n\n");
  ASSERT_FALSE(buffer.HasErrors());
  std::string print_output;
  llvm::raw_string_ostream print_stream(print_output);
  buffer.Print(print_stream);
  // Flush explicitly so `print_output` holds the full dump before parsing.
  print_stream.flush();

  // Each dumped token must round-trip through the YAML parser as a mapping
  // with string-valued fields.
  EXPECT_THAT(Yaml::Value::FromText(print_output),
              ElementsAre(Yaml::MappingValue{
                  {"token", Yaml::MappingValue{{"index", "0"},
                                               {"kind", "Semi"},
                                               {"line", "2"},
                                               {"column", "2"},
                                               {"indent", "2"},
                                               {"spelling", ";"},
                                               {"has_trailing_space", "true"}}},
                  {"token", Yaml::MappingValue{{"index", "1"},
                                               {"kind", "Semi"},
                                               {"line", "5"},
                                               {"column", "1"},
                                               {"indent", "1"},
                                               {"spelling", ";"},
                                               {"has_trailing_space", "true"}}},
                  {"token", Yaml::MappingValue{{"index", "2"},
                                               {"kind", "Semi"},
                                               {"line", "5"},
                                               {"column", "3"},
                                               {"indent", "1"},
                                               {"spelling", ";"},
                                               {"has_trailing_space", "true"}}},
                  // EndOfFile reports the final line even though it is blank.
                  {"token", Yaml::MappingValue{{"index", "3"},
                                               {"kind", "EndOfFile"},
                                               {"line", "15"},
                                               {"column", "1"},
                                               {"indent", "1"},
                                               {"spelling", ""}}}}));
}
  1028. } // namespace
  1029. } // namespace Carbon