tokenized_buffer_test.cpp 43 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#include "toolchain/lexer/tokenized_buffer.h"

#include <gmock/gmock.h>
#include <gtest/gtest.h>

#include <algorithm>
#include <iterator>

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/Sequence.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/Twine.h"
#include "llvm/Support/FormatVariadic.h"
#include "llvm/Support/SourceMgr.h"
#include "llvm/Support/raw_ostream.h"
#include "toolchain/common/yaml_test_helpers.h"
#include "toolchain/diagnostics/diagnostic_emitter.h"
#include "toolchain/diagnostics/mocks.h"
#include "toolchain/lexer/tokenized_buffer_test_helpers.h"
  18. namespace Carbon::Testing {
  19. namespace {
  20. using ::testing::_;
  21. using ::testing::ElementsAre;
  22. using ::testing::Eq;
  23. using ::testing::HasSubstr;
  24. using ::testing::StrEq;
  25. class LexerTest : public ::testing::Test {
  26. protected:
  27. auto GetSourceBuffer(llvm::Twine text) -> SourceBuffer& {
  28. source_storage.push_back(
  29. std::move(*SourceBuffer::CreateFromText(text.str())));
  30. return source_storage.back();
  31. }
  32. auto Lex(llvm::Twine text,
  33. DiagnosticConsumer& consumer = ConsoleDiagnosticConsumer())
  34. -> TokenizedBuffer {
  35. return TokenizedBuffer::Lex(GetSourceBuffer(text), consumer);
  36. }
  37. llvm::SmallVector<SourceBuffer, 16> source_storage;
  38. };
  39. TEST_F(LexerTest, HandlesEmptyBuffer) {
  40. auto buffer = Lex("");
  41. EXPECT_FALSE(buffer.has_errors());
  42. EXPECT_THAT(buffer,
  43. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile}}));
  44. }
  45. TEST_F(LexerTest, TracksLinesAndColumns) {
  46. auto buffer = Lex("\n ;;\n ;;;\n x\"foo\" '''baz\n a\n ''' y");
  47. EXPECT_FALSE(buffer.has_errors());
  48. EXPECT_THAT(
  49. buffer,
  50. HasTokens(llvm::ArrayRef<ExpectedToken>{
  51. {.kind = TokenKind::Semi, .line = 2, .column = 3, .indent_column = 3},
  52. {.kind = TokenKind::Semi, .line = 2, .column = 4, .indent_column = 3},
  53. {.kind = TokenKind::Semi, .line = 3, .column = 4, .indent_column = 4},
  54. {.kind = TokenKind::Semi, .line = 3, .column = 5, .indent_column = 4},
  55. {.kind = TokenKind::Semi, .line = 3, .column = 6, .indent_column = 4},
  56. {.kind = TokenKind::Identifier,
  57. .line = 4,
  58. .column = 4,
  59. .indent_column = 4,
  60. .text = "x"},
  61. {.kind = TokenKind::StringLiteral,
  62. .line = 4,
  63. .column = 5,
  64. .indent_column = 4},
  65. {.kind = TokenKind::StringLiteral,
  66. .line = 4,
  67. .column = 11,
  68. .indent_column = 4},
  69. {.kind = TokenKind::Identifier,
  70. .line = 6,
  71. .column = 6,
  72. .indent_column = 11,
  73. .text = "y"},
  74. {.kind = TokenKind::EndOfFile, .line = 6, .column = 7},
  75. }));
  76. }
  77. TEST_F(LexerTest, HandlesNumericLiteral) {
  78. auto buffer = Lex("12-578\n 1 2\n0x12_3ABC\n0b10_10_11\n1_234_567\n1.5e9");
  79. EXPECT_FALSE(buffer.has_errors());
  80. ASSERT_THAT(buffer,
  81. HasTokens(llvm::ArrayRef<ExpectedToken>{
  82. {.kind = TokenKind::IntegerLiteral,
  83. .line = 1,
  84. .column = 1,
  85. .indent_column = 1,
  86. .text = "12"},
  87. {.kind = TokenKind::Minus,
  88. .line = 1,
  89. .column = 3,
  90. .indent_column = 1},
  91. {.kind = TokenKind::IntegerLiteral,
  92. .line = 1,
  93. .column = 4,
  94. .indent_column = 1,
  95. .text = "578"},
  96. {.kind = TokenKind::IntegerLiteral,
  97. .line = 2,
  98. .column = 3,
  99. .indent_column = 3,
  100. .text = "1"},
  101. {.kind = TokenKind::IntegerLiteral,
  102. .line = 2,
  103. .column = 6,
  104. .indent_column = 3,
  105. .text = "2"},
  106. {.kind = TokenKind::IntegerLiteral,
  107. .line = 3,
  108. .column = 1,
  109. .indent_column = 1,
  110. .text = "0x12_3ABC"},
  111. {.kind = TokenKind::IntegerLiteral,
  112. .line = 4,
  113. .column = 1,
  114. .indent_column = 1,
  115. .text = "0b10_10_11"},
  116. {.kind = TokenKind::IntegerLiteral,
  117. .line = 5,
  118. .column = 1,
  119. .indent_column = 1,
  120. .text = "1_234_567"},
  121. {.kind = TokenKind::RealLiteral,
  122. .line = 6,
  123. .column = 1,
  124. .indent_column = 1,
  125. .text = "1.5e9"},
  126. {.kind = TokenKind::EndOfFile, .line = 6, .column = 6},
  127. }));
  128. auto token_12 = buffer.tokens().begin();
  129. EXPECT_EQ(buffer.GetIntegerLiteral(*token_12), 12);
  130. auto token_578 = buffer.tokens().begin() + 2;
  131. EXPECT_EQ(buffer.GetIntegerLiteral(*token_578), 578);
  132. auto token_1 = buffer.tokens().begin() + 3;
  133. EXPECT_EQ(buffer.GetIntegerLiteral(*token_1), 1);
  134. auto token_2 = buffer.tokens().begin() + 4;
  135. EXPECT_EQ(buffer.GetIntegerLiteral(*token_2), 2);
  136. auto token_0x12_3abc = buffer.tokens().begin() + 5;
  137. EXPECT_EQ(buffer.GetIntegerLiteral(*token_0x12_3abc), 0x12'3abc);
  138. auto token_0b10_10_11 = buffer.tokens().begin() + 6;
  139. EXPECT_EQ(buffer.GetIntegerLiteral(*token_0b10_10_11), 0b10'10'11);
  140. auto token_1_234_567 = buffer.tokens().begin() + 7;
  141. EXPECT_EQ(buffer.GetIntegerLiteral(*token_1_234_567), 1'234'567);
  142. auto token_1_5e9 = buffer.tokens().begin() + 8;
  143. auto value_1_5e9 = buffer.GetRealLiteral(*token_1_5e9);
  144. EXPECT_EQ(value_1_5e9.Mantissa().getZExtValue(), 15);
  145. EXPECT_EQ(value_1_5e9.Exponent().getSExtValue(), 8);
  146. EXPECT_EQ(value_1_5e9.IsDecimal(), true);
  147. }
// Invalid numeric literals lex as single Error tokens spanning the whole
// would-be literal, while adjacent valid literals still lex normally.
TEST_F(LexerTest, HandlesInvalidNumericLiterals) {
  auto buffer = Lex("14x 15_49 0x3.5q 0x3_4.5_6 0ops");
  EXPECT_TRUE(buffer.has_errors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // "14x" is rejected as a unit, not split into "14" + "x".
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 1,
                   .indent_column = 1,
                   .text = "14x"},
                  // Digit separators keep this one a valid integer literal.
                  {.kind = TokenKind::IntegerLiteral,
                   .line = 1,
                   .column = 5,
                   .indent_column = 1,
                   .text = "15_49"},
                  // "0x3.5q" is rejected as a unit.
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 11,
                   .indent_column = 1,
                   .text = "0x3.5q"},
                  // Hex real literal with digit separators is valid.
                  {.kind = TokenKind::RealLiteral,
                   .line = 1,
                   .column = 18,
                   .indent_column = 1,
                   .text = "0x3_4.5_6"},
                  // "0ops" is rejected as a unit.
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 28,
                   .indent_column = 1,
                   .text = "0ops"},
                  {.kind = TokenKind::EndOfFile, .line = 1, .column = 32},
              }));
}
// Checks exactly where a numeric literal ends when followed by periods,
// operators, or identifiers — each input line below exercises one boundary
// case, and some of them are expected to lex as Error tokens.
TEST_F(LexerTest, SplitsNumericLiteralsProperly) {
  llvm::StringLiteral source_text = R"(
1.
.2
3.+foo
4.0-bar
5.0e+123+456
6.0e+1e+2
1e7
8..10
9.0.9.5
10.foo
11.0.foo
12e+1
13._
)";
  auto buffer = Lex(source_text);
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          // "1." — trailing period is a separate token.
                          {.kind = TokenKind::IntegerLiteral, .text = "1"},
                          {.kind = TokenKind::Period},
                          // newline
                          // ".2" — leading period is a separate token.
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::IntegerLiteral, .text = "2"},
                          // newline
                          {.kind = TokenKind::IntegerLiteral, .text = "3"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::Identifier, .text = "foo"},
                          // newline
                          {.kind = TokenKind::RealLiteral, .text = "4.0"},
                          {.kind = TokenKind::Minus},
                          {.kind = TokenKind::Identifier, .text = "bar"},
                          // newline
                          // A signed exponent is part of the literal; the
                          // second "+" is not.
                          {.kind = TokenKind::RealLiteral, .text = "5.0e+123"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntegerLiteral, .text = "456"},
                          // newline
                          // A second "e" extends (and invalidates) the
                          // literal up to, but not including, the "+".
                          {.kind = TokenKind::Error, .text = "6.0e+1e"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntegerLiteral, .text = "2"},
                          // newline
                          {.kind = TokenKind::Error, .text = "1e7"},
                          // newline
                          // "8..10" — ".." does not extend the literal.
                          {.kind = TokenKind::IntegerLiteral, .text = "8"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::IntegerLiteral, .text = "10"},
                          // newline
                          {.kind = TokenKind::RealLiteral, .text = "9.0"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::RealLiteral, .text = "9.5"},
                          // newline
                          // A letter after the period folds into one error.
                          {.kind = TokenKind::Error, .text = "10.foo"},
                          // newline
                          // ...but after a complete real literal it splits.
                          {.kind = TokenKind::RealLiteral, .text = "11.0"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Identifier, .text = "foo"},
                          // newline
                          {.kind = TokenKind::Error, .text = "12e"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntegerLiteral, .text = "1"},
                          // newline
                          {.kind = TokenKind::IntegerLiteral, .text = "13"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Underscore},
                          // newline
                          {.kind = TokenKind::EndOfFile},
                      }));
}
// Garbage byte sequences — multi-byte UTF-8, an embedded NUL, stray quotes —
// become Error tokens without derailing adjacent valid tokens.
TEST_F(LexerTest, HandlesGarbageCharacters) {
  // The text contains an embedded NUL, so the length is passed explicitly
  // rather than relying on C-string termination.
  constexpr char GarbageText[] = "$$💩-$\n$\0$12$\n\\\"\\\n\"x";
  auto buffer = Lex(llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::Error,
           .line = 1,
           .column = 1,
           // 💩 takes 4 bytes, and we count column as bytes offset.
           .text = llvm::StringRef("$$💩", 6)},
          {.kind = TokenKind::Minus, .line = 1, .column = 7},
          {.kind = TokenKind::Error, .line = 1, .column = 8, .text = "$"},
          // newline
          // Explicit length so the embedded NUL stays inside the text.
          {.kind = TokenKind::Error,
           .line = 2,
           .column = 1,
           .text = llvm::StringRef("$\0$", 3)},
          {.kind = TokenKind::IntegerLiteral,
           .line = 2,
           .column = 4,
           .text = "12"},
          {.kind = TokenKind::Error, .line = 2, .column = 6, .text = "$"},
          // newline
          {.kind = TokenKind::Backslash, .line = 3, .column = 1, .text = "\\"},
          {.kind = TokenKind::Error, .line = 3, .column = 2, .text = "\"\\"},
          // newline
          {.kind = TokenKind::Error, .line = 4, .column = 1, .text = "\"x"},
          {.kind = TokenKind::EndOfFile, .line = 4, .column = 3},
      }));
}
  283. TEST_F(LexerTest, Symbols) {
  284. // We don't need to exhaustively test symbols here as they're handled with
  285. // common code, but we want to check specific patterns to verify things like
  286. // max-munch rule and handling of interesting symbols.
  287. auto buffer = Lex("<<<");
  288. EXPECT_FALSE(buffer.has_errors());
  289. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  290. {TokenKind::LessLess},
  291. {TokenKind::Less},
  292. {TokenKind::EndOfFile},
  293. }));
  294. buffer = Lex("<<=>>");
  295. EXPECT_FALSE(buffer.has_errors());
  296. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  297. {TokenKind::LessLessEqual},
  298. {TokenKind::GreaterGreater},
  299. {TokenKind::EndOfFile},
  300. }));
  301. buffer = Lex("< <=> >");
  302. EXPECT_FALSE(buffer.has_errors());
  303. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  304. {TokenKind::Less},
  305. {TokenKind::LessEqualGreater},
  306. {TokenKind::Greater},
  307. {TokenKind::EndOfFile},
  308. }));
  309. buffer = Lex("\\/?@&^!");
  310. EXPECT_FALSE(buffer.has_errors());
  311. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  312. {TokenKind::Backslash},
  313. {TokenKind::Slash},
  314. {TokenKind::Question},
  315. {TokenKind::At},
  316. {TokenKind::Amp},
  317. {TokenKind::Caret},
  318. {TokenKind::Exclaim},
  319. {TokenKind::EndOfFile},
  320. }));
  321. }
  322. TEST_F(LexerTest, Parens) {
  323. auto buffer = Lex("()");
  324. EXPECT_FALSE(buffer.has_errors());
  325. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  326. {TokenKind::OpenParen},
  327. {TokenKind::CloseParen},
  328. {TokenKind::EndOfFile},
  329. }));
  330. buffer = Lex("((()()))");
  331. EXPECT_FALSE(buffer.has_errors());
  332. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  333. {TokenKind::OpenParen},
  334. {TokenKind::OpenParen},
  335. {TokenKind::OpenParen},
  336. {TokenKind::CloseParen},
  337. {TokenKind::OpenParen},
  338. {TokenKind::CloseParen},
  339. {TokenKind::CloseParen},
  340. {TokenKind::CloseParen},
  341. {TokenKind::EndOfFile},
  342. }));
  343. }
  344. TEST_F(LexerTest, CurlyBraces) {
  345. auto buffer = Lex("{}");
  346. EXPECT_FALSE(buffer.has_errors());
  347. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  348. {TokenKind::OpenCurlyBrace},
  349. {TokenKind::CloseCurlyBrace},
  350. {TokenKind::EndOfFile},
  351. }));
  352. buffer = Lex("{{{}{}}}");
  353. EXPECT_FALSE(buffer.has_errors());
  354. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  355. {TokenKind::OpenCurlyBrace},
  356. {TokenKind::OpenCurlyBrace},
  357. {TokenKind::OpenCurlyBrace},
  358. {TokenKind::CloseCurlyBrace},
  359. {TokenKind::OpenCurlyBrace},
  360. {TokenKind::CloseCurlyBrace},
  361. {TokenKind::CloseCurlyBrace},
  362. {TokenKind::CloseCurlyBrace},
  363. {TokenKind::EndOfFile},
  364. }));
  365. }
// Verifies that grouping tokens are cross-linked: GetMatchedClosingToken and
// GetMatchedOpeningToken must be inverses for every paren/brace pair,
// including nested and interleaved groups.
TEST_F(LexerTest, MatchingGroups) {
  {
    // Simple case: one paren pair followed by one brace pair.
    TokenizedBuffer buffer = Lex("(){}");
    ASSERT_FALSE(buffer.has_errors());
    auto it = buffer.tokens().begin();
    auto open_paren_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    auto open_curly_token = *it++;
    auto close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    // Only EndOfFile should remain.
    auto eof_token = *it++;
    EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::EndOfFile);
    EXPECT_EQ(buffer.tokens().end(), it);
  }
  {
    TokenizedBuffer buffer = Lex("({x}){(y)} {{((z))}}");
    ASSERT_FALSE(buffer.has_errors());
    auto it = buffer.tokens().begin();
    // "({x})": a curly pair nested inside a paren pair.
    auto open_paren_token = *it++;
    auto open_curly_token = *it++;
    ASSERT_EQ("x", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    auto close_curly_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    // "{(y)}": a paren pair nested inside a curly pair.
    open_curly_token = *it++;
    open_paren_token = *it++;
    ASSERT_EQ("y", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    close_paren_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    // "{{((z))}}": doubled curlies around doubled parens; each level must
    // link to its own partner, not the sibling level's.
    open_curly_token = *it++;
    auto inner_open_curly_token = *it++;
    open_paren_token = *it++;
    auto inner_open_paren_token = *it++;
    ASSERT_EQ("z", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    auto inner_close_paren_token = *it++;
    close_paren_token = *it++;
    auto inner_close_curly_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(inner_close_curly_token,
              buffer.GetMatchedClosingToken(inner_open_curly_token));
    EXPECT_EQ(inner_open_curly_token,
              buffer.GetMatchedOpeningToken(inner_close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(inner_close_paren_token,
              buffer.GetMatchedClosingToken(inner_open_paren_token));
    EXPECT_EQ(inner_open_paren_token,
              buffer.GetMatchedOpeningToken(inner_close_paren_token));
    auto eof_token = *it++;
    EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::EndOfFile);
    EXPECT_EQ(buffer.tokens().end(), it);
  }
}
// Recovery from mismatched grouping symbols: unmatched opens get synthesized
// closing tokens (marked .recovery = true); unmatched closes with no opener
// become Error tokens.
TEST_F(LexerTest, MismatchedGroups) {
  // Unclosed "{": a recovery "}" is synthesized before EndOfFile.
  auto buffer = Lex("{");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {TokenKind::OpenCurlyBrace},
                  {.kind = TokenKind::CloseCurlyBrace, .recovery = true},
                  {TokenKind::EndOfFile},
              }));
  // A stray "}" with no opener is an Error token, not recovered.
  buffer = Lex("}");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Error, .text = "}"},
                          {TokenKind::EndOfFile},
                      }));
  // "{(}": a recovery ")" is inserted at the "}" so the "(" closes.
  buffer = Lex("{(}");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::OpenCurlyBrace, .column = 1},
          {.kind = TokenKind::OpenParen, .column = 2},
          {.kind = TokenKind::CloseParen, .column = 3, .recovery = true},
          {.kind = TokenKind::CloseCurlyBrace, .column = 3},
          {TokenKind::EndOfFile},
      }));
  // ")({)" combines a leading stray close (error) with an unclosed "{"
  // inside "()" (recovered with a synthesized "}").
  buffer = Lex(")({)");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::Error, .column = 1, .text = ")"},
          {.kind = TokenKind::OpenParen, .column = 2},
          {.kind = TokenKind::OpenCurlyBrace, .column = 3},
          {.kind = TokenKind::CloseCurlyBrace, .column = 4, .recovery = true},
          {.kind = TokenKind::CloseParen, .column = 4},
          {TokenKind::EndOfFile},
      }));
}
// Verifies leading/trailing whitespace tracking across real and
// recovery-inserted tokens.
//
// Each entry of `space` describes one token *boundary*: entry i is whether
// whitespace separates token i-1 from token i (the start of the buffer
// counts as whitespace; the end does not). Because boundaries are shared, a
// token's trailing-whitespace flag and the next token's leading-whitespace
// flag read the same entry — hence `pos` advances only once per token below.
TEST_F(LexerTest, Whitespace) {
  auto buffer = Lex("{( } {(");
  // Whether there should be whitespace before/after each token.
  bool space[] = {true,
                  // {
                  false,
                  // (
                  true,
                  // inserted )
                  true,
                  // }
                  true,
                  // {
                  false,
                  // (
                  true,
                  // inserted )
                  true,
                  // inserted }
                  true,
                  // EOF
                  false};
  int pos = 0;
  for (TokenizedBuffer::Token token : buffer.tokens()) {
    ASSERT_LT(pos, std::size(space));
    EXPECT_THAT(buffer.HasLeadingWhitespace(token), Eq(space[pos]));
    // Intentionally no increment between the trailing check of this token
    // and the leading check of the next: they share a boundary entry.
    ++pos;
    ASSERT_LT(pos, std::size(space));
    EXPECT_THAT(buffer.HasTrailingWhitespace(token), Eq(space[pos]));
  }
  // All boundary entries must have been consumed exactly.
  ASSERT_EQ(pos + 1, std::size(space));
}
  518. TEST_F(LexerTest, Keywords) {
  519. auto buffer = Lex(" fn");
  520. EXPECT_FALSE(buffer.has_errors());
  521. EXPECT_THAT(buffer,
  522. HasTokens(llvm::ArrayRef<ExpectedToken>{
  523. {.kind = TokenKind::Fn, .column = 4, .indent_column = 4},
  524. {TokenKind::EndOfFile},
  525. }));
  526. buffer = Lex("and or not if else for return var break continue _");
  527. EXPECT_FALSE(buffer.has_errors());
  528. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  529. {TokenKind::And},
  530. {TokenKind::Or},
  531. {TokenKind::Not},
  532. {TokenKind::If},
  533. {TokenKind::Else},
  534. {TokenKind::For},
  535. {TokenKind::Return},
  536. {TokenKind::Var},
  537. {TokenKind::Break},
  538. {TokenKind::Continue},
  539. {TokenKind::Underscore},
  540. {TokenKind::EndOfFile},
  541. }));
  542. }
  543. TEST_F(LexerTest, Comments) {
  544. auto buffer = Lex(" ;\n // foo\n ;\n");
  545. EXPECT_FALSE(buffer.has_errors());
  546. EXPECT_THAT(
  547. buffer,
  548. HasTokens(llvm::ArrayRef<ExpectedToken>{
  549. {.kind = TokenKind::Semi, .line = 1, .column = 2, .indent_column = 2},
  550. {.kind = TokenKind::Semi, .line = 3, .column = 3, .indent_column = 3},
  551. {.kind = TokenKind::EndOfFile, .line = 3, .column = 4},
  552. }));
  553. buffer = Lex("// foo\n//\n// bar");
  554. EXPECT_FALSE(buffer.has_errors());
  555. EXPECT_THAT(buffer,
  556. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile}}));
  557. // Make sure weird characters aren't a problem.
  558. buffer = Lex(" // foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
  559. EXPECT_FALSE(buffer.has_errors());
  560. EXPECT_THAT(buffer,
  561. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile}}));
  562. // Make sure we can lex a comment at the end of the input.
  563. buffer = Lex("//");
  564. EXPECT_FALSE(buffer.has_errors());
  565. EXPECT_THAT(buffer,
  566. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile}}));
  567. }
  568. TEST_F(LexerTest, InvalidComments) {
  569. llvm::StringLiteral testcases[] = {
  570. " /// foo\n",
  571. "foo // bar\n",
  572. "//! hello",
  573. " //world",
  574. };
  575. for (llvm::StringLiteral testcase : testcases) {
  576. auto buffer = Lex(testcase);
  577. EXPECT_TRUE(buffer.has_errors());
  578. }
  579. }
  580. TEST_F(LexerTest, Identifiers) {
  581. auto buffer = Lex(" foobar");
  582. EXPECT_FALSE(buffer.has_errors());
  583. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  584. {.kind = TokenKind::Identifier,
  585. .column = 4,
  586. .indent_column = 4,
  587. .text = "foobar"},
  588. {TokenKind::EndOfFile},
  589. }));
  590. // Check different kinds of identifier character sequences.
  591. buffer = Lex("_foo_bar");
  592. EXPECT_FALSE(buffer.has_errors());
  593. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  594. {.kind = TokenKind::Identifier, .text = "_foo_bar"},
  595. {TokenKind::EndOfFile},
  596. }));
  597. buffer = Lex("foo2bar00");
  598. EXPECT_FALSE(buffer.has_errors());
  599. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  600. {.kind = TokenKind::Identifier, .text = "foo2bar00"},
  601. {TokenKind::EndOfFile},
  602. }));
  603. // Check that we can parse identifiers that start with a keyword.
  604. buffer = Lex("fnord");
  605. EXPECT_FALSE(buffer.has_errors());
  606. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  607. {.kind = TokenKind::Identifier, .text = "fnord"},
  608. {TokenKind::EndOfFile},
  609. }));
  610. // Check multiple identifiers with indent and interning.
  611. buffer = Lex(" foo;bar\nbar \n foo\tfoo");
  612. EXPECT_FALSE(buffer.has_errors());
  613. EXPECT_THAT(buffer,
  614. HasTokens(llvm::ArrayRef<ExpectedToken>{
  615. {.kind = TokenKind::Identifier,
  616. .line = 1,
  617. .column = 4,
  618. .indent_column = 4,
  619. .text = "foo"},
  620. {.kind = TokenKind::Semi},
  621. {.kind = TokenKind::Identifier,
  622. .line = 1,
  623. .column = 8,
  624. .indent_column = 4,
  625. .text = "bar"},
  626. {.kind = TokenKind::Identifier,
  627. .line = 2,
  628. .column = 1,
  629. .indent_column = 1,
  630. .text = "bar"},
  631. {.kind = TokenKind::Identifier,
  632. .line = 3,
  633. .column = 3,
  634. .indent_column = 3,
  635. .text = "foo"},
  636. {.kind = TokenKind::Identifier,
  637. .line = 3,
  638. .column = 7,
  639. .indent_column = 3,
  640. .text = "foo"},
  641. {.kind = TokenKind::EndOfFile, .line = 3, .column = 10},
  642. }));
  643. }
// Lexes the assorted string literal forms: simple quoted strings, block
// ('''...''') literals with indent stripping and escapes, raw (#"..."#)
// literals, and adjacent short literals.
//
// NOTE(review): the leading whitespace of each line inside this raw string
// is significant — the column/indent expectations below depend on it — and
// it appears to have been stripped by reformatting. Verify the raw string's
// internal indentation against the expected columns before relying on this
// test.
TEST_F(LexerTest, StringLiterals) {
  llvm::StringLiteral testcase = R"(
"hello world\n"
'''foo
test \
\xAB
''' trailing
#"""#
"\0"
#"\0"foo"\1"#
"""x"""
)";
  auto buffer = Lex(testcase);
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // Simple literal: "\n" escape is decoded in the contents.
                  {.kind = TokenKind::StringLiteral,
                   .line = 2,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = {"hello world\n"}},
                  // Block literal: line continuation and \xAB escape decode
                  // into a single contents string.
                  {.kind = TokenKind::StringLiteral,
                   .line = 4,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = {" test \xAB\n"}},
                  // An identifier may follow the closing ''' on its line.
                  {.kind = TokenKind::Identifier,
                   .line = 7,
                   .column = 10,
                   .indent_column = 5,
                   .text = "trailing"},
                  // Raw literal: #"""# yields a single double-quote.
                  {.kind = TokenKind::StringLiteral,
                   .line = 9,
                   .column = 7,
                   .indent_column = 7,
                   .string_contents = {"\""}},
                  // "\0" decodes to a literal NUL byte.
                  {.kind = TokenKind::StringLiteral,
                   .line = 11,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = llvm::StringLiteral::withInnerNUL("\0")},
                  // Raw literal: backslash escapes are left undecoded.
                  {.kind = TokenKind::StringLiteral,
                   .line = 13,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = {"\\0\"foo\"\\1"}},
                  // """x""" is three string literals, not one invalid
                  // attempt at a block string literal.
                  {.kind = TokenKind::StringLiteral,
                   .line = 15,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = {""}},
                  {.kind = TokenKind::StringLiteral,
                   .line = 15,
                   .column = 7,
                   .indent_column = 5,
                   .string_contents = {"x"}},
                  {.kind = TokenKind::StringLiteral,
                   .line = 15,
                   .column = 10,
                   .indent_column = 5,
                   .string_contents = {""}},
                  {.kind = TokenKind::EndOfFile, .line = 16, .column = 3},
              }));
}
  710. TEST_F(LexerTest, InvalidStringLiterals) {
  711. llvm::StringLiteral invalid[] = {
  712. // clang-format off
  713. R"(")",
  714. R"('''
  715. '')",
  716. R"("\)",
  717. R"("\")",
  718. R"("\\)",
  719. R"("\\\")",
  720. R"(''')",
  721. R"('''
  722. )",
  723. R"('''\)",
  724. R"(#'''
  725. ''')",
  726. // clang-format on
  727. };
  728. for (llvm::StringLiteral test : invalid) {
  729. SCOPED_TRACE(test);
  730. auto buffer = Lex(test);
  731. EXPECT_TRUE(buffer.has_errors());
  732. // We should have formed at least one error token.
  733. bool found_error = false;
  734. for (TokenizedBuffer::Token token : buffer.tokens()) {
  735. if (buffer.GetKind(token) == TokenKind::Error) {
  736. found_error = true;
  737. break;
  738. }
  739. }
  740. EXPECT_TRUE(found_error);
  741. }
  742. }
  743. TEST_F(LexerTest, TypeLiterals) {
  744. llvm::StringLiteral testcase = R"(
  745. i0 i1 i20 i999999999999 i0x1
  746. u0 u1 u64 u64b
  747. f32 f80 f1 fi
  748. s1
  749. )";
  750. auto buffer = Lex(testcase);
  751. EXPECT_FALSE(buffer.has_errors());
  752. ASSERT_THAT(buffer,
  753. HasTokens(llvm::ArrayRef<ExpectedToken>{
  754. {.kind = TokenKind::Identifier,
  755. .line = 2,
  756. .column = 5,
  757. .indent_column = 5,
  758. .text = {"i0"}},
  759. {.kind = TokenKind::IntegerTypeLiteral,
  760. .line = 2,
  761. .column = 8,
  762. .indent_column = 5,
  763. .text = {"i1"}},
  764. {.kind = TokenKind::IntegerTypeLiteral,
  765. .line = 2,
  766. .column = 11,
  767. .indent_column = 5,
  768. .text = {"i20"}},
  769. {.kind = TokenKind::IntegerTypeLiteral,
  770. .line = 2,
  771. .column = 15,
  772. .indent_column = 5,
  773. .text = {"i999999999999"}},
  774. {.kind = TokenKind::Identifier,
  775. .line = 2,
  776. .column = 29,
  777. .indent_column = 5,
  778. .text = {"i0x1"}},
  779. {.kind = TokenKind::Identifier,
  780. .line = 3,
  781. .column = 5,
  782. .indent_column = 5,
  783. .text = {"u0"}},
  784. {.kind = TokenKind::UnsignedIntegerTypeLiteral,
  785. .line = 3,
  786. .column = 8,
  787. .indent_column = 5,
  788. .text = {"u1"}},
  789. {.kind = TokenKind::UnsignedIntegerTypeLiteral,
  790. .line = 3,
  791. .column = 11,
  792. .indent_column = 5,
  793. .text = {"u64"}},
  794. {.kind = TokenKind::Identifier,
  795. .line = 3,
  796. .column = 15,
  797. .indent_column = 5,
  798. .text = {"u64b"}},
  799. {.kind = TokenKind::FloatingPointTypeLiteral,
  800. .line = 4,
  801. .column = 5,
  802. .indent_column = 5,
  803. .text = {"f32"}},
  804. {.kind = TokenKind::FloatingPointTypeLiteral,
  805. .line = 4,
  806. .column = 9,
  807. .indent_column = 5,
  808. .text = {"f80"}},
  809. {.kind = TokenKind::FloatingPointTypeLiteral,
  810. .line = 4,
  811. .column = 13,
  812. .indent_column = 5,
  813. .text = {"f1"}},
  814. {.kind = TokenKind::Identifier,
  815. .line = 4,
  816. .column = 16,
  817. .indent_column = 5,
  818. .text = {"fi"}},
  819. {.kind = TokenKind::Identifier,
  820. .line = 5,
  821. .column = 5,
  822. .indent_column = 5,
  823. .text = {"s1"}},
  824. {.kind = TokenKind::EndOfFile, .line = 6, .column = 3},
  825. }));
  826. auto token_i1 = buffer.tokens().begin() + 1;
  827. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i1), 1);
  828. auto token_i20 = buffer.tokens().begin() + 2;
  829. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i20), 20);
  830. auto token_i999999999999 = buffer.tokens().begin() + 3;
  831. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i999999999999), 999999999999ULL);
  832. auto token_u1 = buffer.tokens().begin() + 6;
  833. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u1), 1);
  834. auto token_u64 = buffer.tokens().begin() + 7;
  835. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u64), 64);
  836. auto token_f32 = buffer.tokens().begin() + 9;
  837. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f32), 32);
  838. auto token_f80 = buffer.tokens().begin() + 10;
  839. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f80), 80);
  840. auto token_f1 = buffer.tokens().begin() + 11;
  841. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f1), 1);
  842. }
  843. TEST_F(LexerTest, TypeLiteralTooManyDigits) {
  844. std::string code = "i";
  845. constexpr int Count = 10000;
  846. code.append(Count, '9');
  847. Testing::MockDiagnosticConsumer consumer;
  848. EXPECT_CALL(consumer,
  849. HandleDiagnostic(IsDiagnostic(
  850. DiagnosticKind::TooManyDigits, DiagnosticLevel::Error, 1, 2,
  851. HasSubstr(llvm::formatv(" {0} ", Count)))));
  852. auto buffer = Lex(code, consumer);
  853. EXPECT_TRUE(buffer.has_errors());
  854. ASSERT_THAT(
  855. buffer,
  856. HasTokens(llvm::ArrayRef<ExpectedToken>{
  857. {.kind = TokenKind::Error,
  858. .line = 1,
  859. .column = 1,
  860. .indent_column = 1,
  861. .text = {code}},
  862. {.kind = TokenKind::EndOfFile, .line = 1, .column = Count + 2},
  863. }));
  864. }
  865. TEST_F(LexerTest, DiagnosticTrailingComment) {
  866. llvm::StringLiteral testcase = R"(
  867. // Hello!
  868. var String x; // trailing comment
  869. )";
  870. Testing::MockDiagnosticConsumer consumer;
  871. EXPECT_CALL(consumer,
  872. HandleDiagnostic(IsDiagnostic(DiagnosticKind::TrailingComment,
  873. DiagnosticLevel::Error, 3, 19, _)));
  874. Lex(testcase, consumer);
  875. }
  876. TEST_F(LexerTest, DiagnosticWhitespace) {
  877. Testing::MockDiagnosticConsumer consumer;
  878. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  879. DiagnosticKind::NoWhitespaceAfterCommentIntroducer,
  880. DiagnosticLevel::Error, 1, 3, _)));
  881. Lex("//no space after comment", consumer);
  882. }
  883. TEST_F(LexerTest, DiagnosticUnrecognizedEscape) {
  884. Testing::MockDiagnosticConsumer consumer;
  885. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  886. DiagnosticKind::UnknownEscapeSequence,
  887. DiagnosticLevel::Error, 1, 8, HasSubstr("`b`"))));
  888. Lex(R"("hello\bworld")", consumer);
  889. }
  890. TEST_F(LexerTest, DiagnosticBadHex) {
  891. Testing::MockDiagnosticConsumer consumer;
  892. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  893. DiagnosticKind::HexadecimalEscapeMissingDigits,
  894. DiagnosticLevel::Error, 1, 9, _)));
  895. Lex(R"("hello\xabworld")", consumer);
  896. }
  897. TEST_F(LexerTest, DiagnosticInvalidDigit) {
  898. Testing::MockDiagnosticConsumer consumer;
  899. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  900. DiagnosticKind::InvalidDigit,
  901. DiagnosticLevel::Error, 1, 6, HasSubstr("'a'"))));
  902. Lex("0x123abc", consumer);
  903. }
  904. TEST_F(LexerTest, DiagnosticMissingTerminator) {
  905. Testing::MockDiagnosticConsumer consumer;
  906. EXPECT_CALL(consumer,
  907. HandleDiagnostic(IsDiagnostic(DiagnosticKind::UnterminatedString,
  908. DiagnosticLevel::Error, 1, 1, _)));
  909. Lex(R"(#" ")", consumer);
  910. }
  911. TEST_F(LexerTest, DiagnosticUnrecognizedChar) {
  912. Testing::MockDiagnosticConsumer consumer;
  913. EXPECT_CALL(consumer, HandleDiagnostic(
  914. IsDiagnostic(DiagnosticKind::UnrecognizedCharacters,
  915. DiagnosticLevel::Error, 1, 1, _)));
  916. Lex("\b", consumer);
  917. }
  918. auto GetAndDropLine(llvm::StringRef& text) -> std::string {
  919. auto newline_offset = text.find_first_of('\n');
  920. llvm::StringRef line = text.slice(0, newline_offset);
  921. if (newline_offset != llvm::StringRef::npos) {
  922. text = text.substr(newline_offset + 1);
  923. } else {
  924. text = "";
  925. }
  926. return line.str();
  927. }
  928. TEST_F(LexerTest, PrintingInteger) {
  929. auto buffer = Lex("123");
  930. ASSERT_FALSE(buffer.has_errors());
  931. std::string print_storage;
  932. llvm::raw_string_ostream print_stream(print_storage);
  933. buffer.Print(print_stream);
  934. llvm::StringRef print = print_stream.str();
  935. EXPECT_THAT(GetAndDropLine(print), StrEq("["));
  936. EXPECT_THAT(GetAndDropLine(print),
  937. StrEq("{ index: 0, kind: 'IntegerLiteral', line: 1, "
  938. "column: 1, indent: 1, spelling: '123', value: `123`, "
  939. "has_trailing_space: true },"));
  940. EXPECT_THAT(GetAndDropLine(print), HasSubstr("'EndOfFile'"));
  941. EXPECT_THAT(GetAndDropLine(print), StrEq("]"));
  942. EXPECT_TRUE(print.empty()) << print;
  943. }
  944. TEST_F(LexerTest, PrintingReal) {
  945. auto buffer = Lex("2.5");
  946. ASSERT_FALSE(buffer.has_errors());
  947. std::string print_storage;
  948. llvm::raw_string_ostream print_stream(print_storage);
  949. buffer.Print(print_stream);
  950. llvm::StringRef print = print_stream.str();
  951. EXPECT_THAT(GetAndDropLine(print), StrEq("["));
  952. EXPECT_THAT(
  953. GetAndDropLine(print),
  954. StrEq("{ index: 0, kind: 'RealLiteral', line: 1, column: 1, indent: "
  955. "1, spelling: '2.5', value: `25*10^-1`, has_trailing_space: true "
  956. "},"));
  957. EXPECT_THAT(GetAndDropLine(print), HasSubstr("'EndOfFile'"));
  958. EXPECT_THAT(GetAndDropLine(print), StrEq("]"));
  959. EXPECT_TRUE(print.empty()) << print;
  960. }
  961. TEST_F(LexerTest, PrintingPadding) {
  962. // Test kind padding.
  963. auto buffer = Lex("(;foo;)");
  964. ASSERT_FALSE(buffer.has_errors());
  965. std::string print_storage;
  966. llvm::raw_string_ostream print_stream(print_storage);
  967. buffer.Print(print_stream);
  968. llvm::StringRef print = print_stream.str();
  969. EXPECT_THAT(GetAndDropLine(print), StrEq("["));
  970. EXPECT_THAT(GetAndDropLine(print),
  971. StrEq("{ index: 0, kind: 'OpenParen', line: 1, column: "
  972. "1, indent: 1, spelling: '(', closing_token: 4 },"));
  973. EXPECT_THAT(GetAndDropLine(print),
  974. StrEq("{ index: 1, kind: 'Semi', line: 1, column: "
  975. "2, indent: 1, spelling: ';' },"));
  976. EXPECT_THAT(GetAndDropLine(print),
  977. StrEq("{ index: 2, kind: 'Identifier', line: 1, column: "
  978. "3, indent: 1, spelling: 'foo', identifier: 0 },"));
  979. EXPECT_THAT(GetAndDropLine(print),
  980. StrEq("{ index: 3, kind: 'Semi', line: 1, column: "
  981. "6, indent: 1, spelling: ';' },"));
  982. EXPECT_THAT(GetAndDropLine(print),
  983. StrEq("{ index: 4, kind: 'CloseParen', line: 1, column: "
  984. "7, indent: 1, spelling: ')', opening_token: 0, "
  985. "has_trailing_space: true },"));
  986. EXPECT_THAT(GetAndDropLine(print),
  987. StrEq("{ index: 5, kind: 'EndOfFile', line: 1, column: "
  988. "8, indent: 1, spelling: '' },"));
  989. EXPECT_THAT(GetAndDropLine(print), StrEq("]"));
  990. EXPECT_TRUE(print.empty()) << print;
  991. }
  992. TEST_F(LexerTest, PrintingPaddingDigits) {
  993. // Test digit padding with max values of 9, 10, and 11.
  994. auto buffer = Lex(";\n\n\n\n\n\n\n\n\n\n ;;");
  995. ASSERT_FALSE(buffer.has_errors());
  996. std::string print_storage;
  997. llvm::raw_string_ostream print_stream(print_storage);
  998. buffer.Print(print_stream);
  999. llvm::StringRef print = print_stream.str();
  1000. EXPECT_THAT(GetAndDropLine(print), StrEq("["));
  1001. EXPECT_THAT(GetAndDropLine(print),
  1002. StrEq("{ index: 0, kind: 'Semi', line: 1, column: 1, "
  1003. "indent: 1, spelling: ';', has_trailing_space: true },"));
  1004. EXPECT_THAT(GetAndDropLine(print),
  1005. StrEq("{ index: 1, kind: 'Semi', line: 11, column: 9, "
  1006. "indent: 9, spelling: ';' },"));
  1007. EXPECT_THAT(GetAndDropLine(print),
  1008. StrEq("{ index: 2, kind: 'Semi', line: 11, column: 10, "
  1009. "indent: 9, spelling: ';', has_trailing_space: true },"));
  1010. EXPECT_THAT(GetAndDropLine(print),
  1011. StrEq("{ index: 3, kind: 'EndOfFile', line: 11, column: 11, "
  1012. "indent: 9, spelling: '' },"));
  1013. EXPECT_THAT(GetAndDropLine(print), StrEq("]"));
  1014. EXPECT_TRUE(print.empty()) << print;
  1015. }
// Verifies the printed token stream is well-formed YAML: parse it back with
// the test YAML reader and compare each token's full set of fields.
TEST_F(LexerTest, PrintingAsYaml) {
  // Test that we can parse this into YAML and verify line and indent data.
  // The source places `;` tokens at varying lines and indents: one at
  // line 2 column 2, two on line 5 (columns 1 and 3), with trailing blank
  // lines so EndOfFile lands on line 15.
  auto buffer = Lex("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n\n");
  ASSERT_FALSE(buffer.has_errors());
  std::string print_output;
  llvm::raw_string_ostream print_stream(print_output);
  buffer.Print(print_stream);
  print_stream.flush();
  // Parse the printed text as YAML and compare the whole token sequence,
  // including the trailing-space markers.
  EXPECT_THAT(Yaml::Value::FromText(print_output),
              ElementsAre(Yaml::SequenceValue{
                  Yaml::MappingValue{{"index", "0"},
                                     {"kind", "Semi"},
                                     {"line", "2"},
                                     {"column", "2"},
                                     {"indent", "2"},
                                     {"spelling", ";"},
                                     {"has_trailing_space", "true"}},
                  Yaml::MappingValue{{"index", "1"},
                                     {"kind", "Semi"},
                                     {"line", "5"},
                                     {"column", "1"},
                                     {"indent", "1"},
                                     {"spelling", ";"},
                                     {"has_trailing_space", "true"}},
                  Yaml::MappingValue{{"index", "2"},
                                     {"kind", "Semi"},
                                     {"line", "5"},
                                     {"column", "3"},
                                     {"indent", "1"},
                                     {"spelling", ";"},
                                     {"has_trailing_space", "true"}},
                  Yaml::MappingValue{{"index", "3"},
                                     {"kind", "EndOfFile"},
                                     {"line", "15"},
                                     {"column", "1"},
                                     {"indent", "1"},
                                     {"spelling", ""}}}));
}
  1054. TEST_F(LexerTest, PrintToken) {
  1055. auto buffer = Lex("0x9");
  1056. ASSERT_FALSE(buffer.has_errors());
  1057. std::string print_output;
  1058. llvm::raw_string_ostream print_stream(print_output);
  1059. buffer.Print(print_stream);
  1060. llvm::StringRef print = print_stream.str();
  1061. EXPECT_THAT(GetAndDropLine(print), StrEq("["));
  1062. EXPECT_THAT(GetAndDropLine(print),
  1063. StrEq("{ index: 0, kind: 'IntegerLiteral', line: 1, "
  1064. "column: 1, indent: 1, spelling: '0x9', value: `9`, "
  1065. "has_trailing_space: true },"));
  1066. }
  1067. } // namespace
  1068. } // namespace Carbon::Testing