// toolchain/lexer/tokenized_buffer_test.cpp
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#include "toolchain/lexer/tokenized_buffer.h"

#include <gmock/gmock.h>
#include <gtest/gtest.h>

#include <algorithm>
#include <forward_list>
#include <iterator>
#include <utility>

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Sequence.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/Twine.h"
#include "llvm/Support/SourceMgr.h"
#include "llvm/Support/raw_ostream.h"
#include "toolchain/common/yaml_test_helpers.h"
#include "toolchain/diagnostics/diagnostic_emitter.h"
#include "toolchain/diagnostics/mocks.h"
#include "toolchain/lexer/tokenized_buffer_test_helpers.h"
namespace Carbon::Testing {
namespace {

// gmock matchers used throughout the lexer tests below.
using ::testing::_;
using ::testing::ElementsAre;
using ::testing::Eq;
using ::testing::HasSubstr;
using ::testing::StrEq;
  26. class LexerTest : public ::testing::Test {
  27. protected:
  28. auto GetSourceBuffer(llvm::Twine text) -> SourceBuffer& {
  29. source_storage.push_back(
  30. std::move(*SourceBuffer::CreateFromText(text.str())));
  31. return source_storage.back();
  32. }
  33. auto Lex(llvm::Twine text,
  34. DiagnosticConsumer& consumer = ConsoleDiagnosticConsumer())
  35. -> TokenizedBuffer {
  36. return TokenizedBuffer::Lex(GetSourceBuffer(text), consumer);
  37. }
  38. llvm::SmallVector<SourceBuffer, 16> source_storage;
  39. };
  40. TEST_F(LexerTest, HandlesEmptyBuffer) {
  41. auto buffer = Lex("");
  42. EXPECT_FALSE(buffer.has_errors());
  43. EXPECT_THAT(buffer,
  44. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile}}));
  45. }
  46. TEST_F(LexerTest, TracksLinesAndColumns) {
  47. auto buffer = Lex("\n ;;\n ;;;\n x\"foo\" '''baz\n a\n ''' y");
  48. EXPECT_FALSE(buffer.has_errors());
  49. EXPECT_THAT(
  50. buffer,
  51. HasTokens(llvm::ArrayRef<ExpectedToken>{
  52. {.kind = TokenKind::Semi, .line = 2, .column = 3, .indent_column = 3},
  53. {.kind = TokenKind::Semi, .line = 2, .column = 4, .indent_column = 3},
  54. {.kind = TokenKind::Semi, .line = 3, .column = 4, .indent_column = 4},
  55. {.kind = TokenKind::Semi, .line = 3, .column = 5, .indent_column = 4},
  56. {.kind = TokenKind::Semi, .line = 3, .column = 6, .indent_column = 4},
  57. {.kind = TokenKind::Identifier,
  58. .line = 4,
  59. .column = 4,
  60. .indent_column = 4,
  61. .text = "x"},
  62. {.kind = TokenKind::StringLiteral,
  63. .line = 4,
  64. .column = 5,
  65. .indent_column = 4},
  66. {.kind = TokenKind::StringLiteral,
  67. .line = 4,
  68. .column = 11,
  69. .indent_column = 4},
  70. {.kind = TokenKind::Identifier,
  71. .line = 6,
  72. .column = 6,
  73. .indent_column = 11,
  74. .text = "y"},
  75. {.kind = TokenKind::EndOfFile, .line = 6, .column = 7},
  76. }));
  77. }
  78. TEST_F(LexerTest, HandlesNumericLiteral) {
  79. auto buffer = Lex("12-578\n 1 2\n0x12_3ABC\n0b10_10_11\n1_234_567\n1.5e9");
  80. EXPECT_FALSE(buffer.has_errors());
  81. ASSERT_THAT(buffer,
  82. HasTokens(llvm::ArrayRef<ExpectedToken>{
  83. {.kind = TokenKind::IntegerLiteral,
  84. .line = 1,
  85. .column = 1,
  86. .indent_column = 1,
  87. .text = "12"},
  88. {.kind = TokenKind::Minus,
  89. .line = 1,
  90. .column = 3,
  91. .indent_column = 1},
  92. {.kind = TokenKind::IntegerLiteral,
  93. .line = 1,
  94. .column = 4,
  95. .indent_column = 1,
  96. .text = "578"},
  97. {.kind = TokenKind::IntegerLiteral,
  98. .line = 2,
  99. .column = 3,
  100. .indent_column = 3,
  101. .text = "1"},
  102. {.kind = TokenKind::IntegerLiteral,
  103. .line = 2,
  104. .column = 6,
  105. .indent_column = 3,
  106. .text = "2"},
  107. {.kind = TokenKind::IntegerLiteral,
  108. .line = 3,
  109. .column = 1,
  110. .indent_column = 1,
  111. .text = "0x12_3ABC"},
  112. {.kind = TokenKind::IntegerLiteral,
  113. .line = 4,
  114. .column = 1,
  115. .indent_column = 1,
  116. .text = "0b10_10_11"},
  117. {.kind = TokenKind::IntegerLiteral,
  118. .line = 5,
  119. .column = 1,
  120. .indent_column = 1,
  121. .text = "1_234_567"},
  122. {.kind = TokenKind::RealLiteral,
  123. .line = 6,
  124. .column = 1,
  125. .indent_column = 1,
  126. .text = "1.5e9"},
  127. {.kind = TokenKind::EndOfFile, .line = 6, .column = 6},
  128. }));
  129. auto token_12 = buffer.tokens().begin();
  130. EXPECT_EQ(buffer.GetIntegerLiteral(*token_12), 12);
  131. auto token_578 = buffer.tokens().begin() + 2;
  132. EXPECT_EQ(buffer.GetIntegerLiteral(*token_578), 578);
  133. auto token_1 = buffer.tokens().begin() + 3;
  134. EXPECT_EQ(buffer.GetIntegerLiteral(*token_1), 1);
  135. auto token_2 = buffer.tokens().begin() + 4;
  136. EXPECT_EQ(buffer.GetIntegerLiteral(*token_2), 2);
  137. auto token_0x12_3abc = buffer.tokens().begin() + 5;
  138. EXPECT_EQ(buffer.GetIntegerLiteral(*token_0x12_3abc), 0x12'3abc);
  139. auto token_0b10_10_11 = buffer.tokens().begin() + 6;
  140. EXPECT_EQ(buffer.GetIntegerLiteral(*token_0b10_10_11), 0b10'10'11);
  141. auto token_1_234_567 = buffer.tokens().begin() + 7;
  142. EXPECT_EQ(buffer.GetIntegerLiteral(*token_1_234_567), 1'234'567);
  143. auto token_1_5e9 = buffer.tokens().begin() + 8;
  144. auto value_1_5e9 = buffer.GetRealLiteral(*token_1_5e9);
  145. EXPECT_EQ(value_1_5e9.Mantissa().getZExtValue(), 15);
  146. EXPECT_EQ(value_1_5e9.Exponent().getSExtValue(), 8);
  147. EXPECT_EQ(value_1_5e9.IsDecimal(), true);
  148. }
// Invalid numeric literals lex as single Error tokens spanning the whole
// word, while valid literals around them still lex normally; the buffer as a
// whole is flagged as having errors.
TEST_F(LexerTest, HandlesInvalidNumericLiterals) {
  auto buffer = Lex("14x 15_49 0x3.5q 0x3_4.5_6 0ops");
  EXPECT_TRUE(buffer.has_errors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // Invalid suffix on a decimal literal.
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 1,
                   .indent_column = 1,
                   .text = "14x"},
                  // Digit separators in odd positions still lex as integers.
                  {.kind = TokenKind::IntegerLiteral,
                   .line = 1,
                   .column = 5,
                   .indent_column = 1,
                   .text = "15_49"},
                  // Invalid suffix on a hex real literal.
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 11,
                   .indent_column = 1,
                   .text = "0x3.5q"},
                  // Hex real with separators is a valid real literal.
                  {.kind = TokenKind::RealLiteral,
                   .line = 1,
                   .column = 18,
                   .indent_column = 1,
                   .text = "0x3_4.5_6"},
                  // `0` followed by letters is not a valid literal.
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 28,
                   .indent_column = 1,
                   .text = "0ops"},
                  {.kind = TokenKind::EndOfFile, .line = 1, .column = 32},
              }));
}
// Checks where the lexer ends a numeric literal token: trailing periods,
// operators after exponents, repeated periods, and letter suffixes. Each
// source line below corresponds to one `// newline`-delimited group of
// expected tokens.
TEST_F(LexerTest, SplitsNumericLiteralsProperly) {
  llvm::StringLiteral source_text = R"(
1.
.2
3.+foo
4.0-bar
5.0e+123+456
6.0e+1e+2
1e7
8..10
9.0.9.5
10.foo
11.0.foo
12e+1
13._
)";
  auto buffer = Lex(source_text);
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          // `1.` — a trailing period is not part of the literal.
                          {.kind = TokenKind::IntegerLiteral, .text = "1"},
                          {.kind = TokenKind::Period},
                          // newline
                          // `.2` — a leading period is not part of the literal.
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::IntegerLiteral, .text = "2"},
                          // newline
                          {.kind = TokenKind::IntegerLiteral, .text = "3"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::Identifier, .text = "foo"},
                          // newline
                          {.kind = TokenKind::RealLiteral, .text = "4.0"},
                          {.kind = TokenKind::Minus},
                          {.kind = TokenKind::Identifier, .text = "bar"},
                          // newline
                          // `+` is consumed by the exponent, but only once.
                          {.kind = TokenKind::RealLiteral, .text = "5.0e+123"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntegerLiteral, .text = "456"},
                          // newline
                          // A second exponent marker makes the literal an error.
                          {.kind = TokenKind::Error, .text = "6.0e+1e"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntegerLiteral, .text = "2"},
                          // newline
                          // An exponent without a fractional part is an error.
                          {.kind = TokenKind::Error, .text = "1e7"},
                          // newline
                          {.kind = TokenKind::IntegerLiteral, .text = "8"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::IntegerLiteral, .text = "10"},
                          // newline
                          {.kind = TokenKind::RealLiteral, .text = "9.0"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::RealLiteral, .text = "9.5"},
                          // newline
                          // Letters directly after the period are an error.
                          {.kind = TokenKind::Error, .text = "10.foo"},
                          // newline
                          {.kind = TokenKind::RealLiteral, .text = "11.0"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Identifier, .text = "foo"},
                          // newline
                          {.kind = TokenKind::Error, .text = "12e"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntegerLiteral, .text = "1"},
                          // newline
                          {.kind = TokenKind::IntegerLiteral, .text = "13"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Underscore},
                          // newline
                          {.kind = TokenKind::EndOfFile},
                      }));
}
// Runs of characters the lexer cannot start a token with are grouped into
// Error tokens. The input deliberately includes a multi-byte UTF-8 sequence,
// an embedded NUL (hence the sizeof-1 StringRef construction), and
// unterminated string quotes.
TEST_F(LexerTest, HandlesGarbageCharacters) {
  constexpr char GarbageText[] = "$$💩-$\n$\0$12$\n\\\"\\\n\"x";
  auto buffer = Lex(llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::Error,
           .line = 1,
           .column = 1,
           // 💩 takes 4 bytes, and we count column as bytes offset.
           .text = llvm::StringRef("$$💩", 6)},
          // A valid symbol splits adjacent garbage into separate errors.
          {.kind = TokenKind::Minus, .line = 1, .column = 7},
          {.kind = TokenKind::Error, .line = 1, .column = 8, .text = "$"},
          // newline
          // The embedded NUL byte is part of the error token's text.
          {.kind = TokenKind::Error,
           .line = 2,
           .column = 1,
           .text = llvm::StringRef("$\0$", 3)},
          {.kind = TokenKind::IntegerLiteral,
           .line = 2,
           .column = 4,
           .text = "12"},
          {.kind = TokenKind::Error, .line = 2, .column = 6, .text = "$"},
          // newline
          {.kind = TokenKind::Backslash, .line = 3, .column = 1, .text = "\\"},
          // An unterminated string becomes an error through end of line.
          {.kind = TokenKind::Error, .line = 3, .column = 2, .text = "\"\\"},
          // newline
          {.kind = TokenKind::Error, .line = 4, .column = 1, .text = "\"x"},
          {.kind = TokenKind::EndOfFile, .line = 4, .column = 3},
      }));
}
TEST_F(LexerTest, Symbols) {
  // We don't need to exhaustively test symbols here as they're handled with
  // common code, but we want to check specific patterns to verify things like
  // max-munch rule and handling of interesting symbols.

  // Max munch: `<<<` lexes as `<<` then `<`.
  auto buffer = Lex("<<<");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::LessLess},
                          {TokenKind::Less},
                          {TokenKind::EndOfFile},
                      }));

  // `<<=` wins over `<<` + `=`; `>>` is a single token.
  buffer = Lex("<<=>>");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::LessLessEqual},
                          {TokenKind::GreaterGreater},
                          {TokenKind::EndOfFile},
                      }));

  // The three-character spaceship operator `<=>`.
  buffer = Lex("< <=> >");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::Less},
                          {TokenKind::LessEqualGreater},
                          {TokenKind::Greater},
                          {TokenKind::EndOfFile},
                      }));

  // A sampling of single-character symbol tokens.
  buffer = Lex("\\/?@&^!");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::Backslash},
                          {TokenKind::Slash},
                          {TokenKind::Question},
                          {TokenKind::At},
                          {TokenKind::Amp},
                          {TokenKind::Caret},
                          {TokenKind::Exclaim},
                          {TokenKind::EndOfFile},
                      }));
}
  323. TEST_F(LexerTest, Parens) {
  324. auto buffer = Lex("()");
  325. EXPECT_FALSE(buffer.has_errors());
  326. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  327. {TokenKind::OpenParen},
  328. {TokenKind::CloseParen},
  329. {TokenKind::EndOfFile},
  330. }));
  331. buffer = Lex("((()()))");
  332. EXPECT_FALSE(buffer.has_errors());
  333. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  334. {TokenKind::OpenParen},
  335. {TokenKind::OpenParen},
  336. {TokenKind::OpenParen},
  337. {TokenKind::CloseParen},
  338. {TokenKind::OpenParen},
  339. {TokenKind::CloseParen},
  340. {TokenKind::CloseParen},
  341. {TokenKind::CloseParen},
  342. {TokenKind::EndOfFile},
  343. }));
  344. }
  345. TEST_F(LexerTest, CurlyBraces) {
  346. auto buffer = Lex("{}");
  347. EXPECT_FALSE(buffer.has_errors());
  348. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  349. {TokenKind::OpenCurlyBrace},
  350. {TokenKind::CloseCurlyBrace},
  351. {TokenKind::EndOfFile},
  352. }));
  353. buffer = Lex("{{{}{}}}");
  354. EXPECT_FALSE(buffer.has_errors());
  355. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  356. {TokenKind::OpenCurlyBrace},
  357. {TokenKind::OpenCurlyBrace},
  358. {TokenKind::OpenCurlyBrace},
  359. {TokenKind::CloseCurlyBrace},
  360. {TokenKind::OpenCurlyBrace},
  361. {TokenKind::CloseCurlyBrace},
  362. {TokenKind::CloseCurlyBrace},
  363. {TokenKind::CloseCurlyBrace},
  364. {TokenKind::EndOfFile},
  365. }));
  366. }
// Checks GetMatchedClosingToken/GetMatchedOpeningToken across sibling and
// nested paren/brace groups, walking the tokens in lexed order.
TEST_F(LexerTest, MatchingGroups) {
  {
    // Two adjacent, unnested groups: `()` then `{}`.
    TokenizedBuffer buffer = Lex("(){}");
    ASSERT_FALSE(buffer.has_errors());
    auto it = buffer.tokens().begin();
    auto open_paren_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    auto open_curly_token = *it++;
    auto close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    // Only EndOfFile should remain.
    auto eof_token = *it++;
    EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::EndOfFile);
    EXPECT_EQ(buffer.tokens().end(), it);
  }
  {
    TokenizedBuffer buffer = Lex("({x}){(y)} {{((z))}}");
    ASSERT_FALSE(buffer.has_errors());
    auto it = buffer.tokens().begin();

    // `({x})`: braces nested inside parens.
    auto open_paren_token = *it++;
    auto open_curly_token = *it++;
    ASSERT_EQ("x", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    auto close_curly_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));

    // `{(y)}`: parens nested inside braces.
    open_curly_token = *it++;
    open_paren_token = *it++;
    ASSERT_EQ("y", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    close_paren_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));

    // `{{((z))}}`: two levels of nesting of each bracket kind. The first `)`
    // closes the inner `(` and the first `}` closes the inner `{`.
    open_curly_token = *it++;
    auto inner_open_curly_token = *it++;
    open_paren_token = *it++;
    auto inner_open_paren_token = *it++;
    ASSERT_EQ("z", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    auto inner_close_paren_token = *it++;
    close_paren_token = *it++;
    auto inner_close_curly_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(inner_close_curly_token,
              buffer.GetMatchedClosingToken(inner_open_curly_token));
    EXPECT_EQ(inner_open_curly_token,
              buffer.GetMatchedOpeningToken(inner_close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(inner_close_paren_token,
              buffer.GetMatchedClosingToken(inner_open_paren_token));
    EXPECT_EQ(inner_open_paren_token,
              buffer.GetMatchedOpeningToken(inner_close_paren_token));
    auto eof_token = *it++;
    EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::EndOfFile);
    EXPECT_EQ(buffer.tokens().end(), it);
  }
}
// Checks error recovery for unbalanced brackets: missing closers are
// synthesized as recovery tokens, while unmatched closers become errors.
TEST_F(LexerTest, MismatchedGroups) {
  // An unclosed `{` gets a synthesized `}` marked as a recovery token.
  auto buffer = Lex("{");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {TokenKind::OpenCurlyBrace},
                  {.kind = TokenKind::CloseCurlyBrace, .recovery = true},
                  {TokenKind::EndOfFile},
              }));

  // A closer with no opener is an error token, not a recovery.
  buffer = Lex("}");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Error, .text = "}"},
                          {TokenKind::EndOfFile},
                      }));

  // `}` closing an open `(` triggers a recovery `)` at the `}`'s column.
  buffer = Lex("{(}");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::OpenCurlyBrace, .column = 1},
          {.kind = TokenKind::OpenParen, .column = 2},
          {.kind = TokenKind::CloseParen, .column = 3, .recovery = true},
          {.kind = TokenKind::CloseCurlyBrace, .column = 3},
          {TokenKind::EndOfFile},
      }));

  // Leading unmatched `)` is an error; the later `)` closes the inner `{`
  // via a recovery `}` first.
  buffer = Lex(")({)");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::Error, .column = 1, .text = ")"},
          {.kind = TokenKind::OpenParen, .column = 2},
          {.kind = TokenKind::OpenCurlyBrace, .column = 3},
          {.kind = TokenKind::CloseCurlyBrace, .column = 4, .recovery = true},
          {.kind = TokenKind::CloseParen, .column = 4},
          {TokenKind::EndOfFile},
      }));
}
// Checks HasLeadingWhitespace/HasTrailingWhitespace. The input leaves two
// groups unclosed, so recovery `)` and `}` tokens are present. Each entry in
// `space` describes one boundary *between* tokens, so a token's trailing
// flag is also the next token's leading flag.
TEST_F(LexerTest, Whitespace) {
  auto buffer = Lex("{( } {(");

  // Whether there should be whitespace before/after each token. Each `// X`
  // comment names the token that sits between the two surrounding flags;
  // per space[0], the start of file counts as leading whitespace here.
  bool space[] = {true,
                  // {
                  false,
                  // (
                  true,
                  // inserted )
                  true,
                  // }
                  true,
                  // {
                  false,
                  // (
                  true,
                  // inserted )
                  true,
                  // inserted }
                  true,
                  // EOF
                  false};

  // Advance one boundary per token so that adjacent tokens share a flag.
  int pos = 0;
  for (TokenizedBuffer::Token token : buffer.tokens()) {
    ASSERT_LT(pos, std::size(space));
    EXPECT_THAT(buffer.HasLeadingWhitespace(token), Eq(space[pos]));
    ++pos;
    ASSERT_LT(pos, std::size(space));
    EXPECT_THAT(buffer.HasTrailingWhitespace(token), Eq(space[pos]));
  }
  // Every boundary must have been consumed; `pos` ends at the last index.
  ASSERT_EQ(pos + 1, std::size(space));
}
  519. TEST_F(LexerTest, Keywords) {
  520. auto buffer = Lex(" fn");
  521. EXPECT_FALSE(buffer.has_errors());
  522. EXPECT_THAT(buffer,
  523. HasTokens(llvm::ArrayRef<ExpectedToken>{
  524. {.kind = TokenKind::Fn, .column = 4, .indent_column = 4},
  525. {TokenKind::EndOfFile},
  526. }));
  527. buffer = Lex("and or not if else for return var break continue _");
  528. EXPECT_FALSE(buffer.has_errors());
  529. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  530. {TokenKind::And},
  531. {TokenKind::Or},
  532. {TokenKind::Not},
  533. {TokenKind::If},
  534. {TokenKind::Else},
  535. {TokenKind::For},
  536. {TokenKind::Return},
  537. {TokenKind::Var},
  538. {TokenKind::Break},
  539. {TokenKind::Continue},
  540. {TokenKind::Underscore},
  541. {TokenKind::EndOfFile},
  542. }));
  543. }
  544. TEST_F(LexerTest, Comments) {
  545. auto buffer = Lex(" ;\n // foo\n ;\n");
  546. EXPECT_FALSE(buffer.has_errors());
  547. EXPECT_THAT(
  548. buffer,
  549. HasTokens(llvm::ArrayRef<ExpectedToken>{
  550. {.kind = TokenKind::Semi, .line = 1, .column = 2, .indent_column = 2},
  551. {.kind = TokenKind::Semi, .line = 3, .column = 3, .indent_column = 3},
  552. {.kind = TokenKind::EndOfFile, .line = 3, .column = 4},
  553. }));
  554. buffer = Lex("// foo\n//\n// bar");
  555. EXPECT_FALSE(buffer.has_errors());
  556. EXPECT_THAT(buffer,
  557. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile}}));
  558. // Make sure weird characters aren't a problem.
  559. buffer = Lex(" // foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
  560. EXPECT_FALSE(buffer.has_errors());
  561. EXPECT_THAT(buffer,
  562. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile}}));
  563. // Make sure we can lex a comment at the end of the input.
  564. buffer = Lex("//");
  565. EXPECT_FALSE(buffer.has_errors());
  566. EXPECT_THAT(buffer,
  567. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile}}));
  568. }
  569. TEST_F(LexerTest, InvalidComments) {
  570. llvm::StringLiteral testcases[] = {
  571. " /// foo\n",
  572. "foo // bar\n",
  573. "//! hello",
  574. " //world",
  575. };
  576. for (llvm::StringLiteral testcase : testcases) {
  577. auto buffer = Lex(testcase);
  578. EXPECT_TRUE(buffer.has_errors());
  579. }
  580. }
  581. TEST_F(LexerTest, Identifiers) {
  582. auto buffer = Lex(" foobar");
  583. EXPECT_FALSE(buffer.has_errors());
  584. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  585. {.kind = TokenKind::Identifier,
  586. .column = 4,
  587. .indent_column = 4,
  588. .text = "foobar"},
  589. {TokenKind::EndOfFile},
  590. }));
  591. // Check different kinds of identifier character sequences.
  592. buffer = Lex("_foo_bar");
  593. EXPECT_FALSE(buffer.has_errors());
  594. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  595. {.kind = TokenKind::Identifier, .text = "_foo_bar"},
  596. {TokenKind::EndOfFile},
  597. }));
  598. buffer = Lex("foo2bar00");
  599. EXPECT_FALSE(buffer.has_errors());
  600. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  601. {.kind = TokenKind::Identifier, .text = "foo2bar00"},
  602. {TokenKind::EndOfFile},
  603. }));
  604. // Check that we can parse identifiers that start with a keyword.
  605. buffer = Lex("fnord");
  606. EXPECT_FALSE(buffer.has_errors());
  607. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  608. {.kind = TokenKind::Identifier, .text = "fnord"},
  609. {TokenKind::EndOfFile},
  610. }));
  611. // Check multiple identifiers with indent and interning.
  612. buffer = Lex(" foo;bar\nbar \n foo\tfoo");
  613. EXPECT_FALSE(buffer.has_errors());
  614. EXPECT_THAT(buffer,
  615. HasTokens(llvm::ArrayRef<ExpectedToken>{
  616. {.kind = TokenKind::Identifier,
  617. .line = 1,
  618. .column = 4,
  619. .indent_column = 4,
  620. .text = "foo"},
  621. {.kind = TokenKind::Semi},
  622. {.kind = TokenKind::Identifier,
  623. .line = 1,
  624. .column = 8,
  625. .indent_column = 4,
  626. .text = "bar"},
  627. {.kind = TokenKind::Identifier,
  628. .line = 2,
  629. .column = 1,
  630. .indent_column = 1,
  631. .text = "bar"},
  632. {.kind = TokenKind::Identifier,
  633. .line = 3,
  634. .column = 3,
  635. .indent_column = 3,
  636. .text = "foo"},
  637. {.kind = TokenKind::Identifier,
  638. .line = 3,
  639. .column = 7,
  640. .indent_column = 3,
  641. .text = "foo"},
  642. {.kind = TokenKind::EndOfFile, .line = 3, .column = 10},
  643. }));
  644. }
  645. TEST_F(LexerTest, StringLiterals) {
  646. llvm::StringLiteral testcase = R"(
  647. "hello world\n"
  648. '''foo
  649. test \
  650. \xAB
  651. ''' trailing
  652. #"""#
  653. "\0"
  654. #"\0"foo"\1"#
  655. """x"""
  656. )";
  657. auto buffer = Lex(testcase);
  658. EXPECT_FALSE(buffer.has_errors());
  659. EXPECT_THAT(buffer,
  660. HasTokens(llvm::ArrayRef<ExpectedToken>{
  661. {.kind = TokenKind::StringLiteral,
  662. .line = 2,
  663. .column = 5,
  664. .indent_column = 5,
  665. .string_contents = {"hello world\n"}},
  666. {.kind = TokenKind::StringLiteral,
  667. .line = 4,
  668. .column = 5,
  669. .indent_column = 5,
  670. .string_contents = {" test \xAB\n"}},
  671. {.kind = TokenKind::Identifier,
  672. .line = 7,
  673. .column = 10,
  674. .indent_column = 5,
  675. .text = "trailing"},
  676. {.kind = TokenKind::StringLiteral,
  677. .line = 9,
  678. .column = 7,
  679. .indent_column = 7,
  680. .string_contents = {"\""}},
  681. {.kind = TokenKind::StringLiteral,
  682. .line = 11,
  683. .column = 5,
  684. .indent_column = 5,
  685. .string_contents = llvm::StringLiteral::withInnerNUL("\0")},
  686. {.kind = TokenKind::StringLiteral,
  687. .line = 13,
  688. .column = 5,
  689. .indent_column = 5,
  690. .string_contents = {"\\0\"foo\"\\1"}},
  691. // """x""" is three string literals, not one invalid
  692. // attempt at a block string literal.
  693. {.kind = TokenKind::StringLiteral,
  694. .line = 15,
  695. .column = 5,
  696. .indent_column = 5,
  697. .string_contents = {""}},
  698. {.kind = TokenKind::StringLiteral,
  699. .line = 15,
  700. .column = 7,
  701. .indent_column = 5,
  702. .string_contents = {"x"}},
  703. {.kind = TokenKind::StringLiteral,
  704. .line = 15,
  705. .column = 10,
  706. .indent_column = 5,
  707. .string_contents = {""}},
  708. {.kind = TokenKind::EndOfFile, .line = 16, .column = 3},
  709. }));
  710. }
  711. TEST_F(LexerTest, InvalidStringLiterals) {
  712. llvm::StringLiteral invalid[] = {
  713. // clang-format off
  714. R"(")",
  715. R"('''
  716. '')",
  717. R"("\)",
  718. R"("\")",
  719. R"("\\)",
  720. R"("\\\")",
  721. R"(''')",
  722. R"('''
  723. )",
  724. R"('''\)",
  725. R"(#'''
  726. ''')",
  727. // clang-format on
  728. };
  729. for (llvm::StringLiteral test : invalid) {
  730. SCOPED_TRACE(test);
  731. auto buffer = Lex(test);
  732. EXPECT_TRUE(buffer.has_errors());
  733. // We should have formed at least one error token.
  734. bool found_error = false;
  735. for (TokenizedBuffer::Token token : buffer.tokens()) {
  736. if (buffer.GetKind(token) == TokenKind::Error) {
  737. found_error = true;
  738. break;
  739. }
  740. }
  741. EXPECT_TRUE(found_error);
  742. }
  743. }
  744. TEST_F(LexerTest, TypeLiterals) {
  745. llvm::StringLiteral testcase = R"(
  746. i0 i1 i20 i999999999999 i0x1
  747. u0 u1 u64 u64b
  748. f32 f80 f1 fi
  749. s1
  750. )";
  751. auto buffer = Lex(testcase);
  752. EXPECT_FALSE(buffer.has_errors());
  753. ASSERT_THAT(buffer,
  754. HasTokens(llvm::ArrayRef<ExpectedToken>{
  755. {.kind = TokenKind::Identifier,
  756. .line = 2,
  757. .column = 5,
  758. .indent_column = 5,
  759. .text = {"i0"}},
  760. {.kind = TokenKind::IntegerTypeLiteral,
  761. .line = 2,
  762. .column = 8,
  763. .indent_column = 5,
  764. .text = {"i1"}},
  765. {.kind = TokenKind::IntegerTypeLiteral,
  766. .line = 2,
  767. .column = 11,
  768. .indent_column = 5,
  769. .text = {"i20"}},
  770. {.kind = TokenKind::IntegerTypeLiteral,
  771. .line = 2,
  772. .column = 15,
  773. .indent_column = 5,
  774. .text = {"i999999999999"}},
  775. {.kind = TokenKind::Identifier,
  776. .line = 2,
  777. .column = 29,
  778. .indent_column = 5,
  779. .text = {"i0x1"}},
  780. {.kind = TokenKind::Identifier,
  781. .line = 3,
  782. .column = 5,
  783. .indent_column = 5,
  784. .text = {"u0"}},
  785. {.kind = TokenKind::UnsignedIntegerTypeLiteral,
  786. .line = 3,
  787. .column = 8,
  788. .indent_column = 5,
  789. .text = {"u1"}},
  790. {.kind = TokenKind::UnsignedIntegerTypeLiteral,
  791. .line = 3,
  792. .column = 11,
  793. .indent_column = 5,
  794. .text = {"u64"}},
  795. {.kind = TokenKind::Identifier,
  796. .line = 3,
  797. .column = 15,
  798. .indent_column = 5,
  799. .text = {"u64b"}},
  800. {.kind = TokenKind::FloatingPointTypeLiteral,
  801. .line = 4,
  802. .column = 5,
  803. .indent_column = 5,
  804. .text = {"f32"}},
  805. {.kind = TokenKind::FloatingPointTypeLiteral,
  806. .line = 4,
  807. .column = 9,
  808. .indent_column = 5,
  809. .text = {"f80"}},
  810. {.kind = TokenKind::FloatingPointTypeLiteral,
  811. .line = 4,
  812. .column = 13,
  813. .indent_column = 5,
  814. .text = {"f1"}},
  815. {.kind = TokenKind::Identifier,
  816. .line = 4,
  817. .column = 16,
  818. .indent_column = 5,
  819. .text = {"fi"}},
  820. {.kind = TokenKind::Identifier,
  821. .line = 5,
  822. .column = 5,
  823. .indent_column = 5,
  824. .text = {"s1"}},
  825. {.kind = TokenKind::EndOfFile, .line = 6, .column = 3},
  826. }));
  827. auto token_i1 = buffer.tokens().begin() + 1;
  828. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i1), 1);
  829. auto token_i20 = buffer.tokens().begin() + 2;
  830. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i20), 20);
  831. auto token_i999999999999 = buffer.tokens().begin() + 3;
  832. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i999999999999), 999999999999ULL);
  833. auto token_u1 = buffer.tokens().begin() + 6;
  834. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u1), 1);
  835. auto token_u64 = buffer.tokens().begin() + 7;
  836. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u64), 64);
  837. auto token_f32 = buffer.tokens().begin() + 9;
  838. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f32), 32);
  839. auto token_f80 = buffer.tokens().begin() + 10;
  840. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f80), 80);
  841. auto token_f1 = buffer.tokens().begin() + 11;
  842. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f1), 1);
  843. }
  844. TEST_F(LexerTest, TypeLiteralTooManyDigits) {
  845. std::string code = "i";
  846. constexpr int Count = 10000;
  847. code.append(Count, '9');
  848. Testing::MockDiagnosticConsumer consumer;
  849. EXPECT_CALL(consumer,
  850. HandleDiagnostic(IsDiagnostic(
  851. DiagnosticKind::TooManyDigits, DiagnosticLevel::Error, 1, 2,
  852. HasSubstr(llvm::formatv(" {0} ", Count)))));
  853. auto buffer = Lex(code, consumer);
  854. EXPECT_TRUE(buffer.has_errors());
  855. ASSERT_THAT(
  856. buffer,
  857. HasTokens(llvm::ArrayRef<ExpectedToken>{
  858. {.kind = TokenKind::Error,
  859. .line = 1,
  860. .column = 1,
  861. .indent_column = 1,
  862. .text = {code}},
  863. {.kind = TokenKind::EndOfFile, .line = 1, .column = Count + 2},
  864. }));
  865. }
TEST_F(LexerTest, DiagnosticTrailingComment) {
  llvm::StringLiteral testcase = R"(
    // Hello!
    var String x; // trailing comment
  )";

  // A comment following code on the same line is rejected. The diagnostic
  // points at the `//` introducer: line 3, column 19 given the 4-space
  // indentation inside the raw string above.
  Testing::MockDiagnosticConsumer consumer;
  EXPECT_CALL(consumer,
              HandleDiagnostic(IsDiagnostic(DiagnosticKind::TrailingComment,
                                            DiagnosticLevel::Error, 3, 19, _)));
  Lex(testcase, consumer);
}
  877. TEST_F(LexerTest, DiagnosticWhitespace) {
  878. Testing::MockDiagnosticConsumer consumer;
  879. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  880. DiagnosticKind::NoWhitespaceAfterCommentIntroducer,
  881. DiagnosticLevel::Error, 1, 3, _)));
  882. Lex("//no space after comment", consumer);
  883. }
  884. TEST_F(LexerTest, DiagnosticUnrecognizedEscape) {
  885. Testing::MockDiagnosticConsumer consumer;
  886. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  887. DiagnosticKind::UnknownEscapeSequence,
  888. DiagnosticLevel::Error, 1, 8, HasSubstr("`b`"))));
  889. Lex(R"("hello\bworld")", consumer);
  890. }
  891. TEST_F(LexerTest, DiagnosticBadHex) {
  892. Testing::MockDiagnosticConsumer consumer;
  893. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  894. DiagnosticKind::HexadecimalEscapeMissingDigits,
  895. DiagnosticLevel::Error, 1, 9, _)));
  896. Lex(R"("hello\xabworld")", consumer);
  897. }
  898. TEST_F(LexerTest, DiagnosticInvalidDigit) {
  899. Testing::MockDiagnosticConsumer consumer;
  900. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  901. DiagnosticKind::InvalidDigit,
  902. DiagnosticLevel::Error, 1, 6, HasSubstr("'a'"))));
  903. Lex("0x123abc", consumer);
  904. }
  905. TEST_F(LexerTest, DiagnosticMissingTerminator) {
  906. Testing::MockDiagnosticConsumer consumer;
  907. EXPECT_CALL(consumer,
  908. HandleDiagnostic(IsDiagnostic(DiagnosticKind::UnterminatedString,
  909. DiagnosticLevel::Error, 1, 1, _)));
  910. Lex(R"(#" ")", consumer);
  911. }
  912. TEST_F(LexerTest, DiagnosticUnrecognizedChar) {
  913. Testing::MockDiagnosticConsumer consumer;
  914. EXPECT_CALL(consumer, HandleDiagnostic(
  915. IsDiagnostic(DiagnosticKind::UnrecognizedCharacters,
  916. DiagnosticLevel::Error, 1, 1, _)));
  917. Lex("\b", consumer);
  918. }
  919. auto GetAndDropLine(llvm::StringRef& text) -> std::string {
  920. auto newline_offset = text.find_first_of('\n');
  921. llvm::StringRef line = text.slice(0, newline_offset);
  922. if (newline_offset != llvm::StringRef::npos) {
  923. text = text.substr(newline_offset + 1);
  924. } else {
  925. text = "";
  926. }
  927. return line.str();
  928. }
  929. TEST_F(LexerTest, PrintingInteger) {
  930. auto buffer = Lex("123");
  931. ASSERT_FALSE(buffer.has_errors());
  932. std::string print_storage;
  933. llvm::raw_string_ostream print_stream(print_storage);
  934. buffer.Print(print_stream);
  935. llvm::StringRef print = print_stream.str();
  936. EXPECT_THAT(GetAndDropLine(print), StrEq("["));
  937. EXPECT_THAT(GetAndDropLine(print),
  938. StrEq("{ index: 0, kind: 'IntegerLiteral', line: 1, "
  939. "column: 1, indent: 1, spelling: '123', value: `123`, "
  940. "has_trailing_space: true },"));
  941. EXPECT_THAT(GetAndDropLine(print), HasSubstr("'EndOfFile'"));
  942. EXPECT_THAT(GetAndDropLine(print), StrEq("]"));
  943. EXPECT_TRUE(print.empty()) << print;
  944. }
  945. TEST_F(LexerTest, PrintingReal) {
  946. auto buffer = Lex("2.5");
  947. ASSERT_FALSE(buffer.has_errors());
  948. std::string print_storage;
  949. llvm::raw_string_ostream print_stream(print_storage);
  950. buffer.Print(print_stream);
  951. llvm::StringRef print = print_stream.str();
  952. EXPECT_THAT(GetAndDropLine(print), StrEq("["));
  953. EXPECT_THAT(
  954. GetAndDropLine(print),
  955. StrEq("{ index: 0, kind: 'RealLiteral', line: 1, column: 1, indent: "
  956. "1, spelling: '2.5', value: `25*10^-1`, has_trailing_space: true "
  957. "},"));
  958. EXPECT_THAT(GetAndDropLine(print), HasSubstr("'EndOfFile'"));
  959. EXPECT_THAT(GetAndDropLine(print), StrEq("]"));
  960. EXPECT_TRUE(print.empty()) << print;
  961. }
  962. TEST_F(LexerTest, PrintingPadding) {
  963. // Test kind padding.
  964. auto buffer = Lex("(;foo;)");
  965. ASSERT_FALSE(buffer.has_errors());
  966. std::string print_storage;
  967. llvm::raw_string_ostream print_stream(print_storage);
  968. buffer.Print(print_stream);
  969. llvm::StringRef print = print_stream.str();
  970. EXPECT_THAT(GetAndDropLine(print), StrEq("["));
  971. EXPECT_THAT(GetAndDropLine(print),
  972. StrEq("{ index: 0, kind: 'OpenParen', line: 1, column: "
  973. "1, indent: 1, spelling: '(', closing_token: 4 },"));
  974. EXPECT_THAT(GetAndDropLine(print),
  975. StrEq("{ index: 1, kind: 'Semi', line: 1, column: "
  976. "2, indent: 1, spelling: ';' },"));
  977. EXPECT_THAT(GetAndDropLine(print),
  978. StrEq("{ index: 2, kind: 'Identifier', line: 1, column: "
  979. "3, indent: 1, spelling: 'foo', identifier: 0 },"));
  980. EXPECT_THAT(GetAndDropLine(print),
  981. StrEq("{ index: 3, kind: 'Semi', line: 1, column: "
  982. "6, indent: 1, spelling: ';' },"));
  983. EXPECT_THAT(GetAndDropLine(print),
  984. StrEq("{ index: 4, kind: 'CloseParen', line: 1, column: "
  985. "7, indent: 1, spelling: ')', opening_token: 0, "
  986. "has_trailing_space: true },"));
  987. EXPECT_THAT(GetAndDropLine(print),
  988. StrEq("{ index: 5, kind: 'EndOfFile', line: 1, column: "
  989. "8, indent: 1, spelling: '' },"));
  990. EXPECT_THAT(GetAndDropLine(print), StrEq("]"));
  991. EXPECT_TRUE(print.empty()) << print;
  992. }
  993. TEST_F(LexerTest, PrintingPaddingDigits) {
  994. // Test digit padding with max values of 9, 10, and 11.
  995. auto buffer = Lex(";\n\n\n\n\n\n\n\n\n\n ;;");
  996. ASSERT_FALSE(buffer.has_errors());
  997. std::string print_storage;
  998. llvm::raw_string_ostream print_stream(print_storage);
  999. buffer.Print(print_stream);
  1000. llvm::StringRef print = print_stream.str();
  1001. EXPECT_THAT(GetAndDropLine(print), StrEq("["));
  1002. EXPECT_THAT(GetAndDropLine(print),
  1003. StrEq("{ index: 0, kind: 'Semi', line: 1, column: 1, "
  1004. "indent: 1, spelling: ';', has_trailing_space: true },"));
  1005. EXPECT_THAT(GetAndDropLine(print),
  1006. StrEq("{ index: 1, kind: 'Semi', line: 11, column: 9, "
  1007. "indent: 9, spelling: ';' },"));
  1008. EXPECT_THAT(GetAndDropLine(print),
  1009. StrEq("{ index: 2, kind: 'Semi', line: 11, column: 10, "
  1010. "indent: 9, spelling: ';', has_trailing_space: true },"));
  1011. EXPECT_THAT(GetAndDropLine(print),
  1012. StrEq("{ index: 3, kind: 'EndOfFile', line: 11, column: 11, "
  1013. "indent: 9, spelling: '' },"));
  1014. EXPECT_THAT(GetAndDropLine(print), StrEq("]"));
  1015. EXPECT_TRUE(print.empty()) << print;
  1016. }
  1017. TEST_F(LexerTest, PrintingAsYaml) {
  1018. // Test that we can parse this into YAML and verify line and indent data.
  1019. auto buffer = Lex("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n\n");
  1020. ASSERT_FALSE(buffer.has_errors());
  1021. std::string print_output;
  1022. llvm::raw_string_ostream print_stream(print_output);
  1023. buffer.Print(print_stream);
  1024. print_stream.flush();
  1025. EXPECT_THAT(Yaml::Value::FromText(print_output),
  1026. ElementsAre(Yaml::SequenceValue{
  1027. Yaml::MappingValue{{"index", "0"},
  1028. {"kind", "Semi"},
  1029. {"line", "2"},
  1030. {"column", "2"},
  1031. {"indent", "2"},
  1032. {"spelling", ";"},
  1033. {"has_trailing_space", "true"}},
  1034. Yaml::MappingValue{{"index", "1"},
  1035. {"kind", "Semi"},
  1036. {"line", "5"},
  1037. {"column", "1"},
  1038. {"indent", "1"},
  1039. {"spelling", ";"},
  1040. {"has_trailing_space", "true"}},
  1041. Yaml::MappingValue{{"index", "2"},
  1042. {"kind", "Semi"},
  1043. {"line", "5"},
  1044. {"column", "3"},
  1045. {"indent", "1"},
  1046. {"spelling", ";"},
  1047. {"has_trailing_space", "true"}},
  1048. Yaml::MappingValue{{"index", "3"},
  1049. {"kind", "EndOfFile"},
  1050. {"line", "15"},
  1051. {"column", "1"},
  1052. {"indent", "1"},
  1053. {"spelling", ""}}}));
  1054. }
TEST_F(LexerTest, PrintToken) {
  // Checks the printed form of a hex integer literal: the spelling keeps the
  // `0x9` source text while the value is printed in decimal (`9`).
  //
  // NOTE(review): the test name suggests a per-token print API, but the body
  // calls the whole-buffer `Print` and only inspects the opening bracket and
  // first entry (the trailing output is never drained or checked) — confirm
  // whether this was meant to exercise a `PrintToken`-style method instead.
  auto buffer = Lex("0x9");
  ASSERT_FALSE(buffer.has_errors());
  std::string print_output;
  llvm::raw_string_ostream print_stream(print_output);
  buffer.Print(print_stream);
  llvm::StringRef print = print_stream.str();
  EXPECT_THAT(GetAndDropLine(print), StrEq("["));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("{ index: 0, kind: 'IntegerLiteral', line: 1, "
                    "column: 1, indent: 1, spelling: '0x9', value: `9`, "
                    "has_trailing_space: true },"));
}
  1068. } // namespace
  1069. } // namespace Carbon::Testing