// tokenized_buffer_test.cpp — Carbon toolchain lexer unit tests.
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. #include "toolchain/lex/tokenized_buffer.h"
  5. #include <gmock/gmock.h>
  6. #include <gtest/gtest.h>
  7. #include <cmath>
  8. #include <forward_list>
  9. #include <iterator>
  10. #include "llvm/ADT/ArrayRef.h"
  11. #include "llvm/Support/FormatVariadic.h"
  12. #include "testing/base/test_raw_ostream.h"
  13. #include "toolchain/base/shared_value_stores.h"
  14. #include "toolchain/diagnostics/diagnostic_emitter.h"
  15. #include "toolchain/diagnostics/mocks.h"
  16. #include "toolchain/lex/lex.h"
  17. #include "toolchain/lex/tokenized_buffer_test_helpers.h"
  18. #include "toolchain/testing/compile_helper.h"
  19. #include "toolchain/testing/yaml_test_helpers.h"
  20. namespace Carbon::Lex {
  21. namespace {
  22. using ::Carbon::Testing::ExpectedToken;
  23. using ::Carbon::Testing::IsSingleDiagnostic;
  24. using ::Carbon::Testing::TestRawOstream;
  25. using ::testing::_;
  26. using ::testing::ElementsAre;
  27. using ::testing::Eq;
  28. using ::testing::HasSubstr;
  29. using ::testing::Pair;
  30. namespace Yaml = ::Carbon::Testing::Yaml;
// Test fixture for lexer tests: provides a fresh CompileHelper per test,
// which owns the lexed TokenizedBuffers (and value stores) the tests inspect.
class LexerTest : public ::testing::Test {
 public:
  // Helper used to lex source strings; keeps the resulting buffers alive for
  // the duration of the test.
  Testing::CompileHelper compile_helper_;
};
  35. TEST_F(LexerTest, HandlesEmptyBuffer) {
  36. auto& buffer = compile_helper_.GetTokenizedBuffer("");
  37. EXPECT_FALSE(buffer.has_errors());
  38. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  39. {.kind = TokenKind::FileStart},
  40. {.kind = TokenKind::FileEnd}}));
  41. }
  42. TEST_F(LexerTest, TracksLinesAndColumns) {
  43. auto& buffer = compile_helper_.GetTokenizedBuffer(
  44. "\n ;;\n ;;;\n x\"foo\" '''baz\n a\n ''' y");
  45. EXPECT_FALSE(buffer.has_errors());
  46. EXPECT_THAT(
  47. buffer,
  48. HasTokens(llvm::ArrayRef<ExpectedToken>{
  49. {.kind = TokenKind::FileStart,
  50. .line = 1,
  51. .column = 1,
  52. .indent_column = 1},
  53. {.kind = TokenKind::Semi, .line = 2, .column = 3, .indent_column = 3},
  54. {.kind = TokenKind::Semi, .line = 2, .column = 4, .indent_column = 3},
  55. {.kind = TokenKind::Semi, .line = 3, .column = 4, .indent_column = 4},
  56. {.kind = TokenKind::Semi, .line = 3, .column = 5, .indent_column = 4},
  57. {.kind = TokenKind::Semi, .line = 3, .column = 6, .indent_column = 4},
  58. {.kind = TokenKind::Identifier,
  59. .line = 4,
  60. .column = 4,
  61. .indent_column = 4,
  62. .text = "x"},
  63. {.kind = TokenKind::StringLiteral,
  64. .line = 4,
  65. .column = 5,
  66. .indent_column = 4},
  67. {.kind = TokenKind::StringLiteral,
  68. .line = 4,
  69. .column = 11,
  70. .indent_column = 4},
  71. {.kind = TokenKind::Identifier,
  72. .line = 6,
  73. .column = 6,
  74. .indent_column = 11,
  75. .text = "y"},
  76. {.kind = TokenKind::FileEnd, .line = 6, .column = 7},
  77. }));
  78. }
  79. TEST_F(LexerTest, TracksLinesAndColumnsCRLF) {
  80. auto& buffer = compile_helper_.GetTokenizedBuffer(
  81. "\r\n ;;\r\n ;;;\r\n x\"foo\" '''baz\r\n a\r\n ''' y");
  82. EXPECT_FALSE(buffer.has_errors());
  83. EXPECT_THAT(
  84. buffer,
  85. HasTokens(llvm::ArrayRef<ExpectedToken>{
  86. {.kind = TokenKind::FileStart,
  87. .line = 1,
  88. .column = 1,
  89. .indent_column = 1},
  90. {.kind = TokenKind::Semi, .line = 2, .column = 3, .indent_column = 3},
  91. {.kind = TokenKind::Semi, .line = 2, .column = 4, .indent_column = 3},
  92. {.kind = TokenKind::Semi, .line = 3, .column = 4, .indent_column = 4},
  93. {.kind = TokenKind::Semi, .line = 3, .column = 5, .indent_column = 4},
  94. {.kind = TokenKind::Semi, .line = 3, .column = 6, .indent_column = 4},
  95. {.kind = TokenKind::Identifier,
  96. .line = 4,
  97. .column = 4,
  98. .indent_column = 4,
  99. .text = "x"},
  100. {.kind = TokenKind::StringLiteral,
  101. .line = 4,
  102. .column = 5,
  103. .indent_column = 4},
  104. {.kind = TokenKind::StringLiteral,
  105. .line = 4,
  106. .column = 11,
  107. .indent_column = 4},
  108. {.kind = TokenKind::Identifier,
  109. .line = 6,
  110. .column = 6,
  111. .indent_column = 11,
  112. .text = "y"},
  113. {.kind = TokenKind::FileEnd, .line = 6, .column = 7},
  114. }));
  115. }
  116. TEST_F(LexerTest, InvalidCR) {
  117. auto& buffer = compile_helper_.GetTokenizedBuffer("\n ;;\r ;\n x");
  118. EXPECT_TRUE(buffer.has_errors());
  119. EXPECT_THAT(
  120. buffer,
  121. HasTokens(llvm::ArrayRef<ExpectedToken>{
  122. {.kind = TokenKind::FileStart,
  123. .line = 1,
  124. .column = 1,
  125. .indent_column = 1},
  126. {.kind = TokenKind::Semi, .line = 2, .column = 2, .indent_column = 2},
  127. {.kind = TokenKind::Semi, .line = 2, .column = 3, .indent_column = 2},
  128. {.kind = TokenKind::Semi, .line = 2, .column = 6, .indent_column = 2},
  129. {.kind = TokenKind::Identifier,
  130. .line = 3,
  131. .column = 4,
  132. .indent_column = 4,
  133. .text = "x"},
  134. {.kind = TokenKind::FileEnd, .line = 3, .column = 5},
  135. }));
  136. }
  137. TEST_F(LexerTest, InvalidLFCR) {
  138. auto& buffer = compile_helper_.GetTokenizedBuffer("\n ;;\n\r ;\n x");
  139. EXPECT_TRUE(buffer.has_errors());
  140. EXPECT_THAT(
  141. buffer,
  142. HasTokens(llvm::ArrayRef<ExpectedToken>{
  143. {.kind = TokenKind::FileStart,
  144. .line = 1,
  145. .column = 1,
  146. .indent_column = 1},
  147. {.kind = TokenKind::Semi, .line = 2, .column = 2, .indent_column = 2},
  148. {.kind = TokenKind::Semi, .line = 2, .column = 3, .indent_column = 2},
  149. {.kind = TokenKind::Semi, .line = 3, .column = 3, .indent_column = 1},
  150. {.kind = TokenKind::Identifier,
  151. .line = 4,
  152. .column = 4,
  153. .indent_column = 4,
  154. .text = "x"},
  155. {.kind = TokenKind::FileEnd, .line = 4, .column = 5},
  156. }));
  157. }
  158. TEST_F(LexerTest, HandlesNumericLiteral) {
  159. auto [buffer, value_stores] =
  160. compile_helper_.GetTokenizedBufferWithSharedValueStore(
  161. "12-578\n 1 2\n0x12_3ABC\n0b10_10_11\n1_234_567\n1.5e9");
  162. EXPECT_FALSE(buffer.has_errors());
  163. ASSERT_THAT(buffer,
  164. HasTokens(llvm::ArrayRef<ExpectedToken>{
  165. {.kind = TokenKind::FileStart, .line = 1, .column = 1},
  166. {.kind = TokenKind::IntLiteral,
  167. .line = 1,
  168. .column = 1,
  169. .indent_column = 1,
  170. .text = "12"},
  171. {.kind = TokenKind::Minus,
  172. .line = 1,
  173. .column = 3,
  174. .indent_column = 1},
  175. {.kind = TokenKind::IntLiteral,
  176. .line = 1,
  177. .column = 4,
  178. .indent_column = 1,
  179. .text = "578"},
  180. {.kind = TokenKind::IntLiteral,
  181. .line = 2,
  182. .column = 3,
  183. .indent_column = 3,
  184. .text = "1"},
  185. {.kind = TokenKind::IntLiteral,
  186. .line = 2,
  187. .column = 6,
  188. .indent_column = 3,
  189. .text = "2"},
  190. {.kind = TokenKind::IntLiteral,
  191. .line = 3,
  192. .column = 1,
  193. .indent_column = 1,
  194. .text = "0x12_3ABC"},
  195. {.kind = TokenKind::IntLiteral,
  196. .line = 4,
  197. .column = 1,
  198. .indent_column = 1,
  199. .text = "0b10_10_11"},
  200. {.kind = TokenKind::IntLiteral,
  201. .line = 5,
  202. .column = 1,
  203. .indent_column = 1,
  204. .text = "1_234_567"},
  205. {.kind = TokenKind::RealLiteral,
  206. .line = 6,
  207. .column = 1,
  208. .indent_column = 1,
  209. .text = "1.5e9"},
  210. {.kind = TokenKind::FileEnd, .line = 6, .column = 6},
  211. }));
  212. auto token_start = buffer.tokens().begin();
  213. auto token_12 = token_start + 1;
  214. EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_12)), 12);
  215. auto token_578 = token_12 + 2;
  216. EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_578)), 578);
  217. auto token_1 = token_578 + 1;
  218. EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_1)), 1);
  219. auto token_2 = token_1 + 1;
  220. EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_2)), 2);
  221. auto token_0x12_3abc = token_2 + 1;
  222. EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_0x12_3abc)),
  223. 0x12'3abc);
  224. auto token_0b10_10_11 = token_0x12_3abc + 1;
  225. EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_0b10_10_11)),
  226. 0b10'10'11);
  227. auto token_1_234_567 = token_0b10_10_11 + 1;
  228. EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_1_234_567)),
  229. 1'234'567);
  230. auto token_1_5e9 = token_1_234_567 + 1;
  231. auto value_1_5e9 =
  232. value_stores.reals().Get(buffer.GetRealLiteral(*token_1_5e9));
  233. EXPECT_EQ(value_1_5e9.mantissa.getZExtValue(), 15);
  234. EXPECT_EQ(value_1_5e9.exponent.getSExtValue(), 8);
  235. EXPECT_EQ(value_1_5e9.is_decimal, true);
  236. }
// Invalid numeric literals become single Error tokens spanning the whole
// would-be literal, while valid neighbors still lex normally.
TEST_F(LexerTest, HandlesInvalidNumericLiterals) {
  auto& buffer =
      compile_helper_.GetTokenizedBuffer("14x 15_49 0x3.5q 0x3_4.5_6 0ops");
  EXPECT_TRUE(buffer.has_errors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::FileStart, .line = 1, .column = 1},
                  // Invalid suffix character.
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 1,
                   .indent_column = 1,
                   .text = "14x"},
                  // Digit separators in a decimal literal are accepted here.
                  {.kind = TokenKind::IntLiteral,
                   .line = 1,
                   .column = 5,
                   .indent_column = 1,
                   .text = "15_49"},
                  // Invalid trailing `q` on a hex real literal.
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 11,
                   .indent_column = 1,
                   .text = "0x3.5q"},
                  // Separators in a hex real literal are accepted.
                  {.kind = TokenKind::RealLiteral,
                   .line = 1,
                   .column = 18,
                   .indent_column = 1,
                   .text = "0x3_4.5_6"},
                  // `0` followed by letters is not a valid radix prefix.
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 28,
                   .indent_column = 1,
                   .text = "0ops"},
                  {.kind = TokenKind::FileEnd, .line = 1, .column = 32},
              }));
}
// Checks where the lexer stops a numeric literal and hands back to normal
// tokenization: trailing `.`, `.` before a digitless suffix, `+`/`-` that is
// or isn't part of an exponent, `..`, and chained `.` accesses.
TEST_F(LexerTest, SplitsNumericLiteralsProperly) {
  llvm::StringLiteral source_text = R"(
1.
.2
3.+foo
4.0-bar
5.0e+123+456
6.0e+1e+2
1e7
8..10
9.0.9.5
10.foo
11.0.foo
12e+1
13._
)";
  auto& buffer = compile_helper_.GetTokenizedBuffer(source_text);
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::FileStart},
                          // `1.` splits into int and period.
                          {.kind = TokenKind::IntLiteral, .text = "1"},
                          {.kind = TokenKind::Period},
                          // newline
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::IntLiteral, .text = "2"},
                          // newline
                          {.kind = TokenKind::IntLiteral, .text = "3"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::Identifier, .text = "foo"},
                          // newline
                          // `-` after a complete real literal is an operator.
                          {.kind = TokenKind::RealLiteral, .text = "4.0"},
                          {.kind = TokenKind::Minus},
                          {.kind = TokenKind::Identifier, .text = "bar"},
                          // newline
                          // First `+` is the exponent sign; second is a plus.
                          {.kind = TokenKind::RealLiteral, .text = "5.0e+123"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntLiteral, .text = "456"},
                          // newline
                          // A second `e` inside the exponent is an error.
                          {.kind = TokenKind::Error, .text = "6.0e+1e"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntLiteral, .text = "2"},
                          // newline
                          {.kind = TokenKind::Error, .text = "1e7"},
                          // newline
                          // `8..10` is int, period, period, int.
                          {.kind = TokenKind::IntLiteral, .text = "8"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::IntLiteral, .text = "10"},
                          // newline
                          {.kind = TokenKind::RealLiteral, .text = "9.0"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::IntLiteral, .text = "9"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::IntLiteral, .text = "5"},
                          // newline
                          // `.foo` directly after digits is a malformed
                          // literal, not member access.
                          {.kind = TokenKind::Error, .text = "10.foo"},
                          // newline
                          // ...but after a complete real literal it is access.
                          {.kind = TokenKind::RealLiteral, .text = "11.0"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Identifier, .text = "foo"},
                          // newline
                          {.kind = TokenKind::Error, .text = "12e"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntLiteral, .text = "1"},
                          // newline
                          {.kind = TokenKind::IntLiteral, .text = "13"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Underscore},
                          // newline
                          {.kind = TokenKind::FileEnd},
                      }));
}
// Garbage byte sequences (including an embedded NUL and a multi-byte emoji)
// are grouped into Error tokens without derailing adjacent valid tokens.
TEST_F(LexerTest, HandlesGarbageCharacters) {
  // Sized StringRef construction below is required because the text contains
  // an embedded NUL; sizeof - 1 drops only the terminating NUL.
  constexpr char GarbageText[] = "$$💩-$\n$\0$12$\n\\\"\\\n\"x";
  auto& buffer = compile_helper_.GetTokenizedBuffer(
      llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::FileStart, .line = 1, .column = 1},
          {.kind = TokenKind::Error,
           .line = 1,
           .column = 1,
           // 💩 takes 4 bytes, and we count column as bytes offset.
           .text = llvm::StringRef("$$💩", 6)},
          {.kind = TokenKind::Minus, .line = 1, .column = 7},
          {.kind = TokenKind::Error, .line = 1, .column = 8, .text = "$"},
          // newline
          // The NUL byte is folded into the surrounding error token.
          {.kind = TokenKind::Error,
           .line = 2,
           .column = 1,
           .text = llvm::StringRef("$\0$", 3)},
          {.kind = TokenKind::IntLiteral, .line = 2, .column = 4, .text = "12"},
          {.kind = TokenKind::Error, .line = 2, .column = 6, .text = "$"},
          // newline
          // A lone backslash is its own token; the unterminated string that
          // follows becomes an error.
          {.kind = TokenKind::Backslash, .line = 3, .column = 1, .text = "\\"},
          {.kind = TokenKind::Error, .line = 3, .column = 2, .text = "\"\\"},
          // newline
          {.kind = TokenKind::Error, .line = 4, .column = 1, .text = "\"x"},
          {.kind = TokenKind::FileEnd, .line = 4, .column = 3},
      }));
}
  376. TEST_F(LexerTest, Symbols) {
  377. // We don't need to exhaustively test symbols here as they're handled with
  378. // common code, but we want to check specific patterns to verify things like
  379. // max-munch rule and handling of interesting symbols.
  380. auto& buffer1 = compile_helper_.GetTokenizedBuffer("<<<");
  381. EXPECT_FALSE(buffer1.has_errors());
  382. EXPECT_THAT(buffer1, HasTokens(llvm::ArrayRef<ExpectedToken>{
  383. {.kind = TokenKind::FileStart},
  384. {.kind = TokenKind::LessLess},
  385. {.kind = TokenKind::Less},
  386. {.kind = TokenKind::FileEnd},
  387. }));
  388. auto& buffer2 = compile_helper_.GetTokenizedBuffer("<<=>>");
  389. EXPECT_FALSE(buffer2.has_errors());
  390. EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
  391. {.kind = TokenKind::FileStart},
  392. {.kind = TokenKind::LessLessEqual},
  393. {.kind = TokenKind::GreaterGreater},
  394. {.kind = TokenKind::FileEnd},
  395. }));
  396. auto& buffer3 = compile_helper_.GetTokenizedBuffer("< <=> >");
  397. EXPECT_FALSE(buffer3.has_errors());
  398. EXPECT_THAT(buffer3, HasTokens(llvm::ArrayRef<ExpectedToken>{
  399. {.kind = TokenKind::FileStart},
  400. {.kind = TokenKind::Less},
  401. {.kind = TokenKind::LessEqualGreater},
  402. {.kind = TokenKind::Greater},
  403. {.kind = TokenKind::FileEnd},
  404. }));
  405. auto& buffer4 = compile_helper_.GetTokenizedBuffer("\\/?@&^!");
  406. EXPECT_FALSE(buffer4.has_errors());
  407. EXPECT_THAT(buffer4, HasTokens(llvm::ArrayRef<ExpectedToken>{
  408. {.kind = TokenKind::FileStart},
  409. {.kind = TokenKind::Backslash},
  410. {.kind = TokenKind::Slash},
  411. {.kind = TokenKind::Question},
  412. {.kind = TokenKind::At},
  413. {.kind = TokenKind::Amp},
  414. {.kind = TokenKind::Caret},
  415. {.kind = TokenKind::Exclaim},
  416. {.kind = TokenKind::FileEnd},
  417. }));
  418. }
  419. TEST_F(LexerTest, Parens) {
  420. auto& buffer1 = compile_helper_.GetTokenizedBuffer("()");
  421. EXPECT_FALSE(buffer1.has_errors());
  422. EXPECT_THAT(buffer1, HasTokens(llvm::ArrayRef<ExpectedToken>{
  423. {.kind = TokenKind::FileStart},
  424. {.kind = TokenKind::OpenParen},
  425. {.kind = TokenKind::CloseParen},
  426. {.kind = TokenKind::FileEnd},
  427. }));
  428. auto& buffer2 = compile_helper_.GetTokenizedBuffer("((()()))");
  429. EXPECT_FALSE(buffer2.has_errors());
  430. EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
  431. {.kind = TokenKind::FileStart},
  432. {.kind = TokenKind::OpenParen},
  433. {.kind = TokenKind::OpenParen},
  434. {.kind = TokenKind::OpenParen},
  435. {.kind = TokenKind::CloseParen},
  436. {.kind = TokenKind::OpenParen},
  437. {.kind = TokenKind::CloseParen},
  438. {.kind = TokenKind::CloseParen},
  439. {.kind = TokenKind::CloseParen},
  440. {.kind = TokenKind::FileEnd},
  441. }));
  442. }
  443. TEST_F(LexerTest, CurlyBraces) {
  444. auto& buffer1 = compile_helper_.GetTokenizedBuffer("{}");
  445. EXPECT_FALSE(buffer1.has_errors());
  446. EXPECT_THAT(buffer1, HasTokens(llvm::ArrayRef<ExpectedToken>{
  447. {.kind = TokenKind::FileStart},
  448. {.kind = TokenKind::OpenCurlyBrace},
  449. {.kind = TokenKind::CloseCurlyBrace},
  450. {.kind = TokenKind::FileEnd},
  451. }));
  452. auto& buffer2 = compile_helper_.GetTokenizedBuffer("{{{}{}}}");
  453. EXPECT_FALSE(buffer2.has_errors());
  454. EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
  455. {.kind = TokenKind::FileStart},
  456. {.kind = TokenKind::OpenCurlyBrace},
  457. {.kind = TokenKind::OpenCurlyBrace},
  458. {.kind = TokenKind::OpenCurlyBrace},
  459. {.kind = TokenKind::CloseCurlyBrace},
  460. {.kind = TokenKind::OpenCurlyBrace},
  461. {.kind = TokenKind::CloseCurlyBrace},
  462. {.kind = TokenKind::CloseCurlyBrace},
  463. {.kind = TokenKind::CloseCurlyBrace},
  464. {.kind = TokenKind::FileEnd},
  465. }));
  466. }
// GetMatchedClosingToken / GetMatchedOpeningToken must pair up grouping
// tokens for sibling, interleaved, and doubly-nested groups.
TEST_F(LexerTest, MatchingGroups) {
  {
    // Sibling groups: `(){}`.
    auto& buffer = compile_helper_.GetTokenizedBuffer("(){}");
    ASSERT_FALSE(buffer.has_errors());
    // Pre-increment skips the FileStart token.
    auto it = ++buffer.tokens().begin();
    auto open_paren_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    auto open_curly_token = *it++;
    auto close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    auto eof_token = *it++;
    EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::FileEnd);
    EXPECT_EQ(buffer.tokens().end(), it);
  }
  {
    // Mixed nesting with identifiers inside: `({x}){(y)} {{((z))}}`.
    auto [buffer, value_stores] =
        compile_helper_.GetTokenizedBufferWithSharedValueStore(
            "({x}){(y)} {{((z))}}");
    ASSERT_FALSE(buffer.has_errors());
    auto it = ++buffer.tokens().begin();
    // `({x})`: curly nested inside paren.
    auto open_paren_token = *it++;
    auto open_curly_token = *it++;
    ASSERT_EQ("x", value_stores.identifiers().Get(buffer.GetIdentifier(*it++)));
    auto close_curly_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    // `{(y)}`: paren nested inside curly.
    open_curly_token = *it++;
    open_paren_token = *it++;
    ASSERT_EQ("y", value_stores.identifiers().Get(buffer.GetIdentifier(*it++)));
    close_paren_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    // `{{((z))}}`: two levels of each kind.
    open_curly_token = *it++;
    auto inner_open_curly_token = *it++;
    open_paren_token = *it++;
    auto inner_open_paren_token = *it++;
    ASSERT_EQ("z", value_stores.identifiers().Get(buffer.GetIdentifier(*it++)));
    auto inner_close_paren_token = *it++;
    close_paren_token = *it++;
    auto inner_close_curly_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(inner_close_curly_token,
              buffer.GetMatchedClosingToken(inner_open_curly_token));
    EXPECT_EQ(inner_open_curly_token,
              buffer.GetMatchedOpeningToken(inner_close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(inner_close_paren_token,
              buffer.GetMatchedClosingToken(inner_open_paren_token));
    EXPECT_EQ(inner_open_paren_token,
              buffer.GetMatchedOpeningToken(inner_close_paren_token));
    auto eof_token = *it++;
    EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::FileEnd);
    EXPECT_EQ(buffer.tokens().end(), it);
  }
}
// Unbalanced grouping tokens either become Error tokens or trigger insertion
// of recovery tokens (marked `.recovery = true`) so every group is closed.
TEST_F(LexerTest, MismatchedGroups) {
  // A lone opening brace is an error.
  auto& buffer1 = compile_helper_.GetTokenizedBuffer("{");
  EXPECT_TRUE(buffer1.has_errors());
  EXPECT_THAT(buffer1, HasTokens(llvm::ArrayRef<ExpectedToken>{
                           {.kind = TokenKind::FileStart},
                           {.kind = TokenKind::Error, .text = "{"},
                           {.kind = TokenKind::FileEnd},
                       }));
  // A lone closing brace is an error.
  auto& buffer2 = compile_helper_.GetTokenizedBuffer("}");
  EXPECT_TRUE(buffer2.has_errors());
  EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
                           {.kind = TokenKind::FileStart},
                           {.kind = TokenKind::Error, .text = "}"},
                           {.kind = TokenKind::FileEnd},
                       }));
  // `{(}`: a `)` recovery token is inserted at the `}` position (column 3)
  // so the paren group closes before the curly does.
  auto& buffer3 = compile_helper_.GetTokenizedBuffer("{(}");
  EXPECT_TRUE(buffer3.has_errors());
  EXPECT_THAT(
      buffer3,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::FileStart},
          {.kind = TokenKind::OpenCurlyBrace, .column = 1},
          {.kind = TokenKind::OpenParen, .column = 2},
          {.kind = TokenKind::CloseParen, .column = 3, .recovery = true},
          {.kind = TokenKind::CloseCurlyBrace, .column = 3},
          {.kind = TokenKind::FileEnd},
      }));
  // `)({)`: the leading `)` has no opener and is an error; a `}` recovery
  // token is inserted at the trailing `)` position (column 4).
  auto& buffer4 = compile_helper_.GetTokenizedBuffer(")({)");
  EXPECT_TRUE(buffer4.has_errors());
  EXPECT_THAT(
      buffer4,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::FileStart},
          {.kind = TokenKind::Error, .column = 1, .text = ")"},
          {.kind = TokenKind::OpenParen, .column = 2},
          {.kind = TokenKind::OpenCurlyBrace, .column = 3},
          {.kind = TokenKind::CloseCurlyBrace, .column = 4, .recovery = true},
          {.kind = TokenKind::CloseParen, .column = 4},
          {.kind = TokenKind::FileEnd},
      }));
}
// Checks HasLeadingWhitespace / HasTrailingWhitespace, including around an
// inserted recovery token.
TEST_F(LexerTest, Whitespace) {
  auto& buffer = compile_helper_.GetTokenizedBuffer("{( } {(");
  // Whether there should be whitespace before/after each token. Entry i is
  // the gap before token i (the comment names the token that follows), so
  // adjacent tokens share one entry: token i reads space[pos] for leading
  // and space[pos + 1] for trailing whitespace.
  bool space[] = {false,
                  // start-of-file
                  true,
                  // {
                  false,
                  // (
                  true,
                  // inserted )
                  true,
                  // }
                  true,
                  // error {
                  false,
                  // error (
                  true,
                  // EOF
                  false};
  int pos = 0;
  for (TokenIndex token : buffer.tokens()) {
    // Name the token in failure output so a mismatch is easy to locate.
    SCOPED_TRACE(
        llvm::formatv("Token #{0}: '{1}'", token, buffer.GetTokenText(token)));
    ASSERT_LT(pos, std::size(space));
    EXPECT_THAT(buffer.HasLeadingWhitespace(token), Eq(space[pos]));
    ++pos;
    ASSERT_LT(pos, std::size(space));
    EXPECT_THAT(buffer.HasTrailingWhitespace(token), Eq(space[pos]));
  }
  // The loop advances pos once per token, ending one short of the final
  // entry; this verifies the expectation list was fully consumed.
  ASSERT_EQ(pos + 1, std::size(space));
}
// Every keyword's fixed spelling must lex to exactly one token of its kind.
TEST_F(LexerTest, Keywords) {
  // Build the keyword list via the token X-macro file: plain tokens expand to
  // nothing, keyword tokens contribute their TokenKind.
  TokenKind keywords[] = {
#define CARBON_TOKEN(TokenName)
#define CARBON_KEYWORD_TOKEN(TokenName, ...) TokenKind::TokenName,
#include "toolchain/lex/token_kind.def"
  };
  for (const auto& keyword : keywords) {
    auto& buffer = compile_helper_.GetTokenizedBuffer(keyword.fixed_spelling());
    EXPECT_FALSE(buffer.has_errors());
    EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                            {.kind = TokenKind::FileStart},
                            {.kind = keyword, .column = 1, .indent_column = 1},
                            {.kind = TokenKind::FileEnd},
                        }));
  }
}
  639. TEST_F(LexerTest, Comments) {
  640. auto& buffer1 = compile_helper_.GetTokenizedBuffer(" ;\n // foo\n ;\n");
  641. EXPECT_FALSE(buffer1.has_errors());
  642. EXPECT_THAT(
  643. buffer1,
  644. HasTokens(llvm::ArrayRef<ExpectedToken>{
  645. {.kind = TokenKind::FileStart, .line = 1, .column = 1},
  646. {.kind = TokenKind::Semi, .line = 1, .column = 2, .indent_column = 2},
  647. {.kind = TokenKind::Semi, .line = 3, .column = 3, .indent_column = 3},
  648. {.kind = TokenKind::FileEnd, .line = 3, .column = 4},
  649. }));
  650. auto& buffer2 = compile_helper_.GetTokenizedBuffer("// foo\n//\n// bar");
  651. EXPECT_FALSE(buffer2.has_errors());
  652. EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
  653. {.kind = TokenKind::FileStart},
  654. {.kind = TokenKind::FileEnd}}));
  655. // Make sure weird characters aren't a problem.
  656. auto& buffer3 =
  657. compile_helper_.GetTokenizedBuffer(" // foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
  658. EXPECT_FALSE(buffer3.has_errors());
  659. EXPECT_THAT(buffer3, HasTokens(llvm::ArrayRef<ExpectedToken>{
  660. {.kind = TokenKind::FileStart},
  661. {.kind = TokenKind::FileEnd}}));
  662. // Make sure we can lex a comment at the end of the input.
  663. auto& buffer4 = compile_helper_.GetTokenizedBuffer("//");
  664. EXPECT_FALSE(buffer4.has_errors());
  665. EXPECT_THAT(buffer4, HasTokens(llvm::ArrayRef<ExpectedToken>{
  666. {.kind = TokenKind::FileStart},
  667. {.kind = TokenKind::FileEnd}}));
  668. }
  669. TEST_F(LexerTest, InvalidComments) {
  670. llvm::StringLiteral testcases[] = {
  671. " /// foo\n",
  672. "foo // bar\n",
  673. "//! hello",
  674. " //world",
  675. };
  676. for (llvm::StringLiteral testcase : testcases) {
  677. auto& buffer = compile_helper_.GetTokenizedBuffer(testcase);
  678. EXPECT_TRUE(buffer.has_errors());
  679. }
  680. }
// Checks identifier lexing: leading-whitespace column tracking, identifiers
// containing underscores and digits, identifiers that merely start with a
// keyword, and indent/interning behavior across multiple lines.
TEST_F(LexerTest, Identifiers) {
  // Leading whitespace is skipped; the token's column and indent reflect it.
  auto& buffer1 = compile_helper_.GetTokenizedBuffer("   foobar");
  EXPECT_FALSE(buffer1.has_errors());
  EXPECT_THAT(buffer1, HasTokens(llvm::ArrayRef<ExpectedToken>{
                           {.kind = TokenKind::FileStart},
                           {.kind = TokenKind::Identifier,
                            .column = 4,
                            .indent_column = 4,
                            .text = "foobar"},
                           {.kind = TokenKind::FileEnd},
                       }));

  // Check different kinds of identifier character sequences.
  auto& buffer2 = compile_helper_.GetTokenizedBuffer("_foo_bar");
  EXPECT_FALSE(buffer2.has_errors());
  EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
                           {.kind = TokenKind::FileStart},
                           {.kind = TokenKind::Identifier, .text = "_foo_bar"},
                           {.kind = TokenKind::FileEnd},
                       }));

  auto& buffer3 = compile_helper_.GetTokenizedBuffer("foo2bar00");
  EXPECT_FALSE(buffer3.has_errors());
  EXPECT_THAT(buffer3, HasTokens(llvm::ArrayRef<ExpectedToken>{
                           {.kind = TokenKind::FileStart},
                           {.kind = TokenKind::Identifier, .text = "foo2bar00"},
                           {.kind = TokenKind::FileEnd},
                       }));

  // Check that we can parse identifiers that start with a keyword.
  auto& buffer4 = compile_helper_.GetTokenizedBuffer("fnord");
  EXPECT_FALSE(buffer4.has_errors());
  EXPECT_THAT(buffer4, HasTokens(llvm::ArrayRef<ExpectedToken>{
                           {.kind = TokenKind::FileStart},
                           {.kind = TokenKind::Identifier, .text = "fnord"},
                           {.kind = TokenKind::FileEnd},
                       }));

  // Check multiple identifiers with indent and interning. The repeated `foo`
  // and `bar` spellings should intern to the same identifier text.
  auto& buffer5 =
      compile_helper_.GetTokenizedBuffer("   foo;bar\nbar \n  foo\tfoo");
  EXPECT_FALSE(buffer5.has_errors());
  EXPECT_THAT(buffer5,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::FileStart, .line = 1, .column = 1},
                  {.kind = TokenKind::Identifier,
                   .line = 1,
                   .column = 4,
                   .indent_column = 4,
                   .text = "foo"},
                  {.kind = TokenKind::Semi},
                  {.kind = TokenKind::Identifier,
                   .line = 1,
                   .column = 8,
                   .indent_column = 4,
                   .text = "bar"},
                  {.kind = TokenKind::Identifier,
                   .line = 2,
                   .column = 1,
                   .indent_column = 1,
                   .text = "bar"},
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 3,
                   .indent_column = 3,
                   .text = "foo"},
                  // A tab separates this token; it advances one column.
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 7,
                   .indent_column = 3,
                   .text = "foo"},
                  {.kind = TokenKind::FileEnd, .line = 3, .column = 10},
              }));
}
// Checks lexing of simple, block (`'''...'''`), and raw (`#"..."#`) string
// literals, including escape processing, indentation stripping in block
// literals, and adjacent literals lexing as separate tokens.
TEST_F(LexerTest, StringLiterals) {
  llvm::StringLiteral testcase = R"(
    "hello world\n"

    '''foo
      test \
      \xAB
     ''' trailing

      #"""#

    "\0"

    #"\0"foo"\1"#

    """x"""
  )";
  auto [buffer, value_stores] =
      compile_helper_.GetTokenizedBufferWithSharedValueStore(testcase);
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::FileStart, .line = 1, .column = 1},
                  {.kind = TokenKind::StringLiteral,
                   .line = 2,
                   .column = 5,
                   .indent_column = 5,
                   .value_stores = &value_stores,
                   .string_contents = {"hello world\n"}},
                  // The block literal: terminator indent is stripped from each
                  // content line, `\<newline>` joins lines, and `\xAB` becomes
                  // a single byte.
                  {.kind = TokenKind::StringLiteral,
                   .line = 4,
                   .column = 5,
                   .indent_column = 5,
                   .value_stores = &value_stores,
                   .string_contents = {" test \xAB\n"}},
                  {.kind = TokenKind::Identifier,
                   .line = 7,
                   .column = 10,
                   .indent_column = 5,
                   .text = "trailing"},
                  // Raw literal whose contents are a single `"`.
                  {.kind = TokenKind::StringLiteral,
                   .line = 9,
                   .column = 7,
                   .indent_column = 7,
                   .value_stores = &value_stores,
                   .string_contents = {"\""}},
                  // `\0` produces an embedded NUL byte.
                  {.kind = TokenKind::StringLiteral,
                   .line = 11,
                   .column = 5,
                   .indent_column = 5,
                   .value_stores = &value_stores,
                   .string_contents = llvm::StringLiteral::withInnerNUL("\0")},
                  // In a raw literal, `\0` and `\1` are not escapes.
                  {.kind = TokenKind::StringLiteral,
                   .line = 13,
                   .column = 5,
                   .indent_column = 5,
                   .value_stores = &value_stores,
                   .string_contents = {"\\0\"foo\"\\1"}},
                  // """x""" is three string literals, not one invalid
                  // attempt at a block string literal.
                  {.kind = TokenKind::StringLiteral,
                   .line = 15,
                   .column = 5,
                   .indent_column = 5,
                   .value_stores = &value_stores,
                   .string_contents = {""}},
                  {.kind = TokenKind::StringLiteral,
                   .line = 15,
                   .column = 7,
                   .indent_column = 5,
                   .value_stores = &value_stores,
                   .string_contents = {"x"}},
                  {.kind = TokenKind::StringLiteral,
                   .line = 15,
                   .column = 10,
                   .indent_column = 5,
                   .value_stores = &value_stores,
                   .string_contents = {""}},
                  {.kind = TokenKind::FileEnd, .line = 16, .column = 3},
              }));
}
  827. TEST_F(LexerTest, InvalidStringLiterals) {
  828. llvm::StringLiteral invalid[] = {
  829. // clang-format off
  830. R"(")",
  831. R"('''
  832. '')",
  833. R"("\)",
  834. R"("\")",
  835. R"("\\)",
  836. R"("\\\")",
  837. R"(''')",
  838. R"('''
  839. )",
  840. R"('''\)",
  841. R"(#'''
  842. ''')",
  843. // clang-format on
  844. };
  845. for (llvm::StringLiteral test : invalid) {
  846. SCOPED_TRACE(test);
  847. auto& buffer = compile_helper_.GetTokenizedBuffer(test);
  848. EXPECT_TRUE(buffer.has_errors());
  849. // We should have formed at least one error token.
  850. bool found_error = false;
  851. for (TokenIndex token : buffer.tokens()) {
  852. if (buffer.GetKind(token) == TokenKind::Error) {
  853. found_error = true;
  854. break;
  855. }
  856. }
  857. EXPECT_TRUE(found_error);
  858. }
  859. }
// Checks lexing of type literals (`iN`, `uN`, `fN`): a valid bit width is a
// decimal integer with no leading zero; anything else (`i0`, `i0x1`, `u64b`,
// `fi`, `s1`) lexes as a plain identifier.
TEST_F(LexerTest, TypeLiterals) {
  llvm::StringLiteral testcase = R"(
    i0 i1 i20 i999999999999 i0x1
    u0 u1 u64 u64b
    f32 f80 f1 fi
    s1
  )";
  auto [buffer, value_stores] =
      compile_helper_.GetTokenizedBufferWithSharedValueStore(testcase);
  EXPECT_FALSE(buffer.has_errors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::FileStart, .line = 1, .column = 1},
                  // `i0` has a leading-zero width, so it is an identifier.
                  {.kind = TokenKind::Identifier,
                   .line = 2,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"i0"}},
                  {.kind = TokenKind::IntTypeLiteral,
                   .line = 2,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"i1"}},
                  {.kind = TokenKind::IntTypeLiteral,
                   .line = 2,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"i20"}},
                  // Widths larger than any supported type still lex as type
                  // literals.
                  {.kind = TokenKind::IntTypeLiteral,
                   .line = 2,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"i999999999999"}},
                  {.kind = TokenKind::Identifier,
                   .line = 2,
                   .column = 29,
                   .indent_column = 5,
                   .text = {"i0x1"}},
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"u0"}},
                  {.kind = TokenKind::UnsignedIntTypeLiteral,
                   .line = 3,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"u1"}},
                  {.kind = TokenKind::UnsignedIntTypeLiteral,
                   .line = 3,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"u64"}},
                  // A trailing non-digit makes the whole token an identifier.
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"u64b"}},
                  {.kind = TokenKind::FloatTypeLiteral,
                   .line = 4,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"f32"}},
                  {.kind = TokenKind::FloatTypeLiteral,
                   .line = 4,
                   .column = 9,
                   .indent_column = 5,
                   .text = {"f80"}},
                  {.kind = TokenKind::FloatTypeLiteral,
                   .line = 4,
                   .column = 13,
                   .indent_column = 5,
                   .text = {"f1"}},
                  {.kind = TokenKind::Identifier,
                   .line = 4,
                   .column = 16,
                   .indent_column = 5,
                   .text = {"fi"}},
                  // `s` is not a type-literal prefix.
                  {.kind = TokenKind::Identifier,
                   .line = 5,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"s1"}},
                  {.kind = TokenKind::FileEnd, .line = 6, .column = 3},
              }));

  // Returns the stored bit width for the token at `token_index` (0 is
  // FileStart; indices follow the expectation list above).
  auto type_size = [&](int token_index) {
    auto token = buffer.tokens().begin()[token_index];
    return value_stores.ints().Get(buffer.GetTypeLiteralSize(token));
  };
  EXPECT_EQ(type_size(2), 1);
  EXPECT_EQ(type_size(3), 20);
  EXPECT_EQ(type_size(4), 999999999999ULL);
  EXPECT_EQ(type_size(7), 1);
  EXPECT_EQ(type_size(8), 64);
  EXPECT_EQ(type_size(10), 32);
  EXPECT_EQ(type_size(11), 80);
  EXPECT_EQ(type_size(12), 1);
}
// Checks the diagnostic for a type-literal bit width with too many digits,
// both at the first width that overflows and for a pathologically long one.
TEST_F(LexerTest, TypeLiteralTooManyDigits) {
  // We increase the number of digits until the first one that is too large.
  Testing::MockDiagnosticConsumer consumer;
  EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
                            DiagnosticKind::TooManyTypeBitWidthDigits,
                            DiagnosticLevel::Error, 1, 2, _)));
  std::string code = "i";
  // A 128-bit APInt should be plenty large, but if needed in the future it can
  // be widened without issue.
  llvm::APInt bits = llvm::APInt::getZero(128);
  for ([[maybe_unused]] int _ : llvm::seq(1, 30)) {
    code.append("9");
    bits = bits * 10 + 9;
    auto [buffer, value_stores] =
        compile_helper_.GetTokenizedBufferWithSharedValueStore(code, &consumer);
    if (buffer.has_errors()) {
      // Overflow: the whole spelling becomes a single error token.
      ASSERT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                              {.kind = TokenKind::FileStart},
                              {.kind = TokenKind::Error, .text = code},
                              {.kind = TokenKind::FileEnd},
                          }));
      break;
    }
    // Still in range: a type literal whose stored width matches `bits`.
    ASSERT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                            {.kind = TokenKind::FileStart},
                            {.kind = TokenKind::IntTypeLiteral, .text = code},
                            {.kind = TokenKind::FileEnd},
                        }));
    auto token = buffer.tokens().begin()[1];
    EXPECT_TRUE(llvm::APInt::isSameValue(
        value_stores.ints().Get(buffer.GetTypeLiteralSize(token)), bits));
  }

  // Make sure we can also gracefully reject a very large number of digits
  // without crashing or hanging, and show the correct number.
  constexpr int Count = 10000;
  EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
                            DiagnosticKind::TooManyTypeBitWidthDigits,
                            DiagnosticLevel::Error, 1, 2,
                            HasSubstr(llvm::formatv(" {0} ", Count)))));
  code = "i";
  code.append(Count, '9');
  auto& buffer = compile_helper_.GetTokenizedBuffer(code, &consumer);
  ASSERT_TRUE(buffer.has_errors());
  ASSERT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::FileStart},
                          {.kind = TokenKind::Error, .text = code},
                          {.kind = TokenKind::FileEnd},
                      }));
}
  1007. TEST_F(LexerTest, DiagnosticTrailingComment) {
  1008. llvm::StringLiteral testcase = R"(
  1009. // Hello!
  1010. var String x; // trailing comment
  1011. )";
  1012. Testing::MockDiagnosticConsumer consumer;
  1013. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  1014. DiagnosticKind::TrailingComment,
  1015. DiagnosticLevel::Error, 3, 19, _)));
  1016. compile_helper_.GetTokenizedBuffer(testcase, &consumer);
  1017. }
  1018. TEST_F(LexerTest, DiagnosticWhitespace) {
  1019. Testing::MockDiagnosticConsumer consumer;
  1020. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  1021. DiagnosticKind::NoWhitespaceAfterCommentIntroducer,
  1022. DiagnosticLevel::Error, 1, 3, _)));
  1023. compile_helper_.GetTokenizedBuffer("//no space after comment", &consumer);
  1024. }
  1025. TEST_F(LexerTest, DiagnosticUnrecognizedEscape) {
  1026. Testing::MockDiagnosticConsumer consumer;
  1027. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  1028. DiagnosticKind::UnknownEscapeSequence,
  1029. DiagnosticLevel::Error, 1, 8, HasSubstr("`b`"))));
  1030. compile_helper_.GetTokenizedBuffer(R"("hello\bworld")", &consumer);
  1031. }
  1032. TEST_F(LexerTest, DiagnosticBadHex) {
  1033. Testing::MockDiagnosticConsumer consumer;
  1034. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  1035. DiagnosticKind::HexadecimalEscapeMissingDigits,
  1036. DiagnosticLevel::Error, 1, 9, _)));
  1037. compile_helper_.GetTokenizedBuffer(R"("hello\xabworld")", &consumer);
  1038. }
  1039. TEST_F(LexerTest, DiagnosticInvalidDigit) {
  1040. Testing::MockDiagnosticConsumer consumer;
  1041. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  1042. DiagnosticKind::InvalidDigit,
  1043. DiagnosticLevel::Error, 1, 6, HasSubstr("'a'"))));
  1044. compile_helper_.GetTokenizedBuffer("0x123abc", &consumer);
  1045. }
  1046. TEST_F(LexerTest, DiagnosticCR) {
  1047. Testing::MockDiagnosticConsumer consumer;
  1048. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  1049. DiagnosticKind::UnsupportedCRLineEnding,
  1050. DiagnosticLevel::Error, 1, 1, _)));
  1051. compile_helper_.GetTokenizedBuffer("\r", &consumer);
  1052. }
  1053. TEST_F(LexerTest, DiagnosticLFCR) {
  1054. Testing::MockDiagnosticConsumer consumer;
  1055. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  1056. DiagnosticKind::UnsupportedLFCRLineEnding,
  1057. DiagnosticLevel::Error, 2, 1, _)));
  1058. compile_helper_.GetTokenizedBuffer("\n\r", &consumer);
  1059. }
  1060. TEST_F(LexerTest, DiagnosticMissingTerminator) {
  1061. Testing::MockDiagnosticConsumer consumer;
  1062. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  1063. DiagnosticKind::UnterminatedString,
  1064. DiagnosticLevel::Error, 1, 1, _)));
  1065. compile_helper_.GetTokenizedBuffer(R"(#" ")", &consumer);
  1066. }
  1067. TEST_F(LexerTest, DiagnosticUnrecognizedChar) {
  1068. Testing::MockDiagnosticConsumer consumer;
  1069. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  1070. DiagnosticKind::UnrecognizedCharacters,
  1071. DiagnosticLevel::Error, 1, 1, _)));
  1072. compile_helper_.GetTokenizedBuffer("\b", &consumer);
  1073. }
  1074. TEST_F(LexerTest, DiagnosticFileTooLarge) {
  1075. Testing::MockDiagnosticConsumer consumer;
  1076. static constexpr size_t NumLines = 10'000'000;
  1077. std::string input;
  1078. input.reserve(NumLines * 3);
  1079. for ([[maybe_unused]] int _ : llvm::seq(NumLines)) {
  1080. input += "{}\n";
  1081. }
  1082. EXPECT_CALL(consumer,
  1083. HandleDiagnostic(IsSingleDiagnostic(DiagnosticKind::TooManyTokens,
  1084. DiagnosticLevel::Error,
  1085. TokenIndex::Max / 2, 1, _)));
  1086. compile_helper_.GetTokenizedBuffer(input, &consumer);
  1087. }
  1088. // Appends comment lines to the string, to create a comment block.
  1089. static auto AppendCommentLines(std::string& str, int count, llvm::StringRef tag)
  1090. -> void {
  1091. llvm::raw_string_ostream out(str);
  1092. for (int i : llvm::seq(count)) {
  1093. out << "// " << tag << i << "\n";
  1094. }
  1095. }
  1096. TEST_F(LexerTest, CommentBlock) {
  1097. for (int comments_before = 0; comments_before < 5; ++comments_before) {
  1098. std::string prefix;
  1099. AppendCommentLines(prefix, comments_before, "B");
  1100. for (int comments_after = 1; comments_after < 5; ++comments_after) {
  1101. std::string source = prefix;
  1102. if (comments_before > 0) {
  1103. source += "//\n";
  1104. }
  1105. AppendCommentLines(source, comments_after, "C");
  1106. SCOPED_TRACE(llvm::formatv(
  1107. "{0} comment lines before the empty comment line, {1} after",
  1108. comments_before, comments_after));
  1109. auto& buffer = compile_helper_.GetTokenizedBuffer(source);
  1110. ASSERT_FALSE(buffer.has_errors());
  1111. EXPECT_THAT(buffer.comments_size(), Eq(1));
  1112. }
  1113. }
  1114. }
  1115. TEST_F(LexerTest, IndentedComments) {
  1116. for (int indent = 0; indent < 40; ++indent) {
  1117. SCOPED_TRACE(llvm::formatv("Indent: {0}", indent));
  1118. std::string source;
  1119. llvm::raw_string_ostream source_stream(source);
  1120. source_stream.indent(indent);
  1121. source_stream << "// Comment\n";
  1122. auto& buffer = compile_helper_.GetTokenizedBuffer(source);
  1123. ASSERT_FALSE(buffer.has_errors());
  1124. EXPECT_THAT(buffer.comments_size(), Eq(1));
  1125. std::string simd_source =
  1126. source +
  1127. "\"Add a bunch of padding so that SIMD logic shouldn't hit EOF\"";
  1128. auto& simd_buffer = compile_helper_.GetTokenizedBuffer(simd_source);
  1129. ASSERT_FALSE(simd_buffer.has_errors());
  1130. EXPECT_THAT(simd_buffer.comments_size(), Eq(1));
  1131. }
  1132. }
// Checks that several distinct comment blocks in one file are each recorded,
// with their text round-tripping through `GetCommentText`, across both the
// SIMD and non-SIMD comment-scanning paths.
TEST_F(LexerTest, MultipleComments) {
  // TODO: Switch format to `llvm::StringLiteral` if
  // `llvm::StringLiteral::c_str` is added.
  // NOTE(review): `{3}` is placed at a high indent — Comments[3] says this is
  // what stops SIMD; confirm the exact indent against the lexer's threshold.
  constexpr char Format[] = R"(
{0}
{1}
{2}
                                        {3}
'''This is a string, not a comment. The next comment will stop SIMD due to being
too close to the EOF.
'''
{4}
x
)";
  constexpr llvm::StringLiteral Comments[] = {
      // NOLINTNEXTLINE(bugprone-suspicious-missing-comma)
      "// This comment should be possible to parse with SIMD.\n"
      "// This one too.\n",
      "// This one as well, though it's a different indent.\n"
      "  // And mixes indent.\n"
      "  // And mixes indent more.\n",
      // The `//Invalid` lines (no space after `//`) make the buffer report
      // errors below, but the whole run still counts as one comment.
      "// This is one comment:\n"
      "//Invalid\n"
      "// Valid\n"
      "//Invalid\n"
      "//\n"
      "// Valid\n"
      "//\n"
      "// Valid\n",
      "// This uses a high indent, which stops SIMD.\n", "//\n"};
  std::string source = llvm::formatv(Format, Comments[0], Comments[1],
                                     Comments[2], Comments[3], Comments[4])
                           .str();
  auto& buffer = compile_helper_.GetTokenizedBuffer(source);
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer.comments_size(), Eq(std::size(Comments)));
  // Guard the loop bound in case fewer comments were recorded than expected.
  for (int i :
       llvm::seq(std::min<int>(buffer.comments_size(), std::size(Comments)))) {
    EXPECT_THAT(buffer.GetCommentText(CommentIndex(i)).str(),
                testing::StrEq(Comments[i]));
  }
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::FileStart},
                          {.kind = TokenKind::StringLiteral},
                          {.kind = TokenKind::Identifier},
                          {.kind = TokenKind::FileEnd},
                      }));
}
// Test that we can parse the printed buffer into YAML and verify line,
// column, indent, and leading-space data for each token.
TEST_F(LexerTest, PrintingOutputYaml) {
  auto& buffer =
      compile_helper_.GetTokenizedBuffer("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n\n");
  ASSERT_FALSE(buffer.has_errors());
  TestRawOstream print_stream;
  buffer.Print(print_stream);
  // The YAML is a sequence of token mappings; pair order within each mapping
  // is fixed, so ElementsAre is used throughout.
  EXPECT_THAT(
      Yaml::Value::FromText(print_stream.TakeStr()),
      IsYaml(ElementsAre(Yaml::Sequence(ElementsAre(Yaml::Mapping(ElementsAre(
          Pair("filename", buffer.source().filename().str()),
          Pair("tokens", Yaml::Sequence(ElementsAre(
                             Yaml::Mapping(ElementsAre(
                                 Pair("index", "0"), Pair("kind", "FileStart"),
                                 Pair("line", "1"), Pair("column", "1"),
                                 Pair("indent", "1"), Pair("spelling", ""))),
                             Yaml::Mapping(ElementsAre(
                                 Pair("index", "1"), Pair("kind", "Semi"),
                                 Pair("line", "2"), Pair("column", "2"),
                                 Pair("indent", "2"), Pair("spelling", ";"),
                                 Pair("has_leading_space", "true"))),
                             Yaml::Mapping(ElementsAre(
                                 Pair("index", "2"), Pair("kind", "Semi"),
                                 Pair("line", "5"), Pair("column", "1"),
                                 Pair("indent", "1"), Pair("spelling", ";"),
                                 Pair("has_leading_space", "true"))),
                             Yaml::Mapping(ElementsAre(
                                 Pair("index", "3"), Pair("kind", "Semi"),
                                 Pair("line", "5"), Pair("column", "3"),
                                 Pair("indent", "1"), Pair("spelling", ";"),
                                 Pair("has_leading_space", "true"))),
                             Yaml::Mapping(ElementsAre(
                                 Pair("index", "4"), Pair("kind", "FileEnd"),
                                 Pair("line", "15"), Pair("column", "1"),
                                 Pair("indent", "1"), Pair("spelling", ""),
                                 Pair("has_leading_space", "true")))))))))))));
}
  1218. } // namespace
  1219. } // namespace Carbon::Lex