// File: tokenized_buffer_test.cpp
// (A line-number gutter artifact from the original paste was removed here.)
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. #include "toolchain/lex/tokenized_buffer.h"
  5. #include <gmock/gmock.h>
  6. #include <gtest/gtest.h>
  7. #include <forward_list>
  8. #include <iterator>
  9. #include "llvm/ADT/ArrayRef.h"
  10. #include "llvm/Support/FormatVariadic.h"
  11. #include "testing/base/test_raw_ostream.h"
  12. #include "toolchain/base/value_store.h"
  13. #include "toolchain/diagnostics/diagnostic_emitter.h"
  14. #include "toolchain/diagnostics/mocks.h"
  15. #include "toolchain/lex/lex.h"
  16. #include "toolchain/lex/tokenized_buffer_test_helpers.h"
  17. #include "toolchain/testing/compile_helper.h"
  18. #include "toolchain/testing/yaml_test_helpers.h"
  19. namespace Carbon::Lex {
  20. namespace {
  21. using ::Carbon::Testing::ExpectedToken;
  22. using ::Carbon::Testing::IsSingleDiagnostic;
  23. using ::Carbon::Testing::TestRawOstream;
  24. using ::testing::_;
  25. using ::testing::ElementsAre;
  26. using ::testing::Eq;
  27. using ::testing::HasSubstr;
  28. using ::testing::Pair;
  29. namespace Yaml = ::Carbon::Testing::Yaml;
  30. class LexerTest : public ::testing::Test {
  31. protected:
  32. Testing::CompileHelper compile_helper_;
  33. };
  34. TEST_F(LexerTest, HandlesEmptyBuffer) {
  35. auto& buffer = compile_helper_.GetTokenizedBuffer("");
  36. EXPECT_FALSE(buffer.has_errors());
  37. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  38. {.kind = TokenKind::FileStart},
  39. {.kind = TokenKind::FileEnd}}));
  40. }
  41. TEST_F(LexerTest, TracksLinesAndColumns) {
  42. auto& buffer = compile_helper_.GetTokenizedBuffer(
  43. "\n ;;\n ;;;\n x\"foo\" '''baz\n a\n ''' y");
  44. EXPECT_FALSE(buffer.has_errors());
  45. EXPECT_THAT(
  46. buffer,
  47. HasTokens(llvm::ArrayRef<ExpectedToken>{
  48. {.kind = TokenKind::FileStart,
  49. .line = 1,
  50. .column = 1,
  51. .indent_column = 1},
  52. {.kind = TokenKind::Semi, .line = 2, .column = 3, .indent_column = 3},
  53. {.kind = TokenKind::Semi, .line = 2, .column = 4, .indent_column = 3},
  54. {.kind = TokenKind::Semi, .line = 3, .column = 4, .indent_column = 4},
  55. {.kind = TokenKind::Semi, .line = 3, .column = 5, .indent_column = 4},
  56. {.kind = TokenKind::Semi, .line = 3, .column = 6, .indent_column = 4},
  57. {.kind = TokenKind::Identifier,
  58. .line = 4,
  59. .column = 4,
  60. .indent_column = 4,
  61. .text = "x"},
  62. {.kind = TokenKind::StringLiteral,
  63. .line = 4,
  64. .column = 5,
  65. .indent_column = 4},
  66. {.kind = TokenKind::StringLiteral,
  67. .line = 4,
  68. .column = 11,
  69. .indent_column = 4},
  70. {.kind = TokenKind::Identifier,
  71. .line = 6,
  72. .column = 6,
  73. .indent_column = 11,
  74. .text = "y"},
  75. {.kind = TokenKind::FileEnd, .line = 6, .column = 7},
  76. }));
  77. }
  78. TEST_F(LexerTest, TracksLinesAndColumnsCRLF) {
  79. auto& buffer = compile_helper_.GetTokenizedBuffer(
  80. "\r\n ;;\r\n ;;;\r\n x\"foo\" '''baz\r\n a\r\n ''' y");
  81. EXPECT_FALSE(buffer.has_errors());
  82. EXPECT_THAT(
  83. buffer,
  84. HasTokens(llvm::ArrayRef<ExpectedToken>{
  85. {.kind = TokenKind::FileStart,
  86. .line = 1,
  87. .column = 1,
  88. .indent_column = 1},
  89. {.kind = TokenKind::Semi, .line = 2, .column = 3, .indent_column = 3},
  90. {.kind = TokenKind::Semi, .line = 2, .column = 4, .indent_column = 3},
  91. {.kind = TokenKind::Semi, .line = 3, .column = 4, .indent_column = 4},
  92. {.kind = TokenKind::Semi, .line = 3, .column = 5, .indent_column = 4},
  93. {.kind = TokenKind::Semi, .line = 3, .column = 6, .indent_column = 4},
  94. {.kind = TokenKind::Identifier,
  95. .line = 4,
  96. .column = 4,
  97. .indent_column = 4,
  98. .text = "x"},
  99. {.kind = TokenKind::StringLiteral,
  100. .line = 4,
  101. .column = 5,
  102. .indent_column = 4},
  103. {.kind = TokenKind::StringLiteral,
  104. .line = 4,
  105. .column = 11,
  106. .indent_column = 4},
  107. {.kind = TokenKind::Identifier,
  108. .line = 6,
  109. .column = 6,
  110. .indent_column = 11,
  111. .text = "y"},
  112. {.kind = TokenKind::FileEnd, .line = 6, .column = 7},
  113. }));
  114. }
  115. TEST_F(LexerTest, InvalidCR) {
  116. auto& buffer = compile_helper_.GetTokenizedBuffer("\n ;;\r ;\n x");
  117. EXPECT_TRUE(buffer.has_errors());
  118. EXPECT_THAT(
  119. buffer,
  120. HasTokens(llvm::ArrayRef<ExpectedToken>{
  121. {.kind = TokenKind::FileStart,
  122. .line = 1,
  123. .column = 1,
  124. .indent_column = 1},
  125. {.kind = TokenKind::Semi, .line = 2, .column = 2, .indent_column = 2},
  126. {.kind = TokenKind::Semi, .line = 2, .column = 3, .indent_column = 2},
  127. {.kind = TokenKind::Semi, .line = 2, .column = 6, .indent_column = 2},
  128. {.kind = TokenKind::Identifier,
  129. .line = 3,
  130. .column = 4,
  131. .indent_column = 4,
  132. .text = "x"},
  133. {.kind = TokenKind::FileEnd, .line = 3, .column = 5},
  134. }));
  135. }
  136. TEST_F(LexerTest, InvalidLFCR) {
  137. auto& buffer = compile_helper_.GetTokenizedBuffer("\n ;;\n\r ;\n x");
  138. EXPECT_TRUE(buffer.has_errors());
  139. EXPECT_THAT(
  140. buffer,
  141. HasTokens(llvm::ArrayRef<ExpectedToken>{
  142. {.kind = TokenKind::FileStart,
  143. .line = 1,
  144. .column = 1,
  145. .indent_column = 1},
  146. {.kind = TokenKind::Semi, .line = 2, .column = 2, .indent_column = 2},
  147. {.kind = TokenKind::Semi, .line = 2, .column = 3, .indent_column = 2},
  148. {.kind = TokenKind::Semi, .line = 3, .column = 3, .indent_column = 1},
  149. {.kind = TokenKind::Identifier,
  150. .line = 4,
  151. .column = 4,
  152. .indent_column = 4,
  153. .text = "x"},
  154. {.kind = TokenKind::FileEnd, .line = 4, .column = 5},
  155. }));
  156. }
  157. TEST_F(LexerTest, HandlesNumericLiteral) {
  158. auto [buffer, value_stores] =
  159. compile_helper_.GetTokenizedBufferWithSharedValueStore(
  160. "12-578\n 1 2\n0x12_3ABC\n0b10_10_11\n1_234_567\n1.5e9");
  161. EXPECT_FALSE(buffer.has_errors());
  162. ASSERT_THAT(buffer,
  163. HasTokens(llvm::ArrayRef<ExpectedToken>{
  164. {.kind = TokenKind::FileStart, .line = 1, .column = 1},
  165. {.kind = TokenKind::IntLiteral,
  166. .line = 1,
  167. .column = 1,
  168. .indent_column = 1,
  169. .text = "12"},
  170. {.kind = TokenKind::Minus,
  171. .line = 1,
  172. .column = 3,
  173. .indent_column = 1},
  174. {.kind = TokenKind::IntLiteral,
  175. .line = 1,
  176. .column = 4,
  177. .indent_column = 1,
  178. .text = "578"},
  179. {.kind = TokenKind::IntLiteral,
  180. .line = 2,
  181. .column = 3,
  182. .indent_column = 3,
  183. .text = "1"},
  184. {.kind = TokenKind::IntLiteral,
  185. .line = 2,
  186. .column = 6,
  187. .indent_column = 3,
  188. .text = "2"},
  189. {.kind = TokenKind::IntLiteral,
  190. .line = 3,
  191. .column = 1,
  192. .indent_column = 1,
  193. .text = "0x12_3ABC"},
  194. {.kind = TokenKind::IntLiteral,
  195. .line = 4,
  196. .column = 1,
  197. .indent_column = 1,
  198. .text = "0b10_10_11"},
  199. {.kind = TokenKind::IntLiteral,
  200. .line = 5,
  201. .column = 1,
  202. .indent_column = 1,
  203. .text = "1_234_567"},
  204. {.kind = TokenKind::RealLiteral,
  205. .line = 6,
  206. .column = 1,
  207. .indent_column = 1,
  208. .text = "1.5e9"},
  209. {.kind = TokenKind::FileEnd, .line = 6, .column = 6},
  210. }));
  211. auto token_start = buffer.tokens().begin();
  212. auto token_12 = token_start + 1;
  213. EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_12)), 12);
  214. auto token_578 = token_12 + 2;
  215. EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_578)), 578);
  216. auto token_1 = token_578 + 1;
  217. EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_1)), 1);
  218. auto token_2 = token_1 + 1;
  219. EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_2)), 2);
  220. auto token_0x12_3abc = token_2 + 1;
  221. EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_0x12_3abc)),
  222. 0x12'3abc);
  223. auto token_0b10_10_11 = token_0x12_3abc + 1;
  224. EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_0b10_10_11)),
  225. 0b10'10'11);
  226. auto token_1_234_567 = token_0b10_10_11 + 1;
  227. EXPECT_EQ(value_stores.ints().Get(buffer.GetIntLiteral(*token_1_234_567)),
  228. 1'234'567);
  229. auto token_1_5e9 = token_1_234_567 + 1;
  230. auto value_1_5e9 =
  231. value_stores.reals().Get(buffer.GetRealLiteral(*token_1_5e9));
  232. EXPECT_EQ(value_1_5e9.mantissa.getZExtValue(), 15);
  233. EXPECT_EQ(value_1_5e9.exponent.getSExtValue(), 8);
  234. EXPECT_EQ(value_1_5e9.is_decimal, true);
  235. }
  236. TEST_F(LexerTest, HandlesInvalidNumericLiterals) {
  237. auto& buffer =
  238. compile_helper_.GetTokenizedBuffer("14x 15_49 0x3.5q 0x3_4.5_6 0ops");
  239. EXPECT_TRUE(buffer.has_errors());
  240. ASSERT_THAT(buffer,
  241. HasTokens(llvm::ArrayRef<ExpectedToken>{
  242. {.kind = TokenKind::FileStart, .line = 1, .column = 1},
  243. {.kind = TokenKind::Error,
  244. .line = 1,
  245. .column = 1,
  246. .indent_column = 1,
  247. .text = "14x"},
  248. {.kind = TokenKind::IntLiteral,
  249. .line = 1,
  250. .column = 5,
  251. .indent_column = 1,
  252. .text = "15_49"},
  253. {.kind = TokenKind::Error,
  254. .line = 1,
  255. .column = 11,
  256. .indent_column = 1,
  257. .text = "0x3.5q"},
  258. {.kind = TokenKind::RealLiteral,
  259. .line = 1,
  260. .column = 18,
  261. .indent_column = 1,
  262. .text = "0x3_4.5_6"},
  263. {.kind = TokenKind::Error,
  264. .line = 1,
  265. .column = 28,
  266. .indent_column = 1,
  267. .text = "0ops"},
  268. {.kind = TokenKind::FileEnd, .line = 1, .column = 32},
  269. }));
  270. }
  271. TEST_F(LexerTest, SplitsNumericLiteralsProperly) {
  272. llvm::StringLiteral source_text = R"(
  273. 1.
  274. .2
  275. 3.+foo
  276. 4.0-bar
  277. 5.0e+123+456
  278. 6.0e+1e+2
  279. 1e7
  280. 8..10
  281. 9.0.9.5
  282. 10.foo
  283. 11.0.foo
  284. 12e+1
  285. 13._
  286. )";
  287. auto& buffer = compile_helper_.GetTokenizedBuffer(source_text);
  288. EXPECT_TRUE(buffer.has_errors());
  289. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  290. {.kind = TokenKind::FileStart},
  291. {.kind = TokenKind::IntLiteral, .text = "1"},
  292. {.kind = TokenKind::Period},
  293. // newline
  294. {.kind = TokenKind::Period},
  295. {.kind = TokenKind::IntLiteral, .text = "2"},
  296. // newline
  297. {.kind = TokenKind::IntLiteral, .text = "3"},
  298. {.kind = TokenKind::Period},
  299. {.kind = TokenKind::Plus},
  300. {.kind = TokenKind::Identifier, .text = "foo"},
  301. // newline
  302. {.kind = TokenKind::RealLiteral, .text = "4.0"},
  303. {.kind = TokenKind::Minus},
  304. {.kind = TokenKind::Identifier, .text = "bar"},
  305. // newline
  306. {.kind = TokenKind::RealLiteral, .text = "5.0e+123"},
  307. {.kind = TokenKind::Plus},
  308. {.kind = TokenKind::IntLiteral, .text = "456"},
  309. // newline
  310. {.kind = TokenKind::Error, .text = "6.0e+1e"},
  311. {.kind = TokenKind::Plus},
  312. {.kind = TokenKind::IntLiteral, .text = "2"},
  313. // newline
  314. {.kind = TokenKind::Error, .text = "1e7"},
  315. // newline
  316. {.kind = TokenKind::IntLiteral, .text = "8"},
  317. {.kind = TokenKind::Period},
  318. {.kind = TokenKind::Period},
  319. {.kind = TokenKind::IntLiteral, .text = "10"},
  320. // newline
  321. {.kind = TokenKind::RealLiteral, .text = "9.0"},
  322. {.kind = TokenKind::Period},
  323. {.kind = TokenKind::RealLiteral, .text = "9.5"},
  324. // newline
  325. {.kind = TokenKind::Error, .text = "10.foo"},
  326. // newline
  327. {.kind = TokenKind::RealLiteral, .text = "11.0"},
  328. {.kind = TokenKind::Period},
  329. {.kind = TokenKind::Identifier, .text = "foo"},
  330. // newline
  331. {.kind = TokenKind::Error, .text = "12e"},
  332. {.kind = TokenKind::Plus},
  333. {.kind = TokenKind::IntLiteral, .text = "1"},
  334. // newline
  335. {.kind = TokenKind::IntLiteral, .text = "13"},
  336. {.kind = TokenKind::Period},
  337. {.kind = TokenKind::Underscore},
  338. // newline
  339. {.kind = TokenKind::FileEnd},
  340. }));
  341. }
  342. TEST_F(LexerTest, HandlesGarbageCharacters) {
  343. constexpr char GarbageText[] = "$$💩-$\n$\0$12$\n\\\"\\\n\"x";
  344. auto& buffer = compile_helper_.GetTokenizedBuffer(
  345. llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
  346. EXPECT_TRUE(buffer.has_errors());
  347. EXPECT_THAT(
  348. buffer,
  349. HasTokens(llvm::ArrayRef<ExpectedToken>{
  350. {.kind = TokenKind::FileStart, .line = 1, .column = 1},
  351. {.kind = TokenKind::Error,
  352. .line = 1,
  353. .column = 1,
  354. // 💩 takes 4 bytes, and we count column as bytes offset.
  355. .text = llvm::StringRef("$$💩", 6)},
  356. {.kind = TokenKind::Minus, .line = 1, .column = 7},
  357. {.kind = TokenKind::Error, .line = 1, .column = 8, .text = "$"},
  358. // newline
  359. {.kind = TokenKind::Error,
  360. .line = 2,
  361. .column = 1,
  362. .text = llvm::StringRef("$\0$", 3)},
  363. {.kind = TokenKind::IntLiteral, .line = 2, .column = 4, .text = "12"},
  364. {.kind = TokenKind::Error, .line = 2, .column = 6, .text = "$"},
  365. // newline
  366. {.kind = TokenKind::Backslash, .line = 3, .column = 1, .text = "\\"},
  367. {.kind = TokenKind::Error, .line = 3, .column = 2, .text = "\"\\"},
  368. // newline
  369. {.kind = TokenKind::Error, .line = 4, .column = 1, .text = "\"x"},
  370. {.kind = TokenKind::FileEnd, .line = 4, .column = 3},
  371. }));
  372. }
  373. TEST_F(LexerTest, Symbols) {
  374. // We don't need to exhaustively test symbols here as they're handled with
  375. // common code, but we want to check specific patterns to verify things like
  376. // max-munch rule and handling of interesting symbols.
  377. auto& buffer1 = compile_helper_.GetTokenizedBuffer("<<<");
  378. EXPECT_FALSE(buffer1.has_errors());
  379. EXPECT_THAT(buffer1, HasTokens(llvm::ArrayRef<ExpectedToken>{
  380. {.kind = TokenKind::FileStart},
  381. {.kind = TokenKind::LessLess},
  382. {.kind = TokenKind::Less},
  383. {.kind = TokenKind::FileEnd},
  384. }));
  385. auto& buffer2 = compile_helper_.GetTokenizedBuffer("<<=>>");
  386. EXPECT_FALSE(buffer2.has_errors());
  387. EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
  388. {.kind = TokenKind::FileStart},
  389. {.kind = TokenKind::LessLessEqual},
  390. {.kind = TokenKind::GreaterGreater},
  391. {.kind = TokenKind::FileEnd},
  392. }));
  393. auto& buffer3 = compile_helper_.GetTokenizedBuffer("< <=> >");
  394. EXPECT_FALSE(buffer3.has_errors());
  395. EXPECT_THAT(buffer3, HasTokens(llvm::ArrayRef<ExpectedToken>{
  396. {.kind = TokenKind::FileStart},
  397. {.kind = TokenKind::Less},
  398. {.kind = TokenKind::LessEqualGreater},
  399. {.kind = TokenKind::Greater},
  400. {.kind = TokenKind::FileEnd},
  401. }));
  402. auto& buffer4 = compile_helper_.GetTokenizedBuffer("\\/?@&^!");
  403. EXPECT_FALSE(buffer4.has_errors());
  404. EXPECT_THAT(buffer4, HasTokens(llvm::ArrayRef<ExpectedToken>{
  405. {.kind = TokenKind::FileStart},
  406. {.kind = TokenKind::Backslash},
  407. {.kind = TokenKind::Slash},
  408. {.kind = TokenKind::Question},
  409. {.kind = TokenKind::At},
  410. {.kind = TokenKind::Amp},
  411. {.kind = TokenKind::Caret},
  412. {.kind = TokenKind::Exclaim},
  413. {.kind = TokenKind::FileEnd},
  414. }));
  415. }
  416. TEST_F(LexerTest, Parens) {
  417. auto& buffer1 = compile_helper_.GetTokenizedBuffer("()");
  418. EXPECT_FALSE(buffer1.has_errors());
  419. EXPECT_THAT(buffer1, HasTokens(llvm::ArrayRef<ExpectedToken>{
  420. {.kind = TokenKind::FileStart},
  421. {.kind = TokenKind::OpenParen},
  422. {.kind = TokenKind::CloseParen},
  423. {.kind = TokenKind::FileEnd},
  424. }));
  425. auto& buffer2 = compile_helper_.GetTokenizedBuffer("((()()))");
  426. EXPECT_FALSE(buffer2.has_errors());
  427. EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
  428. {.kind = TokenKind::FileStart},
  429. {.kind = TokenKind::OpenParen},
  430. {.kind = TokenKind::OpenParen},
  431. {.kind = TokenKind::OpenParen},
  432. {.kind = TokenKind::CloseParen},
  433. {.kind = TokenKind::OpenParen},
  434. {.kind = TokenKind::CloseParen},
  435. {.kind = TokenKind::CloseParen},
  436. {.kind = TokenKind::CloseParen},
  437. {.kind = TokenKind::FileEnd},
  438. }));
  439. }
  440. TEST_F(LexerTest, CurlyBraces) {
  441. auto& buffer1 = compile_helper_.GetTokenizedBuffer("{}");
  442. EXPECT_FALSE(buffer1.has_errors());
  443. EXPECT_THAT(buffer1, HasTokens(llvm::ArrayRef<ExpectedToken>{
  444. {.kind = TokenKind::FileStart},
  445. {.kind = TokenKind::OpenCurlyBrace},
  446. {.kind = TokenKind::CloseCurlyBrace},
  447. {.kind = TokenKind::FileEnd},
  448. }));
  449. auto& buffer2 = compile_helper_.GetTokenizedBuffer("{{{}{}}}");
  450. EXPECT_FALSE(buffer2.has_errors());
  451. EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
  452. {.kind = TokenKind::FileStart},
  453. {.kind = TokenKind::OpenCurlyBrace},
  454. {.kind = TokenKind::OpenCurlyBrace},
  455. {.kind = TokenKind::OpenCurlyBrace},
  456. {.kind = TokenKind::CloseCurlyBrace},
  457. {.kind = TokenKind::OpenCurlyBrace},
  458. {.kind = TokenKind::CloseCurlyBrace},
  459. {.kind = TokenKind::CloseCurlyBrace},
  460. {.kind = TokenKind::CloseCurlyBrace},
  461. {.kind = TokenKind::FileEnd},
  462. }));
  463. }
  464. TEST_F(LexerTest, MatchingGroups) {
  465. {
  466. auto& buffer = compile_helper_.GetTokenizedBuffer("(){}");
  467. ASSERT_FALSE(buffer.has_errors());
  468. auto it = ++buffer.tokens().begin();
  469. auto open_paren_token = *it++;
  470. auto close_paren_token = *it++;
  471. EXPECT_EQ(close_paren_token,
  472. buffer.GetMatchedClosingToken(open_paren_token));
  473. EXPECT_EQ(open_paren_token,
  474. buffer.GetMatchedOpeningToken(close_paren_token));
  475. auto open_curly_token = *it++;
  476. auto close_curly_token = *it++;
  477. EXPECT_EQ(close_curly_token,
  478. buffer.GetMatchedClosingToken(open_curly_token));
  479. EXPECT_EQ(open_curly_token,
  480. buffer.GetMatchedOpeningToken(close_curly_token));
  481. auto eof_token = *it++;
  482. EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::FileEnd);
  483. EXPECT_EQ(buffer.tokens().end(), it);
  484. }
  485. {
  486. auto [buffer, value_stores] =
  487. compile_helper_.GetTokenizedBufferWithSharedValueStore(
  488. "({x}){(y)} {{((z))}}");
  489. ASSERT_FALSE(buffer.has_errors());
  490. auto it = ++buffer.tokens().begin();
  491. auto open_paren_token = *it++;
  492. auto open_curly_token = *it++;
  493. ASSERT_EQ("x", value_stores.identifiers().Get(buffer.GetIdentifier(*it++)));
  494. auto close_curly_token = *it++;
  495. auto close_paren_token = *it++;
  496. EXPECT_EQ(close_paren_token,
  497. buffer.GetMatchedClosingToken(open_paren_token));
  498. EXPECT_EQ(open_paren_token,
  499. buffer.GetMatchedOpeningToken(close_paren_token));
  500. EXPECT_EQ(close_curly_token,
  501. buffer.GetMatchedClosingToken(open_curly_token));
  502. EXPECT_EQ(open_curly_token,
  503. buffer.GetMatchedOpeningToken(close_curly_token));
  504. open_curly_token = *it++;
  505. open_paren_token = *it++;
  506. ASSERT_EQ("y", value_stores.identifiers().Get(buffer.GetIdentifier(*it++)));
  507. close_paren_token = *it++;
  508. close_curly_token = *it++;
  509. EXPECT_EQ(close_curly_token,
  510. buffer.GetMatchedClosingToken(open_curly_token));
  511. EXPECT_EQ(open_curly_token,
  512. buffer.GetMatchedOpeningToken(close_curly_token));
  513. EXPECT_EQ(close_paren_token,
  514. buffer.GetMatchedClosingToken(open_paren_token));
  515. EXPECT_EQ(open_paren_token,
  516. buffer.GetMatchedOpeningToken(close_paren_token));
  517. open_curly_token = *it++;
  518. auto inner_open_curly_token = *it++;
  519. open_paren_token = *it++;
  520. auto inner_open_paren_token = *it++;
  521. ASSERT_EQ("z", value_stores.identifiers().Get(buffer.GetIdentifier(*it++)));
  522. auto inner_close_paren_token = *it++;
  523. close_paren_token = *it++;
  524. auto inner_close_curly_token = *it++;
  525. close_curly_token = *it++;
  526. EXPECT_EQ(close_curly_token,
  527. buffer.GetMatchedClosingToken(open_curly_token));
  528. EXPECT_EQ(open_curly_token,
  529. buffer.GetMatchedOpeningToken(close_curly_token));
  530. EXPECT_EQ(inner_close_curly_token,
  531. buffer.GetMatchedClosingToken(inner_open_curly_token));
  532. EXPECT_EQ(inner_open_curly_token,
  533. buffer.GetMatchedOpeningToken(inner_close_curly_token));
  534. EXPECT_EQ(close_paren_token,
  535. buffer.GetMatchedClosingToken(open_paren_token));
  536. EXPECT_EQ(open_paren_token,
  537. buffer.GetMatchedOpeningToken(close_paren_token));
  538. EXPECT_EQ(inner_close_paren_token,
  539. buffer.GetMatchedClosingToken(inner_open_paren_token));
  540. EXPECT_EQ(inner_open_paren_token,
  541. buffer.GetMatchedOpeningToken(inner_close_paren_token));
  542. auto eof_token = *it++;
  543. EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::FileEnd);
  544. EXPECT_EQ(buffer.tokens().end(), it);
  545. }
  546. }
  547. TEST_F(LexerTest, MismatchedGroups) {
  548. auto& buffer1 = compile_helper_.GetTokenizedBuffer("{");
  549. EXPECT_TRUE(buffer1.has_errors());
  550. EXPECT_THAT(buffer1, HasTokens(llvm::ArrayRef<ExpectedToken>{
  551. {.kind = TokenKind::FileStart},
  552. {.kind = TokenKind::Error, .text = "{"},
  553. {.kind = TokenKind::FileEnd},
  554. }));
  555. auto& buffer2 = compile_helper_.GetTokenizedBuffer("}");
  556. EXPECT_TRUE(buffer2.has_errors());
  557. EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
  558. {.kind = TokenKind::FileStart},
  559. {.kind = TokenKind::Error, .text = "}"},
  560. {.kind = TokenKind::FileEnd},
  561. }));
  562. auto& buffer3 = compile_helper_.GetTokenizedBuffer("{(}");
  563. EXPECT_TRUE(buffer3.has_errors());
  564. EXPECT_THAT(
  565. buffer3,
  566. HasTokens(llvm::ArrayRef<ExpectedToken>{
  567. {.kind = TokenKind::FileStart},
  568. {.kind = TokenKind::OpenCurlyBrace, .column = 1},
  569. {.kind = TokenKind::OpenParen, .column = 2},
  570. {.kind = TokenKind::CloseParen, .column = 3, .recovery = true},
  571. {.kind = TokenKind::CloseCurlyBrace, .column = 3},
  572. {.kind = TokenKind::FileEnd},
  573. }));
  574. auto& buffer4 = compile_helper_.GetTokenizedBuffer(")({)");
  575. EXPECT_TRUE(buffer4.has_errors());
  576. EXPECT_THAT(
  577. buffer4,
  578. HasTokens(llvm::ArrayRef<ExpectedToken>{
  579. {.kind = TokenKind::FileStart},
  580. {.kind = TokenKind::Error, .column = 1, .text = ")"},
  581. {.kind = TokenKind::OpenParen, .column = 2},
  582. {.kind = TokenKind::OpenCurlyBrace, .column = 3},
  583. {.kind = TokenKind::CloseCurlyBrace, .column = 4, .recovery = true},
  584. {.kind = TokenKind::CloseParen, .column = 4},
  585. {.kind = TokenKind::FileEnd},
  586. }));
  587. }
  588. TEST_F(LexerTest, Whitespace) {
  589. auto& buffer = compile_helper_.GetTokenizedBuffer("{( } {(");
  590. // Whether there should be whitespace before/after each token.
  591. bool space[] = {false,
  592. // start-of-file
  593. true,
  594. // {
  595. false,
  596. // (
  597. true,
  598. // inserted )
  599. true,
  600. // }
  601. true,
  602. // error {
  603. false,
  604. // error (
  605. true,
  606. // EOF
  607. false};
  608. int pos = 0;
  609. for (TokenIndex token : buffer.tokens()) {
  610. SCOPED_TRACE(
  611. llvm::formatv("Token #{0}: '{1}'", token, buffer.GetTokenText(token)));
  612. ASSERT_LT(pos, std::size(space));
  613. EXPECT_THAT(buffer.HasLeadingWhitespace(token), Eq(space[pos]));
  614. ++pos;
  615. ASSERT_LT(pos, std::size(space));
  616. EXPECT_THAT(buffer.HasTrailingWhitespace(token), Eq(space[pos]));
  617. }
  618. ASSERT_EQ(pos + 1, std::size(space));
  619. }
  620. TEST_F(LexerTest, Keywords) {
  621. TokenKind keywords[] = {
  622. #define CARBON_TOKEN(TokenName)
  623. #define CARBON_KEYWORD_TOKEN(TokenName, ...) TokenKind::TokenName,
  624. #include "toolchain/lex/token_kind.def"
  625. };
  626. for (const auto& keyword : keywords) {
  627. auto& buffer = compile_helper_.GetTokenizedBuffer(keyword.fixed_spelling());
  628. EXPECT_FALSE(buffer.has_errors());
  629. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  630. {.kind = TokenKind::FileStart},
  631. {.kind = keyword, .column = 1, .indent_column = 1},
  632. {.kind = TokenKind::FileEnd},
  633. }));
  634. }
  635. }
  636. TEST_F(LexerTest, Comments) {
  637. auto& buffer1 = compile_helper_.GetTokenizedBuffer(" ;\n // foo\n ;\n");
  638. EXPECT_FALSE(buffer1.has_errors());
  639. EXPECT_THAT(
  640. buffer1,
  641. HasTokens(llvm::ArrayRef<ExpectedToken>{
  642. {.kind = TokenKind::FileStart, .line = 1, .column = 1},
  643. {.kind = TokenKind::Semi, .line = 1, .column = 2, .indent_column = 2},
  644. {.kind = TokenKind::Semi, .line = 3, .column = 3, .indent_column = 3},
  645. {.kind = TokenKind::FileEnd, .line = 3, .column = 4},
  646. }));
  647. auto& buffer2 = compile_helper_.GetTokenizedBuffer("// foo\n//\n// bar");
  648. EXPECT_FALSE(buffer2.has_errors());
  649. EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
  650. {.kind = TokenKind::FileStart},
  651. {.kind = TokenKind::FileEnd}}));
  652. // Make sure weird characters aren't a problem.
  653. auto& buffer3 =
  654. compile_helper_.GetTokenizedBuffer(" // foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
  655. EXPECT_FALSE(buffer3.has_errors());
  656. EXPECT_THAT(buffer3, HasTokens(llvm::ArrayRef<ExpectedToken>{
  657. {.kind = TokenKind::FileStart},
  658. {.kind = TokenKind::FileEnd}}));
  659. // Make sure we can lex a comment at the end of the input.
  660. auto& buffer4 = compile_helper_.GetTokenizedBuffer("//");
  661. EXPECT_FALSE(buffer4.has_errors());
  662. EXPECT_THAT(buffer4, HasTokens(llvm::ArrayRef<ExpectedToken>{
  663. {.kind = TokenKind::FileStart},
  664. {.kind = TokenKind::FileEnd}}));
  665. }
  666. TEST_F(LexerTest, InvalidComments) {
  667. llvm::StringLiteral testcases[] = {
  668. " /// foo\n",
  669. "foo // bar\n",
  670. "//! hello",
  671. " //world",
  672. };
  673. for (llvm::StringLiteral testcase : testcases) {
  674. auto& buffer = compile_helper_.GetTokenizedBuffer(testcase);
  675. EXPECT_TRUE(buffer.has_errors());
  676. }
  677. }
TEST_F(LexerTest, Identifiers) {
  // A simple identifier preceded by horizontal whitespace. Columns are
  // 1-based, so three leading spaces put the token at column 4.
  auto& buffer1 = compile_helper_.GetTokenizedBuffer("   foobar");
  EXPECT_FALSE(buffer1.has_errors());
  EXPECT_THAT(buffer1, HasTokens(llvm::ArrayRef<ExpectedToken>{
                           {.kind = TokenKind::FileStart},
                           {.kind = TokenKind::Identifier,
                            .column = 4,
                            .indent_column = 4,
                            .text = "foobar"},
                           {.kind = TokenKind::FileEnd},
                       }));

  // Check different kinds of identifier character sequences.
  auto& buffer2 = compile_helper_.GetTokenizedBuffer("_foo_bar");
  EXPECT_FALSE(buffer2.has_errors());
  EXPECT_THAT(buffer2, HasTokens(llvm::ArrayRef<ExpectedToken>{
                           {.kind = TokenKind::FileStart},
                           {.kind = TokenKind::Identifier, .text = "_foo_bar"},
                           {.kind = TokenKind::FileEnd},
                       }));

  // Digits are allowed after the leading character.
  auto& buffer3 = compile_helper_.GetTokenizedBuffer("foo2bar00");
  EXPECT_FALSE(buffer3.has_errors());
  EXPECT_THAT(buffer3, HasTokens(llvm::ArrayRef<ExpectedToken>{
                           {.kind = TokenKind::FileStart},
                           {.kind = TokenKind::Identifier, .text = "foo2bar00"},
                           {.kind = TokenKind::FileEnd},
                       }));

  // Check that we can parse identifiers that start with a keyword.
  auto& buffer4 = compile_helper_.GetTokenizedBuffer("fnord");
  EXPECT_FALSE(buffer4.has_errors());
  EXPECT_THAT(buffer4, HasTokens(llvm::ArrayRef<ExpectedToken>{
                           {.kind = TokenKind::FileStart},
                           {.kind = TokenKind::Identifier, .text = "fnord"},
                           {.kind = TokenKind::FileEnd},
                       }));

  // Check multiple identifiers with indent and interning.
  auto& buffer5 =
      compile_helper_.GetTokenizedBuffer("   foo;bar\nbar \n  foo\tfoo");
  EXPECT_FALSE(buffer5.has_errors());
  EXPECT_THAT(buffer5,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::FileStart, .line = 1, .column = 1},
                  // `indent_column` is the indent of the first token on the
                  // token's line, so `bar` on line 1 keeps indent 4.
                  {.kind = TokenKind::Identifier,
                   .line = 1,
                   .column = 4,
                   .indent_column = 4,
                   .text = "foo"},
                  {.kind = TokenKind::Semi},
                  {.kind = TokenKind::Identifier,
                   .line = 1,
                   .column = 8,
                   .indent_column = 4,
                   .text = "bar"},
                  {.kind = TokenKind::Identifier,
                   .line = 2,
                   .column = 1,
                   .indent_column = 1,
                   .text = "bar"},
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 3,
                   .indent_column = 3,
                   .text = "foo"},
                  // The tab between the two `foo`s advances one column, so
                  // the second `foo` lands at column 7.
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 7,
                   .indent_column = 3,
                   .text = "foo"},
                  {.kind = TokenKind::FileEnd, .line = 3, .column = 10},
              }));
}
TEST_F(LexerTest, StringLiterals) {
  // Blank lines separate the literals so each one's expected line number is
  // easy to read off below. Columns are 1-based.
  llvm::StringLiteral testcase = R"(
    "hello world\n"

    '''foo
     test \
    \xAB
    '''  trailing

      #"""#

    "\0"

    #"\0"foo"\1"#

    """x"""
  )";
  auto [buffer, value_stores] =
      compile_helper_.GetTokenizedBufferWithSharedValueStore(testcase);
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::FileStart, .line = 1, .column = 1},
                  // A simple literal with an escape sequence in its value.
                  {.kind = TokenKind::StringLiteral,
                   .line = 2,
                   .column = 5,
                   .indent_column = 5,
                   .value_stores = &value_stores,
                   .string_contents = {"hello world\n"}},
                  // Block string: the terminator's indent is stripped from
                  // each content line, and a trailing `\` elides the newline.
                  {.kind = TokenKind::StringLiteral,
                   .line = 4,
                   .column = 5,
                   .indent_column = 5,
                   .value_stores = &value_stores,
                   .string_contents = {" test \xAB\n"}},
                  // Code may follow a block string terminator on its line.
                  {.kind = TokenKind::Identifier,
                   .line = 7,
                   .column = 10,
                   .indent_column = 5,
                   .text = "trailing"},
                  // `#"..."#` raw string: a bare `"` is plain content.
                  {.kind = TokenKind::StringLiteral,
                   .line = 9,
                   .column = 7,
                   .indent_column = 7,
                   .value_stores = &value_stores,
                   .string_contents = {"\""}},
                  // `\0` produces an embedded NUL in the value.
                  {.kind = TokenKind::StringLiteral,
                   .line = 11,
                   .column = 5,
                   .indent_column = 5,
                   .value_stores = &value_stores,
                   .string_contents = llvm::StringLiteral::withInnerNUL("\0")},
                  // In a raw string, `\0` and `\1` are not escape sequences.
                  {.kind = TokenKind::StringLiteral,
                   .line = 13,
                   .column = 5,
                   .indent_column = 5,
                   .value_stores = &value_stores,
                   .string_contents = {"\\0\"foo\"\\1"}},
                  // """x""" is three string literals, not one invalid
                  // attempt at a block string literal.
                  {.kind = TokenKind::StringLiteral,
                   .line = 15,
                   .column = 5,
                   .indent_column = 5,
                   .value_stores = &value_stores,
                   .string_contents = {""}},
                  {.kind = TokenKind::StringLiteral,
                   .line = 15,
                   .column = 7,
                   .indent_column = 5,
                   .value_stores = &value_stores,
                   .string_contents = {"x"}},
                  {.kind = TokenKind::StringLiteral,
                   .line = 15,
                   .column = 10,
                   .indent_column = 5,
                   .value_stores = &value_stores,
                   .string_contents = {""}},
                  {.kind = TokenKind::FileEnd, .line = 16, .column = 3},
              }));
}
  824. TEST_F(LexerTest, InvalidStringLiterals) {
  825. llvm::StringLiteral invalid[] = {
  826. // clang-format off
  827. R"(")",
  828. R"('''
  829. '')",
  830. R"("\)",
  831. R"("\")",
  832. R"("\\)",
  833. R"("\\\")",
  834. R"(''')",
  835. R"('''
  836. )",
  837. R"('''\)",
  838. R"(#'''
  839. ''')",
  840. // clang-format on
  841. };
  842. for (llvm::StringLiteral test : invalid) {
  843. SCOPED_TRACE(test);
  844. auto& buffer = compile_helper_.GetTokenizedBuffer(test);
  845. EXPECT_TRUE(buffer.has_errors());
  846. // We should have formed at least one error token.
  847. bool found_error = false;
  848. for (TokenIndex token : buffer.tokens()) {
  849. if (buffer.GetKind(token) == TokenKind::Error) {
  850. found_error = true;
  851. break;
  852. }
  853. }
  854. EXPECT_TRUE(found_error);
  855. }
  856. }
TEST_F(LexerTest, TypeLiterals) {
  // `i`/`u`/`f` followed by a plain decimal size lexes as a type literal;
  // everything else here (`i0`, `i0x1`, `u0`, `u64b`, `fi`, `s1`) lexes as
  // an ordinary identifier.
  llvm::StringLiteral testcase = R"(
    i0 i1 i20 i999999999999 i0x1
    u0 u1 u64 u64b
    f32 f80 f1 fi
    s1
  )";
  auto [buffer, value_stores] =
      compile_helper_.GetTokenizedBufferWithSharedValueStore(testcase);
  EXPECT_FALSE(buffer.has_errors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::FileStart, .line = 1, .column = 1},
                  // `i0`: a zero size is not a valid type literal.
                  {.kind = TokenKind::Identifier,
                   .line = 2,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"i0"}},
                  {.kind = TokenKind::IntTypeLiteral,
                   .line = 2,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"i1"}},
                  {.kind = TokenKind::IntTypeLiteral,
                   .line = 2,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"i20"}},
                  // Sizes larger than any machine integer still lex.
                  {.kind = TokenKind::IntTypeLiteral,
                   .line = 2,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"i999999999999"}},
                  // A non-decimal suffix makes it an identifier.
                  {.kind = TokenKind::Identifier,
                   .line = 2,
                   .column = 29,
                   .indent_column = 5,
                   .text = {"i0x1"}},
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"u0"}},
                  {.kind = TokenKind::UnsignedIntTypeLiteral,
                   .line = 3,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"u1"}},
                  {.kind = TokenKind::UnsignedIntTypeLiteral,
                   .line = 3,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"u64"}},
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"u64b"}},
                  {.kind = TokenKind::FloatTypeLiteral,
                   .line = 4,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"f32"}},
                  {.kind = TokenKind::FloatTypeLiteral,
                   .line = 4,
                   .column = 9,
                   .indent_column = 5,
                   .text = {"f80"}},
                  {.kind = TokenKind::FloatTypeLiteral,
                   .line = 4,
                   .column = 13,
                   .indent_column = 5,
                   .text = {"f1"}},
                  {.kind = TokenKind::Identifier,
                   .line = 4,
                   .column = 16,
                   .indent_column = 5,
                   .text = {"fi"}},
                  // `s` is not a type-literal prefix.
                  {.kind = TokenKind::Identifier,
                   .line = 5,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"s1"}},
                  {.kind = TokenKind::FileEnd, .line = 6, .column = 3},
              }));
  // Returns the parsed bit-width stored for the type literal at
  // `token_index` in the token list above (FileStart is index 0).
  auto type_size = [&](int token_index) {
    auto token = buffer.tokens().begin()[token_index];
    return value_stores.ints().Get(buffer.GetTypeLiteralSize(token));
  };
  EXPECT_EQ(type_size(2), 1);              // i1
  EXPECT_EQ(type_size(3), 20);             // i20
  EXPECT_EQ(type_size(4), 999999999999ULL);  // i999999999999
  EXPECT_EQ(type_size(7), 1);              // u1
  EXPECT_EQ(type_size(8), 64);             // u64
  EXPECT_EQ(type_size(10), 32);            // f32
  EXPECT_EQ(type_size(11), 80);            // f80
  EXPECT_EQ(type_size(12), 1);             // f1
}
  955. TEST_F(LexerTest, TypeLiteralTooManyDigits) {
  956. std::string code = "i";
  957. constexpr int Count = 10000;
  958. code.append(Count, '9');
  959. Testing::MockDiagnosticConsumer consumer;
  960. EXPECT_CALL(consumer,
  961. HandleDiagnostic(IsSingleDiagnostic(
  962. DiagnosticKind::TooManyDigits, DiagnosticLevel::Error, 1, 2,
  963. HasSubstr(llvm::formatv(" {0} ", Count)))));
  964. auto& buffer = compile_helper_.GetTokenizedBuffer(code, &consumer);
  965. EXPECT_TRUE(buffer.has_errors());
  966. ASSERT_THAT(buffer,
  967. HasTokens(llvm::ArrayRef<ExpectedToken>{
  968. {.kind = TokenKind::FileStart, .line = 1, .column = 1},
  969. {.kind = TokenKind::Error,
  970. .line = 1,
  971. .column = 1,
  972. .indent_column = 1,
  973. .text = code},
  974. {.kind = TokenKind::FileEnd, .line = 1, .column = Count + 2},
  975. }));
  976. }
TEST_F(LexerTest, DiagnosticTrailingComment) {
  llvm::StringLiteral testcase = R"(
    // Hello!
    var String x; // trailing comment
  )";
  // A comment after code on the same line is an error; the diagnostic
  // points at the `//` on line 3, column 19.
  Testing::MockDiagnosticConsumer consumer;
  EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
                            DiagnosticKind::TrailingComment,
                            DiagnosticLevel::Error, 3, 19, _)));
  compile_helper_.GetTokenizedBuffer(testcase, &consumer);
}
  988. TEST_F(LexerTest, DiagnosticWhitespace) {
  989. Testing::MockDiagnosticConsumer consumer;
  990. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  991. DiagnosticKind::NoWhitespaceAfterCommentIntroducer,
  992. DiagnosticLevel::Error, 1, 3, _)));
  993. compile_helper_.GetTokenizedBuffer("//no space after comment", &consumer);
  994. }
  995. TEST_F(LexerTest, DiagnosticUnrecognizedEscape) {
  996. Testing::MockDiagnosticConsumer consumer;
  997. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  998. DiagnosticKind::UnknownEscapeSequence,
  999. DiagnosticLevel::Error, 1, 8, HasSubstr("`b`"))));
  1000. compile_helper_.GetTokenizedBuffer(R"("hello\bworld")", &consumer);
  1001. }
  1002. TEST_F(LexerTest, DiagnosticBadHex) {
  1003. Testing::MockDiagnosticConsumer consumer;
  1004. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  1005. DiagnosticKind::HexadecimalEscapeMissingDigits,
  1006. DiagnosticLevel::Error, 1, 9, _)));
  1007. compile_helper_.GetTokenizedBuffer(R"("hello\xabworld")", &consumer);
  1008. }
  1009. TEST_F(LexerTest, DiagnosticInvalidDigit) {
  1010. Testing::MockDiagnosticConsumer consumer;
  1011. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  1012. DiagnosticKind::InvalidDigit,
  1013. DiagnosticLevel::Error, 1, 6, HasSubstr("'a'"))));
  1014. compile_helper_.GetTokenizedBuffer("0x123abc", &consumer);
  1015. }
  1016. TEST_F(LexerTest, DiagnosticMissingTerminator) {
  1017. Testing::MockDiagnosticConsumer consumer;
  1018. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  1019. DiagnosticKind::UnterminatedString,
  1020. DiagnosticLevel::Error, 1, 1, _)));
  1021. compile_helper_.GetTokenizedBuffer(R"(#" ")", &consumer);
  1022. }
  1023. TEST_F(LexerTest, DiagnosticUnrecognizedChar) {
  1024. Testing::MockDiagnosticConsumer consumer;
  1025. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  1026. DiagnosticKind::UnrecognizedCharacters,
  1027. DiagnosticLevel::Error, 1, 1, _)));
  1028. compile_helper_.GetTokenizedBuffer("\b", &consumer);
  1029. }
  1030. // Appends comment lines to the string, to create a comment block.
  1031. static auto AppendCommentLines(std::string& str, int count, llvm::StringRef tag)
  1032. -> void {
  1033. llvm::raw_string_ostream out(str);
  1034. for (int i : llvm::seq(count)) {
  1035. out << "// " << tag << i << "\n";
  1036. }
  1037. }
  1038. TEST_F(LexerTest, CommentBlock) {
  1039. for (int comments_before = 0; comments_before < 5; ++comments_before) {
  1040. std::string prefix;
  1041. AppendCommentLines(prefix, comments_before, "B");
  1042. for (int comments_after = 1; comments_after < 5; ++comments_after) {
  1043. std::string source = prefix;
  1044. if (comments_before > 0) {
  1045. source += "//\n";
  1046. }
  1047. AppendCommentLines(source, comments_after, "C");
  1048. SCOPED_TRACE(llvm::formatv(
  1049. "{0} comment lines before the empty comment line, {1} after",
  1050. comments_before, comments_after));
  1051. auto& buffer = compile_helper_.GetTokenizedBuffer(source);
  1052. ASSERT_FALSE(buffer.has_errors());
  1053. EXPECT_THAT(buffer.comments_size(), Eq(1));
  1054. }
  1055. }
  1056. }
  1057. TEST_F(LexerTest, IndentedComments) {
  1058. for (int indent = 0; indent < 40; ++indent) {
  1059. SCOPED_TRACE(llvm::formatv("Indent: {0}", indent));
  1060. std::string source;
  1061. llvm::raw_string_ostream source_stream(source);
  1062. source_stream.indent(indent);
  1063. source_stream << "// Comment\n";
  1064. auto& buffer = compile_helper_.GetTokenizedBuffer(source);
  1065. ASSERT_FALSE(buffer.has_errors());
  1066. EXPECT_THAT(buffer.comments_size(), Eq(1));
  1067. std::string simd_source =
  1068. source +
  1069. "\"Add a bunch of padding so that SIMD logic shouldn't hit EOF\"";
  1070. auto& simd_buffer = compile_helper_.GetTokenizedBuffer(source);
  1071. ASSERT_FALSE(simd_buffer.has_errors());
  1072. EXPECT_THAT(simd_buffer.comments_size(), Eq(1));
  1073. }
  1074. }
TEST_F(LexerTest, MultipleComments) {
  // Lex several distinct comment blocks, interleaved with a block string,
  // and verify each block's text round-trips through `GetCommentText`.
  constexpr llvm::StringLiteral Format = R"(
{0}
{1}
{2}
{3}
'''This is a string, not a comment. The next comment will stop SIMD due to being
too close to the EOF.
'''
{4}
x
)";
  constexpr llvm::StringLiteral Comments[] = {
      // NOLINTNEXTLINE(bugprone-suspicious-missing-comma)
      "// This comment should be possible to parse with SIMD.\n"
      "// This one too.\n",
      "// This one as well, though it's a different indent.\n"
      " // And mixes indent.\n"
      " // And mixes indent more.\n",
      "// This is one comment:\n"
      "//Invalid\n"
      "// Valid\n"
      "//Invalid\n"
      "//\n"
      "// Valid\n"
      "//\n"
      "// Valid\n",
      "// This uses a high indent, which stops SIMD.\n", "//\n"};
  std::string source = llvm::formatv(Format.data(), Comments[0], Comments[1],
                                     Comments[2], Comments[3], Comments[4])
                           .str();
  auto& buffer = compile_helper_.GetTokenizedBuffer(source);
  // The `//Invalid` lines (no space after the introducer) make the buffer
  // report errors, but the lines still merge into their comment block.
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer.comments_size(), Eq(std::size(Comments)));
  // Guard the loop bound so a miscount above doesn't index out of range.
  for (int i :
       llvm::seq(std::min<int>(buffer.comments_size(), std::size(Comments)))) {
    EXPECT_THAT(buffer.GetCommentText(CommentIndex(i)).str(),
                testing::StrEq(Comments[i]));
  }
  // Comments are not tokens: only the string, the `x` identifier, and the
  // file markers remain in the token stream.
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::FileStart},
                          {.kind = TokenKind::StringLiteral},
                          {.kind = TokenKind::Identifier},
                          {.kind = TokenKind::FileEnd},
                      }));
}
TEST_F(LexerTest, PrintingOutputYaml) {
  // Test that we can parse this into YAML and verify line and indent data.
  auto& buffer =
      compile_helper_.GetTokenizedBuffer("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n\n");
  ASSERT_FALSE(buffer.has_errors());
  TestRawOstream print_stream;
  buffer.Print(print_stream);
  // The printed YAML is one document: a sequence holding one mapping with
  // the filename and the token list. Each token records its index, kind,
  // 1-based line/column, indent, spelling, and — when present — a
  // `has_leading_space` flag.
  EXPECT_THAT(
      Yaml::Value::FromText(print_stream.TakeStr()),
      IsYaml(ElementsAre(Yaml::Sequence(ElementsAre(Yaml::Mapping(ElementsAre(
          Pair("filename", buffer.source().filename().str()),
          Pair("tokens", Yaml::Sequence(ElementsAre(
                             Yaml::Mapping(ElementsAre(
                                 Pair("index", "0"), Pair("kind", "FileStart"),
                                 Pair("line", "1"), Pair("column", "1"),
                                 Pair("indent", "1"), Pair("spelling", ""))),
                             Yaml::Mapping(ElementsAre(
                                 Pair("index", "1"), Pair("kind", "Semi"),
                                 Pair("line", "2"), Pair("column", "2"),
                                 Pair("indent", "2"), Pair("spelling", ";"),
                                 Pair("has_leading_space", "true"))),
                             Yaml::Mapping(ElementsAre(
                                 Pair("index", "2"), Pair("kind", "Semi"),
                                 Pair("line", "5"), Pair("column", "1"),
                                 Pair("indent", "1"), Pair("spelling", ";"),
                                 Pair("has_leading_space", "true"))),
                             Yaml::Mapping(ElementsAre(
                                 Pair("index", "3"), Pair("kind", "Semi"),
                                 Pair("line", "5"), Pair("column", "3"),
                                 Pair("indent", "1"), Pair("spelling", ";"),
                                 Pair("has_leading_space", "true"))),
                             Yaml::Mapping(ElementsAre(
                                 Pair("index", "4"), Pair("kind", "FileEnd"),
                                 Pair("line", "15"), Pair("column", "1"),
                                 Pair("indent", "1"), Pair("spelling", ""),
                                 Pair("has_leading_space", "true")))))))))))));
}
  1158. } // namespace
  1159. } // namespace Carbon::Lex