// File: tokenized_buffer_test.cpp (lexer unit tests).
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. #include "toolchain/lex/tokenized_buffer.h"
  5. #include <gmock/gmock.h>
  6. #include <gtest/gtest.h>
  7. #include <forward_list>
  8. #include <iterator>
  9. #include "llvm/ADT/ArrayRef.h"
  10. #include "testing/base/test_raw_ostream.h"
  11. #include "toolchain/diagnostics/diagnostic_emitter.h"
  12. #include "toolchain/diagnostics/mocks.h"
  13. #include "toolchain/lex/tokenized_buffer_test_helpers.h"
  14. #include "toolchain/testing/yaml_test_helpers.h"
namespace Carbon::Testing {
namespace {

// Short aliases for the lexer types exercised by these tests.
using Lex::Token;
using Lex::TokenizedBuffer;
using Lex::TokenKind;
// GoogleMock matchers used by the expectations below.
using ::testing::_;
using ::testing::ElementsAre;
using ::testing::Eq;
using ::testing::HasSubstr;
using ::testing::Pair;
// Test fixture providing helpers to lex source text held in an in-memory
// filesystem.
class LexerTest : public ::testing::Test {
 protected:
  // Copies `text` into a uniquely named in-memory file and returns a
  // SourceBuffer reading from it. The buffer is owned by `source_storage_`,
  // so the returned reference stays valid for the lifetime of the test.
  auto GetSourceBuffer(llvm::StringRef text) -> SourceBuffer& {
    std::string filename = llvm::formatv("test{0}.carbon", ++file_index_);
    CARBON_CHECK(fs_.addFile(filename, /*ModificationTime=*/0,
                             llvm::MemoryBuffer::getMemBuffer(text)));
    source_storage_.push_front(std::move(*SourceBuffer::CreateFromFile(
        fs_, filename, ConsoleDiagnosticConsumer())));
    return source_storage_.front();
  }

  // Lexes `text` and returns the resulting token buffer. Diagnostics are
  // routed to `consumer`, defaulting to the console.
  auto Lex(llvm::StringRef text,
           DiagnosticConsumer& consumer = ConsoleDiagnosticConsumer())
      -> TokenizedBuffer {
    return TokenizedBuffer::Lex(GetSourceBuffer(text), consumer);
  }

  // Virtual filesystem holding one file per `Lex` call.
  llvm::vfs::InMemoryFileSystem fs_;
  // Monotonic counter used to give each in-memory file a unique name.
  int file_index_ = 0;
  // std::forward_list never invalidates references to existing elements, so
  // references handed out by GetSourceBuffer remain stable as more source
  // buffers are added.
  std::forward_list<SourceBuffer> source_storage_;
};
  44. TEST_F(LexerTest, HandlesEmptyBuffer) {
  45. auto buffer = Lex("");
  46. EXPECT_FALSE(buffer.has_errors());
  47. EXPECT_THAT(buffer,
  48. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile}}));
  49. }
// Verifies line, column, and indent-column bookkeeping across newlines,
// varying indentation, and a multi-line block string literal.
TEST_F(LexerTest, TracksLinesAndColumns) {
  // Line 2: two semicolons at indent 3; line 3: three at indent 4; line 4:
  // identifier, a quoted string, and the start of a block string closed on
  // line 6 followed by `y`.
  auto buffer = Lex("\n  ;;\n   ;;;\n   x\"foo\" '''baz\n  a\n ''' y");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::Semi, .line = 2, .column = 3, .indent_column = 3},
          {.kind = TokenKind::Semi, .line = 2, .column = 4, .indent_column = 3},
          {.kind = TokenKind::Semi, .line = 3, .column = 4, .indent_column = 4},
          {.kind = TokenKind::Semi, .line = 3, .column = 5, .indent_column = 4},
          {.kind = TokenKind::Semi, .line = 3, .column = 6, .indent_column = 4},
          {.kind = TokenKind::Identifier,
           .line = 4,
           .column = 4,
           .indent_column = 4,
           .text = "x"},
          {.kind = TokenKind::StringLiteral,
           .line = 4,
           .column = 5,
           .indent_column = 4},
          // The block string token is positioned at its opening `'''`.
          {.kind = TokenKind::StringLiteral,
           .line = 4,
           .column = 11,
           .indent_column = 4},
          // NOTE(review): indent_column 11 here appears to be the column of
          // the preceding block string's opening quote rather than this
          // line's own indentation — confirm against the lexer's
          // indent-tracking rules.
          {.kind = TokenKind::Identifier,
           .line = 6,
           .column = 6,
           .indent_column = 11,
           .text = "y"},
          {.kind = TokenKind::EndOfFile, .line = 6, .column = 7},
      }));
}
// Covers integer literals (decimal, hex, binary, with digit separators) and a
// real literal, checking both token positions and decoded values.
TEST_F(LexerTest, HandlesNumericLiteral) {
  auto buffer = Lex("12-578\n  1  2\n0x12_3ABC\n0b10_10_11\n1_234_567\n1.5e9");
  EXPECT_FALSE(buffer.has_errors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // `12-578` splits into integer, minus, integer.
                  {.kind = TokenKind::IntegerLiteral,
                   .line = 1,
                   .column = 1,
                   .indent_column = 1,
                   .text = "12"},
                  {.kind = TokenKind::Minus,
                   .line = 1,
                   .column = 3,
                   .indent_column = 1},
                  {.kind = TokenKind::IntegerLiteral,
                   .line = 1,
                   .column = 4,
                   .indent_column = 1,
                   .text = "578"},
                  {.kind = TokenKind::IntegerLiteral,
                   .line = 2,
                   .column = 3,
                   .indent_column = 3,
                   .text = "1"},
                  {.kind = TokenKind::IntegerLiteral,
                   .line = 2,
                   .column = 6,
                   .indent_column = 3,
                   .text = "2"},
                  {.kind = TokenKind::IntegerLiteral,
                   .line = 3,
                   .column = 1,
                   .indent_column = 1,
                   .text = "0x12_3ABC"},
                  {.kind = TokenKind::IntegerLiteral,
                   .line = 4,
                   .column = 1,
                   .indent_column = 1,
                   .text = "0b10_10_11"},
                  {.kind = TokenKind::IntegerLiteral,
                   .line = 5,
                   .column = 1,
                   .indent_column = 1,
                   .text = "1_234_567"},
                  {.kind = TokenKind::RealLiteral,
                   .line = 6,
                   .column = 1,
                   .indent_column = 1,
                   .text = "1.5e9"},
                  {.kind = TokenKind::EndOfFile, .line = 6, .column = 6},
              }));
  // Decoded values: digit separators are ignored, and hex/binary bases are
  // respected.
  auto token_12 = buffer.tokens().begin();
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_12), 12);
  auto token_578 = buffer.tokens().begin() + 2;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_578), 578);
  auto token_1 = buffer.tokens().begin() + 3;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_1), 1);
  auto token_2 = buffer.tokens().begin() + 4;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_2), 2);
  auto token_0x12_3abc = buffer.tokens().begin() + 5;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_0x12_3abc), 0x12'3abc);
  auto token_0b10_10_11 = buffer.tokens().begin() + 6;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_0b10_10_11), 0b10'10'11);
  auto token_1_234_567 = buffer.tokens().begin() + 7;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_1_234_567), 1'234'567);
  // 1.5e9 is decoded as mantissa 15, decimal exponent 8 (15 * 10^8).
  auto token_1_5e9 = buffer.tokens().begin() + 8;
  auto value_1_5e9 = buffer.GetRealLiteral(*token_1_5e9);
  EXPECT_EQ(value_1_5e9.mantissa.getZExtValue(), 15);
  EXPECT_EQ(value_1_5e9.exponent.getSExtValue(), 8);
  EXPECT_EQ(value_1_5e9.is_decimal, true);
}
// Malformed numeric literals become Error tokens spanning the whole literal,
// while well-formed neighbors still lex normally.
TEST_F(LexerTest, HandlesInvalidNumericLiterals) {
  auto buffer = Lex("14x 15_49 0x3.5q 0x3_4.5_6 0ops");
  EXPECT_TRUE(buffer.has_errors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // Trailing alpha after digits is an error.
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 1,
                   .indent_column = 1,
                   .text = "14x"},
                  {.kind = TokenKind::IntegerLiteral,
                   .line = 1,
                   .column = 5,
                   .indent_column = 1,
                   .text = "15_49"},
                  // Bad suffix on a hex real literal.
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 11,
                   .indent_column = 1,
                   .text = "0x3.5q"},
                  {.kind = TokenKind::RealLiteral,
                   .line = 1,
                   .column = 18,
                   .indent_column = 1,
                   .text = "0x3_4.5_6"},
                  // Unknown base prefix.
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 28,
                   .indent_column = 1,
                   .text = "0ops"},
                  {.kind = TokenKind::EndOfFile, .line = 1, .column = 32},
              }));
}
// Exercises how the lexer decides where a numeric literal ends: trailing
// periods, exponents, and adjacent identifiers each force specific splits or
// errors.
TEST_F(LexerTest, SplitsNumericLiteralsProperly) {
  llvm::StringLiteral source_text = R"(
    1.
    .2
    3.+foo
    4.0-bar
    5.0e+123+456
    6.0e+1e+2
    1e7
    8..10
    9.0.9.5
    10.foo
    11.0.foo
    12e+1
    13._
  )";
  auto buffer = Lex(source_text);
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          // `1.` is an integer then a period, not a real.
                          {.kind = TokenKind::IntegerLiteral, .text = "1"},
                          {.kind = TokenKind::Period},
                          // newline
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::IntegerLiteral, .text = "2"},
                          // newline
                          {.kind = TokenKind::IntegerLiteral, .text = "3"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::Identifier, .text = "foo"},
                          // newline
                          {.kind = TokenKind::RealLiteral, .text = "4.0"},
                          {.kind = TokenKind::Minus},
                          {.kind = TokenKind::Identifier, .text = "bar"},
                          // newline: the exponent absorbs `+123` but not the
                          // following `+456`.
                          {.kind = TokenKind::RealLiteral, .text = "5.0e+123"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntegerLiteral, .text = "456"},
                          // newline: a second `e` inside the literal is an
                          // error up to that point.
                          {.kind = TokenKind::Error, .text = "6.0e+1e"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntegerLiteral, .text = "2"},
                          // newline: `1e7` without a sign is rejected.
                          {.kind = TokenKind::Error, .text = "1e7"},
                          // newline
                          {.kind = TokenKind::IntegerLiteral, .text = "8"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::IntegerLiteral, .text = "10"},
                          // newline
                          {.kind = TokenKind::RealLiteral, .text = "9.0"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::RealLiteral, .text = "9.5"},
                          // newline: alpha directly after the period is an
                          // error for the whole span.
                          {.kind = TokenKind::Error, .text = "10.foo"},
                          // newline
                          {.kind = TokenKind::RealLiteral, .text = "11.0"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Identifier, .text = "foo"},
                          // newline
                          {.kind = TokenKind::Error, .text = "12e"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntegerLiteral, .text = "1"},
                          // newline
                          {.kind = TokenKind::IntegerLiteral, .text = "13"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Underscore},
                          // newline
                          {.kind = TokenKind::EndOfFile},
                      }));
}
// Garbage bytes (including multi-byte UTF-8 and an embedded NUL) are grouped
// into Error tokens without derailing surrounding valid tokens.
TEST_F(LexerTest, HandlesGarbageCharacters) {
  // The string contains an embedded '\0', so its length must be passed
  // explicitly; sizeof - 1 drops only the terminating NUL.
  constexpr char GarbageText[] = "$$💩-$\n$\0$12$\n\\\"\\\n\"x";
  auto buffer = Lex(llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::Error,
           .line = 1,
           .column = 1,
           // 💩 takes 4 bytes, and we count column as bytes offset.
           .text = llvm::StringRef("$$💩", 6)},
          {.kind = TokenKind::Minus, .line = 1, .column = 7},
          {.kind = TokenKind::Error, .line = 1, .column = 8, .text = "$"},
          // newline; the error text length must be given explicitly because
          // of the embedded NUL.
          {.kind = TokenKind::Error,
           .line = 2,
           .column = 1,
           .text = llvm::StringRef("$\0$", 3)},
          {.kind = TokenKind::IntegerLiteral,
           .line = 2,
           .column = 4,
           .text = "12"},
          {.kind = TokenKind::Error, .line = 2, .column = 6, .text = "$"},
          // newline: a lone backslash is a token; the unterminated quote
          // that follows is an error.
          {.kind = TokenKind::Backslash, .line = 3, .column = 1, .text = "\\"},
          {.kind = TokenKind::Error, .line = 3, .column = 2, .text = "\"\\"},
          // newline
          {.kind = TokenKind::Error, .line = 4, .column = 1, .text = "\"x"},
          {.kind = TokenKind::EndOfFile, .line = 4, .column = 3},
      }));
}
TEST_F(LexerTest, Symbols) {
  // We don't need to exhaustively test symbols here as they're handled with
  // common code, but we want to check specific patterns to verify things like
  // max-munch rule and handling of interesting symbols.
  auto buffer = Lex("<<<");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::LessLess},
                          {TokenKind::Less},
                          {TokenKind::EndOfFile},
                      }));
  // Max-munch takes `<<=` before `>>`.
  buffer = Lex("<<=>>");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::LessLessEqual},
                          {TokenKind::GreaterGreater},
                          {TokenKind::EndOfFile},
                      }));
  // Whitespace separates what would otherwise merge.
  buffer = Lex("< <=> >");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::Less},
                          {TokenKind::LessEqualGreater},
                          {TokenKind::Greater},
                          {TokenKind::EndOfFile},
                      }));
  // A run of distinct single-character symbols.
  buffer = Lex("\\/?@&^!");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::Backslash},
                          {TokenKind::Slash},
                          {TokenKind::Question},
                          {TokenKind::At},
                          {TokenKind::Amp},
                          {TokenKind::Caret},
                          {TokenKind::Exclaim},
                          {TokenKind::EndOfFile},
                      }));
}
// Paren tokens lex individually, including nested sequences.
TEST_F(LexerTest, Parens) {
  auto buffer = Lex("()");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::OpenParen},
                          {TokenKind::CloseParen},
                          {TokenKind::EndOfFile},
                      }));
  buffer = Lex("((()()))");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::OpenParen},
                          {TokenKind::OpenParen},
                          {TokenKind::OpenParen},
                          {TokenKind::CloseParen},
                          {TokenKind::OpenParen},
                          {TokenKind::CloseParen},
                          {TokenKind::CloseParen},
                          {TokenKind::CloseParen},
                          {TokenKind::EndOfFile},
                      }));
}
// Curly brace tokens lex individually, including nested sequences.
TEST_F(LexerTest, CurlyBraces) {
  auto buffer = Lex("{}");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::OpenCurlyBrace},
                          {TokenKind::CloseCurlyBrace},
                          {TokenKind::EndOfFile},
                      }));
  buffer = Lex("{{{}{}}}");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::OpenCurlyBrace},
                          {TokenKind::OpenCurlyBrace},
                          {TokenKind::OpenCurlyBrace},
                          {TokenKind::CloseCurlyBrace},
                          {TokenKind::OpenCurlyBrace},
                          {TokenKind::CloseCurlyBrace},
                          {TokenKind::CloseCurlyBrace},
                          {TokenKind::CloseCurlyBrace},
                          {TokenKind::EndOfFile},
                      }));
}
// Checks that every open paren/brace is cross-linked to its matching close
// token (and vice versa), including nested and interleaved groups. The
// iterator walk below depends on the exact token order, so statements must
// stay in sequence.
TEST_F(LexerTest, MatchingGroups) {
  {
    // Simple case: two adjacent, non-nested groups.
    TokenizedBuffer buffer = Lex("(){}");
    ASSERT_FALSE(buffer.has_errors());
    auto it = buffer.tokens().begin();
    auto open_paren_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    auto open_curly_token = *it++;
    auto close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    auto eof_token = *it++;
    EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::EndOfFile);
    // All tokens must have been consumed.
    EXPECT_EQ(buffer.tokens().end(), it);
  }
  {
    // Nested and mixed groups: `({x})`, `{(y)}`, and doubly nested
    // `{{((z))}}`.
    TokenizedBuffer buffer = Lex("({x}){(y)} {{((z))}}");
    ASSERT_FALSE(buffer.has_errors());
    auto it = buffer.tokens().begin();
    // `({x})`: curly nested inside paren.
    auto open_paren_token = *it++;
    auto open_curly_token = *it++;
    ASSERT_EQ("x", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    auto close_curly_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    // `{(y)}`: paren nested inside curly.
    open_curly_token = *it++;
    open_paren_token = *it++;
    ASSERT_EQ("y", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    close_paren_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    // `{{((z))}}`: each nesting level must link to its own partner.
    open_curly_token = *it++;
    auto inner_open_curly_token = *it++;
    open_paren_token = *it++;
    auto inner_open_paren_token = *it++;
    ASSERT_EQ("z", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    auto inner_close_paren_token = *it++;
    close_paren_token = *it++;
    auto inner_close_curly_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(inner_close_curly_token,
              buffer.GetMatchedClosingToken(inner_open_curly_token));
    EXPECT_EQ(inner_open_curly_token,
              buffer.GetMatchedOpeningToken(inner_close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(inner_close_paren_token,
              buffer.GetMatchedClosingToken(inner_open_paren_token));
    EXPECT_EQ(inner_open_paren_token,
              buffer.GetMatchedOpeningToken(inner_close_paren_token));
    auto eof_token = *it++;
    EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::EndOfFile);
    EXPECT_EQ(buffer.tokens().end(), it);
  }
}
// Unbalanced grouping tokens: the lexer recovers by inserting synthetic
// closing tokens (marked `.recovery = true`) or by turning stray closers into
// Error tokens.
TEST_F(LexerTest, MismatchedGroups) {
  // A lone opener gets a recovery closer inserted before EOF.
  auto buffer = Lex("{");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {TokenKind::OpenCurlyBrace},
                  {.kind = TokenKind::CloseCurlyBrace, .recovery = true},
                  {TokenKind::EndOfFile},
              }));
  // A lone closer has nothing to match and becomes an Error token.
  buffer = Lex("}");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Error, .text = "}"},
                          {TokenKind::EndOfFile},
                      }));
  // `}` closing an open `(` forces a recovery `)` first.
  buffer = Lex("{(}");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::OpenCurlyBrace, .column = 1},
          {.kind = TokenKind::OpenParen, .column = 2},
          {.kind = TokenKind::CloseParen, .column = 3, .recovery = true},
          {.kind = TokenKind::CloseCurlyBrace, .column = 3},
          {TokenKind::EndOfFile},
      }));
  // Leading stray `)` errors; the trailing `)` closes the paren after a
  // recovery `}` is inserted for the unclosed curly.
  buffer = Lex(")({)");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::Error, .column = 1, .text = ")"},
          {.kind = TokenKind::OpenParen, .column = 2},
          {.kind = TokenKind::OpenCurlyBrace, .column = 3},
          {.kind = TokenKind::CloseCurlyBrace, .column = 4, .recovery = true},
          {.kind = TokenKind::CloseParen, .column = 4},
          {TokenKind::EndOfFile},
      }));
}
  491. TEST_F(LexerTest, Whitespace) {
  492. auto buffer = Lex("{( } {(");
  493. // Whether there should be whitespace before/after each token.
  494. bool space[] = {true,
  495. // {
  496. false,
  497. // (
  498. true,
  499. // inserted )
  500. true,
  501. // }
  502. true,
  503. // {
  504. false,
  505. // (
  506. true,
  507. // inserted )
  508. true,
  509. // inserted }
  510. true,
  511. // EOF
  512. false};
  513. int pos = 0;
  514. for (Token token : buffer.tokens()) {
  515. ASSERT_LT(pos, std::size(space));
  516. EXPECT_THAT(buffer.HasLeadingWhitespace(token), Eq(space[pos]));
  517. ++pos;
  518. ASSERT_LT(pos, std::size(space));
  519. EXPECT_THAT(buffer.HasTrailingWhitespace(token), Eq(space[pos]));
  520. }
  521. ASSERT_EQ(pos + 1, std::size(space));
  522. }
// Keywords lex as their dedicated token kinds, with correct column/indent
// tracking.
TEST_F(LexerTest, Keywords) {
  auto buffer = Lex("   fn");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::Fn, .column = 4, .indent_column = 4},
                  {TokenKind::EndOfFile},
              }));
  buffer = Lex("and or not if else for return var break continue _");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::And},
                          {TokenKind::Or},
                          {TokenKind::Not},
                          {TokenKind::If},
                          {TokenKind::Else},
                          {TokenKind::For},
                          {TokenKind::Return},
                          {TokenKind::Var},
                          {TokenKind::Break},
                          {TokenKind::Continue},
                          {TokenKind::Underscore},
                          {TokenKind::EndOfFile},
                      }));
}
// Valid comments produce no tokens at all; surrounding tokens keep correct
// positions.
TEST_F(LexerTest, Comments) {
  auto buffer = Lex(" ;\n  // foo\n  ;\n");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::Semi, .line = 1, .column = 2, .indent_column = 2},
          {.kind = TokenKind::Semi, .line = 3, .column = 3, .indent_column = 3},
          {.kind = TokenKind::EndOfFile, .line = 3, .column = 4},
      }));
  // A file of only comments lexes to just EOF.
  buffer = Lex("// foo\n//\n// bar");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile}}));
  // Make sure weird characters aren't a problem.
  buffer = Lex(" // foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile}}));
  // Make sure we can lex a comment at the end of the input.
  buffer = Lex("//");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile}}));
}
  573. TEST_F(LexerTest, InvalidComments) {
  574. llvm::StringLiteral testcases[] = {
  575. " /// foo\n",
  576. "foo // bar\n",
  577. "//! hello",
  578. " //world",
  579. };
  580. for (llvm::StringLiteral testcase : testcases) {
  581. auto buffer = Lex(testcase);
  582. EXPECT_TRUE(buffer.has_errors());
  583. }
  584. }
// Identifier lexing: character set, keyword prefixes, interning of repeated
// names, and indent/column tracking across lines.
TEST_F(LexerTest, Identifiers) {
  auto buffer = Lex("   foobar");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Identifier,
                           .column = 4,
                           .indent_column = 4,
                           .text = "foobar"},
                          {TokenKind::EndOfFile},
                      }));
  // Check different kinds of identifier character sequences.
  buffer = Lex("_foo_bar");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Identifier, .text = "_foo_bar"},
                          {TokenKind::EndOfFile},
                      }));
  buffer = Lex("foo2bar00");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Identifier, .text = "foo2bar00"},
                          {TokenKind::EndOfFile},
                      }));
  // Check that we can parse identifiers that start with a keyword.
  buffer = Lex("fnord");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Identifier, .text = "fnord"},
                          {TokenKind::EndOfFile},
                      }));
  // Check multiple identifiers with indent and interning.
  buffer = Lex("   foo;bar\nbar \n  foo\tfoo");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::Identifier,
                   .line = 1,
                   .column = 4,
                   .indent_column = 4,
                   .text = "foo"},
                  {.kind = TokenKind::Semi},
                  {.kind = TokenKind::Identifier,
                   .line = 1,
                   .column = 8,
                   .indent_column = 4,
                   .text = "bar"},
                  {.kind = TokenKind::Identifier,
                   .line = 2,
                   .column = 1,
                   .indent_column = 1,
                   .text = "bar"},
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 3,
                   .indent_column = 3,
                   .text = "foo"},
                  // After the tab, columns continue counting in characters.
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 7,
                   .indent_column = 3,
                   .text = "foo"},
                  {.kind = TokenKind::EndOfFile, .line = 3, .column = 10},
              }));
}
// Valid string literal forms: simple quoted, multi-line block (`'''`),
// raw/hashed (`#"..."#`), escapes, and adjacent literals.
// NOTE(review): this testcase literal was reconstructed from the token
// line/column expectations below (the original indentation and blank lines
// were lost); confirm against the upstream file.
TEST_F(LexerTest, StringLiterals) {
  llvm::StringLiteral testcase = R"(
    "hello world\n"

    '''foo
      test \
     \xAB
     ''' trailing
      #"""#
    "\0"

    #"\0"foo"\1"#

    """x"""
  )";
  auto buffer = Lex(testcase);
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::StringLiteral,
                   .line = 2,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = {"hello world\n"}},
                  // Block string: leading indentation is stripped, the `\`
                  // at end of line splices, and `\xAB` decodes to one byte.
                  {.kind = TokenKind::StringLiteral,
                   .line = 4,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = {" test \xAB\n"}},
                  {.kind = TokenKind::Identifier,
                   .line = 7,
                   .column = 10,
                   .indent_column = 5,
                   .text = "trailing"},
                  // `#"""#` is a hashed literal containing a single quote.
                  {.kind = TokenKind::StringLiteral,
                   .line = 9,
                   .column = 7,
                   .indent_column = 7,
                   .string_contents = {"\""}},
                  // `\0` decodes to a NUL byte, so the expected contents
                  // need an explicit inner-NUL-aware literal.
                  {.kind = TokenKind::StringLiteral,
                   .line = 11,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = llvm::StringLiteral::withInnerNUL("\0")},
                  // Inside `#"..."#`, plain `\0`/`\1` are NOT escapes.
                  {.kind = TokenKind::StringLiteral,
                   .line = 13,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = {"\\0\"foo\"\\1"}},
                  // """x""" is three string literals, not one invalid
                  // attempt at a block string literal.
                  {.kind = TokenKind::StringLiteral,
                   .line = 15,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = {""}},
                  {.kind = TokenKind::StringLiteral,
                   .line = 15,
                   .column = 7,
                   .indent_column = 5,
                   .string_contents = {"x"}},
                  {.kind = TokenKind::StringLiteral,
                   .line = 15,
                   .column = 10,
                   .indent_column = 5,
                   .string_contents = {""}},
                  {.kind = TokenKind::EndOfFile, .line = 16, .column = 3},
              }));
}
  715. TEST_F(LexerTest, InvalidStringLiterals) {
  716. llvm::StringLiteral invalid[] = {
  717. // clang-format off
  718. R"(")",
  719. R"('''
  720. '')",
  721. R"("\)",
  722. R"("\")",
  723. R"("\\)",
  724. R"("\\\")",
  725. R"(''')",
  726. R"('''
  727. )",
  728. R"('''\)",
  729. R"(#'''
  730. ''')",
  731. // clang-format on
  732. };
  733. for (llvm::StringLiteral test : invalid) {
  734. SCOPED_TRACE(test);
  735. auto buffer = Lex(test);
  736. EXPECT_TRUE(buffer.has_errors());
  737. // We should have formed at least one error token.
  738. bool found_error = false;
  739. for (Token token : buffer.tokens()) {
  740. if (buffer.GetKind(token) == TokenKind::Error) {
  741. found_error = true;
  742. break;
  743. }
  744. }
  745. EXPECT_TRUE(found_error);
  746. }
  747. }
// Type literals (`iN`, `uN`, `fN`): a valid literal is the letter followed by
// decimal digits with no leading zero; anything else (i0, u64b, i0x1, fi,
// s1) lexes as a plain identifier. Also checks size decoding.
TEST_F(LexerTest, TypeLiterals) {
  llvm::StringLiteral testcase = R"(
    i0 i1 i20 i999999999999 i0x1
    u0 u1 u64 u64b
    f32 f80 f1 fi
    s1
  )";
  auto buffer = Lex(testcase);
  EXPECT_FALSE(buffer.has_errors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // `i0` has a leading zero, so it's an identifier.
                  {.kind = TokenKind::Identifier,
                   .line = 2,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"i0"}},
                  {.kind = TokenKind::IntegerTypeLiteral,
                   .line = 2,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"i1"}},
                  {.kind = TokenKind::IntegerTypeLiteral,
                   .line = 2,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"i20"}},
                  {.kind = TokenKind::IntegerTypeLiteral,
                   .line = 2,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"i999999999999"}},
                  {.kind = TokenKind::Identifier,
                   .line = 2,
                   .column = 29,
                   .indent_column = 5,
                   .text = {"i0x1"}},
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"u0"}},
                  {.kind = TokenKind::UnsignedIntegerTypeLiteral,
                   .line = 3,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"u1"}},
                  {.kind = TokenKind::UnsignedIntegerTypeLiteral,
                   .line = 3,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"u64"}},
                  // Trailing alpha makes it an identifier, not a literal.
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"u64b"}},
                  {.kind = TokenKind::FloatingPointTypeLiteral,
                   .line = 4,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"f32"}},
                  {.kind = TokenKind::FloatingPointTypeLiteral,
                   .line = 4,
                   .column = 9,
                   .indent_column = 5,
                   .text = {"f80"}},
                  {.kind = TokenKind::FloatingPointTypeLiteral,
                   .line = 4,
                   .column = 13,
                   .indent_column = 5,
                   .text = {"f1"}},
                  {.kind = TokenKind::Identifier,
                   .line = 4,
                   .column = 16,
                   .indent_column = 5,
                   .text = {"fi"}},
                  // `s` is not a type-literal prefix.
                  {.kind = TokenKind::Identifier,
                   .line = 5,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"s1"}},
                  {.kind = TokenKind::EndOfFile, .line = 6, .column = 3},
              }));
  // Decoded bit widths for each type literal token.
  auto token_i1 = buffer.tokens().begin() + 1;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i1), 1);
  auto token_i20 = buffer.tokens().begin() + 2;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i20), 20);
  auto token_i999999999999 = buffer.tokens().begin() + 3;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i999999999999), 999999999999ULL);
  auto token_u1 = buffer.tokens().begin() + 6;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u1), 1);
  auto token_u64 = buffer.tokens().begin() + 7;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u64), 64);
  auto token_f32 = buffer.tokens().begin() + 9;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f32), 32);
  auto token_f80 = buffer.tokens().begin() + 10;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f80), 80);
  auto token_f1 = buffer.tokens().begin() + 11;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f1), 1);
}
  848. TEST_F(LexerTest, TypeLiteralTooManyDigits) {
  849. std::string code = "i";
  850. constexpr int Count = 10000;
  851. code.append(Count, '9');
  852. Testing::MockDiagnosticConsumer consumer;
  853. EXPECT_CALL(consumer,
  854. HandleDiagnostic(IsDiagnostic(
  855. DiagnosticKind::TooManyDigits, DiagnosticLevel::Error, 1, 2,
  856. HasSubstr(llvm::formatv(" {0} ", Count)))));
  857. auto buffer = Lex(code, consumer);
  858. EXPECT_TRUE(buffer.has_errors());
  859. ASSERT_THAT(
  860. buffer,
  861. HasTokens(llvm::ArrayRef<ExpectedToken>{
  862. {.kind = TokenKind::Error,
  863. .line = 1,
  864. .column = 1,
  865. .indent_column = 1,
  866. .text = {code}},
  867. {.kind = TokenKind::EndOfFile, .line = 1, .column = Count + 2},
  868. }));
  869. }
  870. TEST_F(LexerTest, DiagnosticTrailingComment) {
  871. llvm::StringLiteral testcase = R"(
  872. // Hello!
  873. var String x; // trailing comment
  874. )";
  875. Testing::MockDiagnosticConsumer consumer;
  876. EXPECT_CALL(consumer,
  877. HandleDiagnostic(IsDiagnostic(DiagnosticKind::TrailingComment,
  878. DiagnosticLevel::Error, 3, 19, _)));
  879. Lex(testcase, consumer);
  880. }
  881. TEST_F(LexerTest, DiagnosticWhitespace) {
  882. Testing::MockDiagnosticConsumer consumer;
  883. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  884. DiagnosticKind::NoWhitespaceAfterCommentIntroducer,
  885. DiagnosticLevel::Error, 1, 3, _)));
  886. Lex("//no space after comment", consumer);
  887. }
  888. TEST_F(LexerTest, DiagnosticUnrecognizedEscape) {
  889. Testing::MockDiagnosticConsumer consumer;
  890. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  891. DiagnosticKind::UnknownEscapeSequence,
  892. DiagnosticLevel::Error, 1, 8, HasSubstr("`b`"))));
  893. Lex(R"("hello\bworld")", consumer);
  894. }
  895. TEST_F(LexerTest, DiagnosticBadHex) {
  896. Testing::MockDiagnosticConsumer consumer;
  897. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  898. DiagnosticKind::HexadecimalEscapeMissingDigits,
  899. DiagnosticLevel::Error, 1, 9, _)));
  900. Lex(R"("hello\xabworld")", consumer);
  901. }
  902. TEST_F(LexerTest, DiagnosticInvalidDigit) {
  903. Testing::MockDiagnosticConsumer consumer;
  904. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  905. DiagnosticKind::InvalidDigit,
  906. DiagnosticLevel::Error, 1, 6, HasSubstr("'a'"))));
  907. Lex("0x123abc", consumer);
  908. }
  909. TEST_F(LexerTest, DiagnosticMissingTerminator) {
  910. Testing::MockDiagnosticConsumer consumer;
  911. EXPECT_CALL(consumer,
  912. HandleDiagnostic(IsDiagnostic(DiagnosticKind::UnterminatedString,
  913. DiagnosticLevel::Error, 1, 1, _)));
  914. Lex(R"(#" ")", consumer);
  915. }
  916. TEST_F(LexerTest, DiagnosticUnrecognizedChar) {
  917. Testing::MockDiagnosticConsumer consumer;
  918. EXPECT_CALL(consumer, HandleDiagnostic(
  919. IsDiagnostic(DiagnosticKind::UnrecognizedCharacters,
  920. DiagnosticLevel::Error, 1, 1, _)));
  921. Lex("\b", consumer);
  922. }
TEST_F(LexerTest, PrintingAsYaml) {
  // Test that we can parse this into YAML and verify line and indent data.
  // The source has a semicolon indented by one space on line 2, two
  // semicolons on line 5, and then a long run of blank lines before EOF.
  auto buffer = Lex("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n\n");
  ASSERT_FALSE(buffer.has_errors());
  // Print the buffer and re-parse the printed text as YAML so the assertions
  // below can check the structured fields rather than raw text.
  TestRawOstream print_stream;
  buffer.Print(print_stream);
  // Expected YAML: one document containing a sequence with a single mapping
  // holding the filename and the token list. Each token mapping carries
  // index/kind/line/column/indent/spelling, plus `has_trailing_space` when
  // whitespace follows the token. Note the EndOfFile token omits that pair
  // and reports line 15 (the last line of the input), column 1.
  EXPECT_THAT(
      Yaml::Value::FromText(print_stream.TakeStr()),
      IsYaml(ElementsAre(Yaml::Sequence(ElementsAre(Yaml::Mapping(ElementsAre(
          Pair("filename", source_storage_.front().filename().str()),
          Pair("tokens",
               Yaml::Sequence(ElementsAre(
                   // `;` on line 2, indented one space (column/indent 2).
                   Yaml::Mapping(ElementsAre(
                       Pair("index", "0"), Pair("kind", "Semi"),
                       Pair("line", "2"), Pair("column", "2"),
                       Pair("indent", "2"), Pair("spelling", ";"),
                       Pair("has_trailing_space", "true"))),
                   // First `;` on line 5, at the start of the line.
                   Yaml::Mapping(
                       ElementsAre(Pair("index", "1"), Pair("kind", "Semi"),
                                   Pair("line", "5"), Pair("column", "1"),
                                   Pair("indent", "1"), Pair("spelling", ";"),
                                   Pair("has_trailing_space", "true"))),
                   // Second `;` on line 5; indent is still the line's indent.
                   Yaml::Mapping(
                       ElementsAre(Pair("index", "2"), Pair("kind", "Semi"),
                                   Pair("line", "5"), Pair("column", "3"),
                                   Pair("indent", "1"), Pair("spelling", ";"),
                                   Pair("has_trailing_space", "true"))),
                   Yaml::Mapping(ElementsAre(
                       Pair("index", "3"), Pair("kind", "EndOfFile"),
                       Pair("line", "15"), Pair("column", "1"),
                       Pair("indent", "1"), Pair("spelling", "")))))))))))));
}
  955. } // namespace
  956. } // namespace Carbon::Testing