  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. #include "common/hashing.h"
  5. #include <gmock/gmock.h>
  6. #include <gtest/gtest.h>
  7. #include <concepts>
  8. #include <type_traits>
  9. #include "common/raw_string_ostream.h"
  10. #include "llvm/ADT/Sequence.h"
  11. #include "llvm/ADT/StringExtras.h"
  12. #include "llvm/Support/FormatVariadic.h"
  13. #include "llvm/Support/TypeName.h"
  14. namespace Carbon {
  15. namespace {
  16. using ::testing::Eq;
  17. using ::testing::Le;
  18. using ::testing::Ne;
  19. TEST(HashingTest, HashCodeApi) {
  20. // Manually compute a few hash codes where we can exercise the underlying API.
  21. HashCode empty = HashValue("");
  22. HashCode a = HashValue("a");
  23. HashCode b = HashValue("b");
  24. ASSERT_THAT(HashValue(""), Eq(empty));
  25. ASSERT_THAT(HashValue("a"), Eq(a));
  26. ASSERT_THAT(HashValue("b"), Eq(b));
  27. ASSERT_THAT(empty, Ne(a));
  28. ASSERT_THAT(empty, Ne(b));
  29. ASSERT_THAT(a, Ne(b));
  30. // Exercise the methods in basic ways across a few sizes. This doesn't check
  31. // much beyond stability across re-computed values, crashing, or hitting UB.
  32. EXPECT_THAT(HashValue("a").ExtractIndex(), Eq(a.ExtractIndex()));
  33. EXPECT_THAT(a.ExtractIndex(), Ne(b.ExtractIndex()));
  34. EXPECT_THAT(a.ExtractIndex(), Ne(empty.ExtractIndex()));
  35. // The tag shouldn't have bits set outside the range requested.
  36. EXPECT_THAT(HashValue("a").ExtractIndexAndTag<1>().second & ~0b1, Eq(0));
  37. EXPECT_THAT(HashValue("a").ExtractIndexAndTag<2>().second & ~0b11, Eq(0));
  38. EXPECT_THAT(HashValue("a").ExtractIndexAndTag<3>().second & ~0b111, Eq(0));
  39. EXPECT_THAT(HashValue("a").ExtractIndexAndTag<4>().second & ~0b1111, Eq(0));
  40. // Note that the index produced with a tag may be different from the index
  41. // alone!
  42. EXPECT_THAT(HashValue("a").ExtractIndexAndTag<2>(),
  43. Eq(a.ExtractIndexAndTag<2>()));
  44. EXPECT_THAT(HashValue("a").ExtractIndexAndTag<16>(),
  45. Eq(a.ExtractIndexAndTag<16>()));
  46. EXPECT_THAT(HashValue("a").ExtractIndexAndTag<7>(),
  47. Eq(a.ExtractIndexAndTag<7>()));
  48. const auto [a_index, a_tag] = a.ExtractIndexAndTag<4>();
  49. const auto [b_index, b_tag] = b.ExtractIndexAndTag<4>();
  50. EXPECT_THAT(a_index, Ne(b_index));
  51. EXPECT_THAT(a_tag, Ne(b_tag));
  52. }
  53. TEST(HashingTest, Integers) {
  54. for (int64_t i : {0, 1, 2, 3, 42, -1, -2, -3, -13}) {
  55. SCOPED_TRACE(llvm::formatv("Hashing: {0}", i).str());
  56. auto test_int_hash = [](auto i) {
  57. using T = decltype(i);
  58. SCOPED_TRACE(
  59. llvm::formatv("Hashing type: {0}", llvm::getTypeName<T>()).str());
  60. HashCode hash = HashValue(i);
  61. // Hashes should be stable within the execution.
  62. EXPECT_THAT(HashValue(i), Eq(hash));
  63. // Zero should match, and other integers shouldn't collide trivially.
  64. HashCode hash_zero = HashValue(static_cast<T>(0));
  65. if (i == 0) {
  66. EXPECT_THAT(hash, Eq(hash_zero));
  67. } else {
  68. EXPECT_THAT(hash, Ne(hash_zero));
  69. }
  70. };
  71. test_int_hash(static_cast<int8_t>(i));
  72. test_int_hash(static_cast<uint8_t>(i));
  73. test_int_hash(static_cast<int16_t>(i));
  74. test_int_hash(static_cast<uint16_t>(i));
  75. test_int_hash(static_cast<int32_t>(i));
  76. test_int_hash(static_cast<uint32_t>(i));
  77. // `i` is already an int64_t variable.
  78. test_int_hash(i);
  79. test_int_hash(static_cast<uint64_t>(i));
  80. }
  81. }
  82. TEST(HashingTest, BasicSeeding) {
  83. auto unseeded_hash = HashValue(42);
  84. EXPECT_THAT(unseeded_hash, Ne(HashValue(42, 1)));
  85. EXPECT_THAT(unseeded_hash, Ne(HashValue(42, 2)));
  86. EXPECT_THAT(unseeded_hash, Ne(HashValue(42, 3)));
  87. EXPECT_THAT(unseeded_hash,
  88. Ne(HashValue(42, static_cast<uint64_t>(unseeded_hash))));
  89. }
  90. TEST(HashingTest, Pointers) {
  91. int object1 = 42;
  92. std::string object2 =
  93. "Hello World! This is a long-ish string so it ends up on the heap!";
  94. HashCode hash_null = HashValue(nullptr);
  95. // Hashes should be stable.
  96. EXPECT_THAT(HashValue(nullptr), Eq(hash_null));
  97. // Hash other kinds of pointers without trivial collisions.
  98. HashCode hash1 = HashValue(&object1);
  99. HashCode hash2 = HashValue(&object2);
  100. HashCode hash3 = HashValue(object2.data());
  101. EXPECT_THAT(hash1, Ne(hash_null));
  102. EXPECT_THAT(hash2, Ne(hash_null));
  103. EXPECT_THAT(hash3, Ne(hash_null));
  104. EXPECT_THAT(hash1, Ne(hash2));
  105. EXPECT_THAT(hash1, Ne(hash3));
  106. EXPECT_THAT(hash2, Ne(hash3));
  107. // Hash values reflect the address and not the type.
  108. EXPECT_THAT(HashValue(static_cast<void*>(nullptr)), Eq(hash_null));
  109. EXPECT_THAT(HashValue(static_cast<int*>(nullptr)), Eq(hash_null));
  110. EXPECT_THAT(HashValue(static_cast<std::string*>(nullptr)), Eq(hash_null));
  111. EXPECT_THAT(HashValue(reinterpret_cast<void*>(&object1)), Eq(hash1));
  112. EXPECT_THAT(HashValue(reinterpret_cast<int*>(&object2)), Eq(hash2));
  113. EXPECT_THAT(HashValue(reinterpret_cast<std::string*>(object2.data())),
  114. Eq(hash3));
  115. }
TEST(HashingTest, PairsAndTuples) {
  // Note that we can't compare hash codes across arity, or in general, compare
  // hash codes for different types as the type isn't part of the hash. These
  // hashes are targeted at use in hash tables which pick a single type that's
  // the basis of any comparison.

  // All four element patterns of a pair must produce pairwise-distinct hashes.
  HashCode hash_00 = HashValue(std::pair(0, 0));
  HashCode hash_01 = HashValue(std::pair(0, 1));
  HashCode hash_10 = HashValue(std::pair(1, 0));
  HashCode hash_11 = HashValue(std::pair(1, 1));
  EXPECT_THAT(hash_00, Ne(hash_01));
  EXPECT_THAT(hash_00, Ne(hash_10));
  EXPECT_THAT(hash_00, Ne(hash_11));
  EXPECT_THAT(hash_01, Ne(hash_10));
  EXPECT_THAT(hash_01, Ne(hash_11));
  EXPECT_THAT(hash_10, Ne(hash_11));

  // Likewise, all eight element patterns of a 3-tuple must be pairwise
  // distinct.
  HashCode hash_000 = HashValue(std::tuple(0, 0, 0));
  HashCode hash_001 = HashValue(std::tuple(0, 0, 1));
  HashCode hash_010 = HashValue(std::tuple(0, 1, 0));
  HashCode hash_011 = HashValue(std::tuple(0, 1, 1));
  HashCode hash_100 = HashValue(std::tuple(1, 0, 0));
  HashCode hash_101 = HashValue(std::tuple(1, 0, 1));
  HashCode hash_110 = HashValue(std::tuple(1, 1, 0));
  HashCode hash_111 = HashValue(std::tuple(1, 1, 1));
  EXPECT_THAT(hash_000, Ne(hash_001));
  EXPECT_THAT(hash_000, Ne(hash_010));
  EXPECT_THAT(hash_000, Ne(hash_011));
  EXPECT_THAT(hash_000, Ne(hash_100));
  EXPECT_THAT(hash_000, Ne(hash_101));
  EXPECT_THAT(hash_000, Ne(hash_110));
  EXPECT_THAT(hash_000, Ne(hash_111));
  EXPECT_THAT(hash_001, Ne(hash_010));
  EXPECT_THAT(hash_001, Ne(hash_011));
  EXPECT_THAT(hash_001, Ne(hash_100));
  EXPECT_THAT(hash_001, Ne(hash_101));
  EXPECT_THAT(hash_001, Ne(hash_110));
  EXPECT_THAT(hash_001, Ne(hash_111));
  EXPECT_THAT(hash_010, Ne(hash_011));
  EXPECT_THAT(hash_010, Ne(hash_100));
  EXPECT_THAT(hash_010, Ne(hash_101));
  EXPECT_THAT(hash_010, Ne(hash_110));
  EXPECT_THAT(hash_010, Ne(hash_111));
  EXPECT_THAT(hash_011, Ne(hash_100));
  EXPECT_THAT(hash_011, Ne(hash_101));
  EXPECT_THAT(hash_011, Ne(hash_110));
  EXPECT_THAT(hash_011, Ne(hash_111));
  EXPECT_THAT(hash_100, Ne(hash_101));
  EXPECT_THAT(hash_100, Ne(hash_110));
  EXPECT_THAT(hash_100, Ne(hash_111));
  EXPECT_THAT(hash_101, Ne(hash_110));
  EXPECT_THAT(hash_101, Ne(hash_111));
  EXPECT_THAT(hash_110, Ne(hash_111));

  // Hashing a 2-tuple and a pair should produce identical results, so pairs
  // are compatible with code using things like variadic tuple construction.
  EXPECT_THAT(HashValue(std::tuple(0, 0)), Eq(hash_00));
  EXPECT_THAT(HashValue(std::tuple(0, 1)), Eq(hash_01));
  EXPECT_THAT(HashValue(std::tuple(1, 0)), Eq(hash_10));
  EXPECT_THAT(HashValue(std::tuple(1, 1)), Eq(hash_11));

  // Integers in tuples should also work.
  for (int i : {0, 1, 2, 3, 42, -1, -2, -3, -13}) {
    SCOPED_TRACE(llvm::formatv("Hashing: ({0}, {0}, {0})", i).str());
    // Checks a homogeneous 3-tuple of `i` at a specific integer width: stable
    // when recomputed, and colliding with the zero tuple iff `i` is zero.
    auto test_int_tuple_hash = [](auto i) {
      using T = decltype(i);
      SCOPED_TRACE(
          llvm::formatv("Hashing integer type: {0}", llvm::getTypeName<T>())
              .str());
      std::tuple v = {i, i, i};
      HashCode hash = HashValue(v);
      // Hashes should be stable within the execution.
      EXPECT_THAT(HashValue(v), Eq(hash));
      // Zero should match, and other integers shouldn't collide trivially.
      T zero = 0;
      std::tuple zero_tuple = {zero, zero, zero};
      HashCode hash_zero = HashValue(zero_tuple);
      if (i == 0) {
        EXPECT_THAT(hash, Eq(hash_zero));
      } else {
        EXPECT_THAT(hash, Ne(hash_zero));
      }
    };
    test_int_tuple_hash(i);
    test_int_tuple_hash(static_cast<int8_t>(i));
    test_int_tuple_hash(static_cast<uint8_t>(i));
    test_int_tuple_hash(static_cast<int16_t>(i));
    test_int_tuple_hash(static_cast<uint16_t>(i));
    test_int_tuple_hash(static_cast<int32_t>(i));
    test_int_tuple_hash(static_cast<uint32_t>(i));
    test_int_tuple_hash(static_cast<int64_t>(i));
    test_int_tuple_hash(static_cast<uint64_t>(i));
    // Heterogeneous integer types should also work, but we only support
    // comparing against hashes of tuples with the exact same type.
    using T1 = std::tuple<int8_t, uint32_t, int16_t>;
    using T2 = std::tuple<uint32_t, int16_t, uint64_t>;
    if (i == 0) {
      EXPECT_THAT(HashValue(T1{i, i, i}), Eq(HashValue(T1{0, 0, 0})));
      EXPECT_THAT(HashValue(T2{i, i, i}), Eq(HashValue(T2{0, 0, 0})));
    } else {
      EXPECT_THAT(HashValue(T1{i, i, i}), Ne(HashValue(T1{0, 0, 0})));
      EXPECT_THAT(HashValue(T2{i, i, i}), Ne(HashValue(T2{0, 0, 0})));
    }
  }

  // Hash values of pointers in pairs and tuples reflect the address and not the
  // type. Pairs and 2-tuples give the same hash values.
  HashCode hash_2null = HashValue(std::pair(nullptr, nullptr));
  EXPECT_THAT(HashValue(std::tuple(static_cast<int*>(nullptr),
                                   static_cast<double*>(nullptr))),
              Eq(hash_2null));
  // Hash other kinds of pointers without trivial collisions.
  int object1 = 42;
  std::string object2 = "Hello world!";
  HashCode hash_3ptr =
      HashValue(std::tuple(&object1, &object2, object2.data()));
  EXPECT_THAT(hash_3ptr, Ne(HashValue(std::tuple(nullptr, nullptr, nullptr))));
  // Hash values reflect the address and not the type.
  EXPECT_THAT(
      HashValue(std::tuple(reinterpret_cast<void*>(&object1),
                           reinterpret_cast<int*>(&object2),
                           reinterpret_cast<std::string*>(object2.data()))),
      Eq(hash_3ptr));
}
  235. TEST(HashingTest, BasicStrings) {
  236. llvm::SmallVector<std::pair<std::string, HashCode>> hashes;
  237. for (int size : {0, 1, 2, 4, 16, 64, 256, 1024}) {
  238. std::string s(size, 'a');
  239. hashes.push_back({s, HashValue(s)});
  240. }
  241. for (const auto& [s1, hash1] : hashes) {
  242. EXPECT_THAT(HashValue(s1), Eq(hash1));
  243. // Also check that we get the same hashes even when using string-wrapping
  244. // types.
  245. EXPECT_THAT(HashValue(std::string_view(s1)), Eq(hash1));
  246. EXPECT_THAT(HashValue(llvm::StringRef(s1)), Eq(hash1));
  247. // And some basic tests that simple things don't collide.
  248. for (const auto& [s2, hash2] : hashes) {
  249. if (s1 != s2) {
  250. EXPECT_THAT(hash1, Ne(hash2))
  251. << "Matching hashes for '" << s1 << "' and '" << s2 << "'";
  252. }
  253. }
  254. }
  255. }
  256. TEST(HashingTest, ArrayLike) {
  257. int c_array[] = {1, 2, 3, 4};
  258. EXPECT_THAT(HashValue(c_array), Eq(HashValue(c_array)));
  259. EXPECT_THAT(HashValue(std::array{1, 2, 3, 4}), Eq(HashValue(c_array)));
  260. EXPECT_THAT(HashValue(std::vector{1, 2, 3, 4}), Eq(HashValue(c_array)));
  261. EXPECT_THAT(HashValue(llvm::SmallVector<int>{1, 2, 3, 4}),
  262. Eq(HashValue(c_array)));
  263. }
  264. TEST(HashingTest, HashAPInt) {
  265. // The bit width should be hashed as well as the value.
  266. llvm::APInt one_64(/*numBits=*/64, /*val=*/1);
  267. llvm::APInt two_64(/*numBits=*/64, /*val=*/2);
  268. llvm::APInt one_128(/*numBits=*/128, /*val=*/1);
  269. llvm::APInt two_128(/*numBits=*/128, /*val=*/2);
  270. std::array array = {one_64, two_64, one_128, two_128};
  271. for (int i : llvm::seq<int>(array.size())) {
  272. EXPECT_THAT(HashValue(array[i]), Eq(HashValue(array[i])));
  273. for (int j : llvm::seq<int>(i + 1, array.size())) {
  274. EXPECT_THAT(HashValue(array[i]), Ne(HashValue(array[j])))
  275. << "Hashing #" << i << " and #" << j;
  276. }
  277. }
  278. }
  279. TEST(HashingTest, HashAPFloat) {
  280. // Hashtable equality for `APFloat` uses a bitwise comparison. This
  281. // differentiates between various things that would otherwise not make sense:
  282. // - Different floating point semantics
  283. // - `-0.0` and `0.0`
  284. //
  285. // It also allows NaNs to be compared meaningfully.
  286. llvm::APFloat zero_float =
  287. llvm::APFloat::getZero(llvm::APFloat::IEEEsingle());
  288. llvm::APFloat neg_zero_float =
  289. llvm::APFloat::getZero(llvm::APFloat::IEEEsingle(), /*Negative=*/true);
  290. llvm::APFloat zero_double =
  291. llvm::APFloat::getZero(llvm::APFloat::IEEEdouble());
  292. llvm::APFloat zero_bfloat = llvm::APFloat::getZero(llvm::APFloat::BFloat());
  293. llvm::APFloat one_float = llvm::APFloat::getOne(llvm::APFloat::IEEEsingle());
  294. llvm::APFloat inf_float = llvm::APFloat::getInf(llvm::APFloat::IEEEsingle());
  295. llvm::APFloat nan_0_float = llvm::APFloat::getNaN(
  296. llvm::APFloat::IEEEsingle(), /*Negative=*/false, /*payload=*/0);
  297. llvm::APFloat nan_42_float = llvm::APFloat::getNaN(
  298. llvm::APFloat::IEEEsingle(), /*Negative=*/false, /*payload=*/42);
  299. std::array array = {zero_float, neg_zero_float, zero_double, zero_bfloat,
  300. one_float, inf_float, nan_42_float};
  301. for (int i : llvm::seq<int>(array.size())) {
  302. EXPECT_THAT(HashValue(array[i]), Eq(HashValue(array[i])));
  303. for (int j : llvm::seq<int>(i + 1, array.size())) {
  304. EXPECT_THAT(HashValue(array[i]), Ne(HashValue(array[j])))
  305. << "Hashing #" << i << " and #" << j;
  306. }
  307. }
  308. // Note that currently we use LLVM's hashing of `APFloat` which does *not*
  309. // hash the payload of NaNs.
  310. EXPECT_THAT(HashValue(nan_0_float), Eq(HashValue(nan_42_float)));
  311. }
// A type that has hashing customization. However, it also works to be small and
// appear to have a unique object representation. This helps ensure that when a
// user provides custom hashing it is reliably used.
struct HashableType {
  int8_t x;
  int8_t y;
  // Deliberately excluded from the custom hash below so that tests can detect
  // whether the customization point was actually used rather than hashing the
  // raw object bytes.
  int16_t ignored = 0;

  // Provide the hashing but try to craft a relatively low-ranking overload to
  // help ensure that the hashing framework doesn't accidentally override this.
  template <typename T>
    requires(std::same_as<T, HashableType>)
  friend auto CarbonHashValue(const T& value, uint64_t seed) -> HashCode {
    Hasher hasher(seed);
    hasher.Hash(value.x, value.y);
    return static_cast<HashCode>(hasher);
  }
};

// Guard the premise above: the type must look like plain bytes so that byte
// hashing would be a tempting (but incorrect) default.
static_assert(std::has_unique_object_representations_v<HashableType>);
  330. TEST(HashingTest, CustomType) {
  331. HashableType a = {.x = 1, .y = 2};
  332. HashableType b = {.x = 3, .y = 4};
  333. EXPECT_THAT(HashValue(a), Eq(HashValue(a)));
  334. EXPECT_THAT(HashValue(a), Ne(HashValue(b)));
  335. // Differences in an ignored field have no impact.
  336. HashableType c = {.x = 3, .y = 4, .ignored = 42};
  337. EXPECT_THAT(HashValue(c), Eq(HashValue(b)));
  338. }
TEST(HashingTest, ArrayRecursion) {
  // Make sure we correctly recurse when hashing an array and don't try to use
  // the object representation.
  llvm::APInt one_64(/*numBits=*/64, /*val=*/1);
  llvm::APInt two_64(/*numBits=*/64, /*val=*/2);
  llvm::APInt one_128(/*numBits=*/128, /*val=*/1);
  llvm::APInt two_128(/*numBits=*/128, /*val=*/2);
  std::array apint_array = {one_64, two_64, one_128, two_128};
  // Equal arrays hash equal; permuted, substituted, shortened, or lengthened
  // arrays must not collide.
  EXPECT_THAT(HashValue(apint_array),
              Eq(HashValue(std::array{one_64, two_64, one_128, two_128})));
  EXPECT_THAT(HashValue(apint_array),
              Ne(HashValue(std::array{one_64, two_64, two_128, one_128})));
  EXPECT_THAT(HashValue(apint_array),
              Ne(HashValue(std::array{one_64, two_64, one_64, two_128})));
  EXPECT_THAT(HashValue(apint_array),
              Ne(HashValue(std::array{one_64, two_128, one_128, two_128})));
  EXPECT_THAT(HashValue(apint_array),
              Ne(HashValue(std::array{one_64, two_64, one_128})));
  EXPECT_THAT(
      HashValue(apint_array),
      Ne(HashValue(std::array{one_64, two_64, one_128, two_128, two_128})));

  // Also test for a custom type that still *looks* like plain data. Note `b`
  // and `c` differ only in the `ignored` field, so they hash identically.
  HashableType a = {.x = 1, .y = 2};
  HashableType b = {.x = 3, .y = 4};
  HashableType c = {.x = 3, .y = 4, .ignored = 42};
  std::array custom_array = {a, b, c, a};
  EXPECT_THAT(HashValue(custom_array), Eq(HashValue(std::array{a, b, c, a})));
  EXPECT_THAT(HashValue(custom_array), Eq(HashValue(std::array{a, b, b, a})));
  EXPECT_THAT(HashValue(custom_array), Ne(HashValue(std::array{a, b, c, b})));
  EXPECT_THAT(HashValue(custom_array), Ne(HashValue(std::array{a, b, a, c})));
  EXPECT_THAT(HashValue(custom_array), Ne(HashValue(std::array{a, b, c})));
  EXPECT_THAT(HashValue(custom_array),
              Ne(HashValue(std::array{a, b, c, a, a})));
}
TEST(HashingTest, TupleRecursion) {
  // Make sure we can hash pairs and tuples which require us to recurse for each
  // element rather than treating the whole object as raw storage.
  // We can use APInt values to help test this.
  llvm::APInt one_64(/*numBits=*/64, /*val=*/1);
  llvm::APInt two_64(/*numBits=*/64, /*val=*/2);
  llvm::APInt one_128(/*numBits=*/128, /*val=*/1);
  llvm::APInt two_128(/*numBits=*/128, /*val=*/2);
  // Pairs: equal pairs hash equal; element substitutions and swaps must not
  // collide.
  EXPECT_THAT(HashValue(std::pair{one_64, one_128}),
              Eq(HashValue(std::pair{one_64, one_128})));
  EXPECT_THAT(HashValue(std::pair{one_64, one_128}),
              Ne(HashValue(std::pair{one_64, two_64})));
  EXPECT_THAT(HashValue(std::pair{one_64, one_128}),
              Ne(HashValue(std::pair{one_64, one_64})));
  EXPECT_THAT(HashValue(std::pair{one_64, one_128}),
              Ne(HashValue(std::pair{one_128, one_64})));
  // Tuples: same properties, plus a shorter tuple must not collide.
  EXPECT_THAT(HashValue(std::tuple{one_64, one_128, two_64}),
              Eq(HashValue(std::tuple{one_64, one_128, two_64})));
  EXPECT_THAT(HashValue(std::tuple{one_64, one_128, two_64}),
              Ne(HashValue(std::tuple{one_64, two_64, two_64})));
  EXPECT_THAT(HashValue(std::tuple{one_64, one_128, two_64}),
              Ne(HashValue(std::tuple{one_64, one_64, two_64})));
  EXPECT_THAT(HashValue(std::tuple{one_64, one_128, two_64}),
              Ne(HashValue(std::tuple{one_64, two_64, one_128})));
  EXPECT_THAT(HashValue(std::tuple{one_64, one_128, two_64}),
              Ne(HashValue(std::tuple{one_64, one_128})));

  // Also test for a custom type that still *looks* like plain data. Note `b`
  // and `c` differ only in the `ignored` field, so they hash identically.
  HashableType a = {.x = 1, .y = 2};
  HashableType b = {.x = 3, .y = 4};
  HashableType c = {.x = 3, .y = 4, .ignored = 42};
  EXPECT_THAT(HashValue(std::pair{a, b}), Eq(HashValue(std::pair{a, b})));
  EXPECT_THAT(HashValue(std::pair{a, b}), Ne(HashValue(std::pair{a, a})));
  EXPECT_THAT(HashValue(std::pair{a, b}), Ne(HashValue(std::pair{b, a})));
  EXPECT_THAT(HashValue(std::pair{a, b}), Eq(HashValue(std::pair{a, c})));
  EXPECT_THAT(HashValue(std::tuple{a, b, a}),
              Eq(HashValue(std::tuple{a, b, a})));
  EXPECT_THAT(HashValue(std::tuple{a, b, a}),
              Ne(HashValue(std::tuple{a, b, b})));
  EXPECT_THAT(HashValue(std::tuple{a, b, a}),
              Ne(HashValue(std::tuple{a, a, a})));
  EXPECT_THAT(HashValue(std::tuple{a, b, a}),
              Eq(HashValue(std::tuple{a, c, a})));
}
// The only significantly bad seed is zero, so pick a non-zero seed with a tiny
// amount of entropy to make sure that none of the testing relies on the entropy
// from this. Used by the exhaustive byte-string collision tests below.
constexpr uint64_t TestSeed = 42ULL * 1024;
  420. auto ToHexBytes(llvm::StringRef s) -> std::string {
  421. RawStringOstream rendered;
  422. rendered << "{";
  423. llvm::ListSeparator sep(", ");
  424. for (const char c : s) {
  425. rendered << sep << llvm::formatv("{0:x2}", static_cast<uint8_t>(c));
  426. }
  427. rendered << "}";
  428. return rendered.TakeStr();
  429. }
// A hash code paired with the value it was computed from, so that collision
// analysis can report which inputs collided.
template <typename T>
struct HashedValue {
  HashCode hash;
  T v;
};

using HashedString = HashedValue<std::string>;
  436. template <typename T>
  437. auto PrintFullWidthHex(llvm::raw_ostream& os, T value) {
  438. static_assert(sizeof(T) == 1 || sizeof(T) == 2 || sizeof(T) == 4 ||
  439. sizeof(T) == 8);
  440. // Given the nature of a format string and the good formatting, a nested
  441. // conditional seems like the most readable structure.
  442. // NOLINTBEGIN(readability-avoid-nested-conditional-operator)
  443. os << llvm::formatv(sizeof(T) == 1 ? "{0:x2}"
  444. : sizeof(T) == 2 ? "{0:x4}"
  445. : sizeof(T) == 4 ? "{0:x8}"
  446. : "{0:x16}",
  447. static_cast<uint64_t>(value));
  448. // NOLINTEND(readability-avoid-nested-conditional-operator)
  449. }
// Prints an integral hashed value: the hash code followed by the full-width
// hex rendering of the value it was computed from.
template <typename T>
  requires std::integral<T>
auto operator<<(llvm::raw_ostream& os, HashedValue<T> hv)
    -> llvm::raw_ostream& {
  os << "hash " << hv.hash << " for value ";
  PrintFullWidthHex(os, hv.v);
  return os;
}
// Prints a hashed pair of integrals: the hash code followed by the full-width
// hex rendering of both elements.
template <typename T, typename U>
  requires std::integral<T> && std::integral<U>
auto operator<<(llvm::raw_ostream& os, HashedValue<std::pair<T, U>> hv)
    -> llvm::raw_ostream& {
  os << "hash " << hv.hash << " for pair of ";
  PrintFullWidthHex(os, hv.v.first);
  os << " and ";
  PrintFullWidthHex(os, hv.v.second);
  return os;
}
// Summary of the collisions found within a bit range of a set of hash codes.
struct Collisions {
  // Total number of colliding inputs across the whole set.
  int total;
  // Median per-input collision count.
  int median;
  // Maximum collision count for any single colliding bit pattern.
  int max;
};
// Analyzes a list of hashed values to find all of the hash codes which collide
// within a specific bit-range.
//
// With `BitBegin=0` and `BitEnd=64`, this is equivalent to finding full
// collisions. But when the begin and end of the bit range are narrower than the
// 64-bits of the hash code, it allows this function to analyze a specific
// window of bits within the 64-bit hash code to understand how many collisions
// emerge purely within that bit range.
//
// With narrow ranges (we often look at the first N and last N bits for small
// N), collisions are common and so this function summarizes this with the total
// number of collisions and the median number of collisions for an input value.
//
// NOTE(review): assumes `hashes` is non-empty (indexes element 0 directly) and
// contains no duplicate values (enforced by CARBON_CHECKs below) — callers
// run `CheckNoDuplicateValues` first.
template <int BitBegin, int BitEnd, typename T>
auto FindBitRangeCollisions(llvm::ArrayRef<HashedValue<T>> hashes)
    -> Collisions {
  static_assert(BitBegin < BitEnd);
  constexpr int BitCount = BitEnd - BitBegin;
  static_assert(BitCount <= 32);
  constexpr int BitShift = BitBegin;
  constexpr uint64_t BitMask = ((1ULL << BitCount) - 1) << BitShift;

  // We collect counts of collisions in a vector. Initially, we just have a zero
  // and all inputs map to that collision count. As we discover collisions,
  // we'll create a dedicated counter for it and count how many inputs collide.
  llvm::SmallVector<int> collision_counts;
  collision_counts.push_back(0);
  // The "map" for collision counts. Each input hashed value has a corresponding
  // index stored here. That index is the index of the collision count in the
  // container above. We resize this to fill it with zeros to start as the zero
  // index above has a collision count of zero.
  //
  // The result of this is that the number of collisions for `hashes[i]` is
  // `collision_counts[collision_map[i]]`.
  llvm::SmallVector<int> collision_map;
  collision_map.resize(hashes.size());

  // First, we extract the bit subsequence we want to examine from each hash and
  // store it with an index back into the hashed values (or the collision map).
  //
  // The result is that, `bits_and_indices[i].bits` has the hash bits of
  // interest from `hashes[bits_and_indices[i].index]`.
  //
  // And because `collision_map` above uses the same indices as `hashes`,
  // `collision_counts[collision_map[bits_and_indices[i].index]]` is the number
  // of collisions for `bits_and_indices[i].bits`.
  struct BitSequenceAndHashIndex {
    // The bit subsequence of a hash input, adjusted into the low bits.
    uint32_t bits;
    // The index of the hash input corresponding to this bit sequence.
    int index;
  };
  llvm::SmallVector<BitSequenceAndHashIndex> bits_and_indices;
  bits_and_indices.reserve(hashes.size());
  for (const auto& [hash, v] : hashes) {
    CARBON_DCHECK(v == hashes[bits_and_indices.size()].v);
    auto hash_bits = (static_cast<uint64_t>(hash) & BitMask) >> BitShift;
    bits_and_indices.push_back(
        {.bits = static_cast<uint32_t>(hash_bits),
         .index = static_cast<int>(bits_and_indices.size())});
  }

  // Now we sort by the extracted bit sequence so we can efficiently scan for
  // colliding bit patterns.
  llvm::sort(bits_and_indices, [](const auto& lhs, const auto& rhs) {
    return lhs.bits < rhs.bits;
  });

  // Scan the sorted bit sequences we've extracted looking for collisions. We
  // count the total collisions, but we also track the number of individual
  // inputs that collide with each specific bit pattern.
  uint32_t prev_hash_bits = bits_and_indices[0].bits;
  int prev_index = bits_and_indices[0].index;
  bool in_collision = false;
  int total = 0;
  for (const auto& [hash_bits, hash_index] :
       llvm::ArrayRef(bits_and_indices).slice(1)) {
    // Check if we've found a new hash (and thus a new value), reset everything.
    CARBON_CHECK(hashes[prev_index].v != hashes[hash_index].v);
    if (hash_bits != prev_hash_bits) {
      // Distinct bit patterns must come from distinct full hashes.
      CARBON_CHECK(hashes[prev_index].hash != hashes[hash_index].hash);
      prev_hash_bits = hash_bits;
      prev_index = hash_index;
      in_collision = false;
      continue;
    }
    // Otherwise, we have a colliding bit sequence.
    ++total;
    // If we've already created a collision count to track this, just increment
    // it and map this hash to it.
    if (in_collision) {
      ++collision_counts.back();
      collision_map[hash_index] = collision_counts.size() - 1;
      continue;
    }
    // If this is a new collision, create a dedicated count to track it and
    // begin counting. Both the first and second colliding inputs map to the
    // new counter.
    in_collision = true;
    collision_map[prev_index] = collision_counts.size();
    collision_map[hash_index] = collision_counts.size();
    collision_counts.push_back(1);
  }

  // Sort by collision count for each hash.
  llvm::sort(bits_and_indices, [&](const auto& lhs, const auto& rhs) {
    return collision_counts[collision_map[lhs.index]] <
           collision_counts[collision_map[rhs.index]];
  });

  // And compute the median and max.
  int median = collision_counts
      [collision_map[bits_and_indices[bits_and_indices.size() / 2].index]];
  int max = *llvm::max_element(collision_counts);
  CARBON_CHECK(max ==
               collision_counts[collision_map[bits_and_indices.back().index]]);

  return {.total = total, .median = median, .max = max};
}
  583. auto CheckNoDuplicateValues(llvm::ArrayRef<HashedString> hashes) -> void {
  584. for (int i = 0, size = hashes.size(); i < size - 1; ++i) {
  585. const auto& [_, value] = hashes[i];
  586. CARBON_CHECK(value != hashes[i + 1].v, "Duplicate value: {0}", value);
  587. }
  588. }
// Builds hashes (seeded with `TestSeed`) for every possible N-byte string,
// returned sorted by hash code. Checks that no two generated strings were
// accidentally equal.
template <int N>
auto AllByteStringsHashedAndSorted() {
  static_assert(N < 5, "Can only generate all 4-byte strings or shorter.");

  llvm::SmallVector<HashedString> hashes;
  int64_t count = 1LL << (N * 8);
  for (int64_t i : llvm::seq(count)) {
    // Serialize `i` little-endian into N bytes to form the string, so every
    // distinct `i` yields a distinct string.
    uint8_t bytes[N];
    for (int j : llvm::seq(N)) {
      bytes[j] = (static_cast<uint64_t>(i) >> (8 * j)) & 0xff;
    }
    std::string s(std::begin(bytes), std::end(bytes));
    hashes.push_back({HashValue(s, TestSeed), s});
  }
  llvm::sort(hashes, [](const HashedString& lhs, const HashedString& rhs) {
    return static_cast<uint64_t>(lhs.hash) < static_cast<uint64_t>(rhs.hash);
  });
  CheckNoDuplicateValues(hashes);
  return hashes;
}
  608. auto ExpectNoHashCollisions(llvm::ArrayRef<HashedString> hashes) -> void {
  609. HashCode prev_hash = hashes[0].hash;
  610. llvm::StringRef prev_s = hashes[0].v;
  611. for (const auto& [hash, s] : hashes.slice(1)) {
  612. if (hash != prev_hash) {
  613. prev_hash = hash;
  614. prev_s = s;
  615. continue;
  616. }
  617. FAIL() << "Colliding hash '" << hash << "' of strings "
  618. << ToHexBytes(prev_s) << " and " << ToHexBytes(s);
  619. }
  620. }
  621. TEST(HashingTest, Collisions1ByteSized) {
  622. auto hashes_storage = AllByteStringsHashedAndSorted<1>();
  623. llvm::ArrayRef hashes = hashes_storage;
  624. ExpectNoHashCollisions(hashes);
  625. auto low_32bit_collisions = FindBitRangeCollisions<0, 32>(hashes);
  626. EXPECT_THAT(low_32bit_collisions.total, Eq(0));
  627. auto high_32bit_collisions = FindBitRangeCollisions<32, 64>(hashes);
  628. EXPECT_THAT(high_32bit_collisions.total, Eq(0));
  629. // We expect collisions when only looking at 7-bits of the hash. However,
  630. // modern hash table designs need to use either the low or high 7 bits as tags
  631. // for faster searching. So we add some direct testing that the median and max
  632. // collisions for any given key stay within bounds. We express the bounds in
  633. // terms of the minimum expected "perfect" rate of collisions if uniformly
  634. // distributed.
  635. int min_7bit_collisions = llvm::NextPowerOf2(hashes.size() - 1) / (1 << 7);
  636. auto low_7bit_collisions = FindBitRangeCollisions<0, 7>(hashes);
  637. EXPECT_THAT(low_7bit_collisions.median, Le(8 * min_7bit_collisions));
  638. EXPECT_THAT(low_7bit_collisions.max, Le(8 * min_7bit_collisions));
  639. auto high_7bit_collisions = FindBitRangeCollisions<64 - 7, 64>(hashes);
  640. EXPECT_THAT(high_7bit_collisions.median, Le(2 * min_7bit_collisions));
  641. EXPECT_THAT(high_7bit_collisions.max, Le(4 * min_7bit_collisions));
  642. }
  643. TEST(HashingTest, Collisions2ByteSized) {
  644. auto hashes_storage = AllByteStringsHashedAndSorted<2>();
  645. llvm::ArrayRef hashes = hashes_storage;
  646. ExpectNoHashCollisions(hashes);
  647. auto low_32bit_collisions = FindBitRangeCollisions<0, 32>(hashes);
  648. EXPECT_THAT(low_32bit_collisions.total, Eq(0));
  649. auto high_32bit_collisions = FindBitRangeCollisions<32, 64>(hashes);
  650. EXPECT_THAT(high_32bit_collisions.total, Eq(0));
  651. // Similar to 1-byte keys, we do expect a certain rate of collisions here but
  652. // bound the median and max.
  653. int min_7bit_collisions = llvm::NextPowerOf2(hashes.size() - 1) / (1 << 7);
  654. auto low_7bit_collisions = FindBitRangeCollisions<0, 7>(hashes);
  655. EXPECT_THAT(low_7bit_collisions.median, Le(2 * min_7bit_collisions));
  656. EXPECT_THAT(low_7bit_collisions.max, Le(2 * min_7bit_collisions));
  657. auto high_7bit_collisions = FindBitRangeCollisions<64 - 7, 64>(hashes);
  658. EXPECT_THAT(high_7bit_collisions.median, Le(2 * min_7bit_collisions));
  659. EXPECT_THAT(high_7bit_collisions.max, Le(2 * min_7bit_collisions));
  660. }
// Generate and hash all strings of [BeginByteCount, EndByteCount] bytes, with
// [BeginSetBitCount, EndSetBitCount] contiguous bits at each possible bit
// offset set to one and all other bits set to zero. Both ranges are inclusive
// of their endpoints, matching the `llvm::seq_inclusive` loops below.
  664. template <int BeginByteCount, int EndByteCount, int BeginSetBitCount,
  665. int EndSetBitCount>
  666. struct SparseHashTestParamRanges {
  667. static_assert(BeginByteCount >= 0);
  668. static_assert(BeginByteCount < EndByteCount);
  669. static_assert(BeginSetBitCount >= 0);
  670. static_assert(BeginSetBitCount < EndSetBitCount);
  671. // Note that we intentionally allow the end-set-bit-count to result in more
  672. // set bits than are available -- we truncate the number of set bits to fit
  673. // within the byte string.
  674. static_assert(BeginSetBitCount <= BeginByteCount * 8);
  675. struct ByteCount {
  676. static constexpr int Begin = BeginByteCount;
  677. static constexpr int End = EndByteCount;
  678. };
  679. struct SetBitCount {
  680. static constexpr int Begin = BeginSetBitCount;
  681. static constexpr int End = EndSetBitCount;
  682. };
  683. };
// Typed test fixture that generates "sparse" byte strings -- strings that are
// all zero bits except for one contiguous run of set bits -- per the
// compile-time ranges in `ParamRanges`.
template <typename ParamRanges>
struct SparseHashTest : ::testing::Test {
  using ByteCount = typename ParamRanges::ByteCount;
  using SetBitCount = typename ParamRanges::SetBitCount;

  // Builds every sparse string described by `ParamRanges`, hashed with
  // `TestSeed` and sorted by the full 64-bit hash. For each byte count and
  // each set-bit-run length (truncated to the string's bit width), a string is
  // emitted with the run placed at every possible bit offset. Checks that no
  // string value is generated twice before returning.
  static auto GetHashedByteStrings() {
    llvm::SmallVector<HashedString> hashes;
    for (int byte_count :
         llvm::seq_inclusive(ByteCount::Begin, ByteCount::End)) {
      int bits = byte_count * 8;
      for (int set_bit_count : llvm::seq_inclusive(
               SetBitCount::Begin, std::min(bits, SetBitCount::End))) {
        if (set_bit_count == 0) {
          // A zero-length run has only one placement: the all-zero string.
          std::string s(byte_count, '\0');
          hashes.push_back({HashValue(s, TestSeed), std::move(s)});
          continue;
        }
        for (int begin_set_bit : llvm::seq_inclusive(0, bits - set_bit_count)) {
          std::string s(byte_count, '\0');
          // Decompose the run's first and one-past-last bit positions into
          // byte and within-byte indices.
          int begin_set_bit_byte_index = begin_set_bit / 8;
          int begin_set_bit_bit_index = begin_set_bit % 8;
          int end_set_bit_byte_index = (begin_set_bit + set_bit_count) / 8;
          int end_set_bit_bit_index = (begin_set_bit + set_bit_count) % 8;
          // We build a begin byte and end byte. We set the begin byte, set
          // subsequent bytes up to *and including* the end byte to all ones,
          // and then mask the end byte. For multi-byte runs, the mask just sets
          // the end byte and for single-byte runs the mask computes the
          // intersecting bits.
          //
          // Consider a 4-set-bit count, starting at bit 2. The begin bit index
          // is 2, and the end bit index is 6.
          //
          //   Begin byte:  0b11111111 -(shl 2)-----> 0b11111100
          //   End byte:    0b11111111 -(shr (8-6))-> 0b00111111
          //   Masked byte:                           0b00111100
          //
          // Or a 10-set-bit-count starting at bit 2. The begin bit index is 2,
          // the end byte index is (12 / 8) or 1, and the end bit index is (12 %
          // 8) or 4.
          //
          //   Begin byte:  0b11111111 -(shl 2)-----> 0b11111100 ->  6 bits
          //   End byte:    0b11111111 -(shr (8-4))-> 0b00001111 ->  4 bits
          //                                                        10 total bits
          //
          uint8_t begin_set_bit_byte = 0xFFU << begin_set_bit_bit_index;
          // When the run ends on a byte boundary (`end_set_bit_bit_index` is
          // 0), the shift by 8 yields 0 and `has_end_byte_bits` is false.
          uint8_t end_set_bit_byte = 0xFFU >> (8 - end_set_bit_bit_index);
          bool has_end_byte_bits = end_set_bit_byte != 0;
          s[begin_set_bit_byte_index] = begin_set_bit_byte;
          // Fill the interior bytes, including the end byte only when it has
          // bits set (the bool converts to 0 or 1 here).
          for (int i : llvm::seq(begin_set_bit_byte_index + 1,
                                 end_set_bit_byte_index + has_end_byte_bits)) {
            s[i] = '\xFF';
          }
          // If there are no bits set in the end byte, it may be past-the-end
          // and we can't even mask a zero byte safely.
          if (has_end_byte_bits) {
            s[end_set_bit_byte_index] &= end_set_bit_byte;
          }
          hashes.push_back({HashValue(s, TestSeed), std::move(s)});
        }
      }
    }
    llvm::sort(hashes, [](const HashedString& lhs, const HashedString& rhs) {
      return static_cast<uint64_t>(lhs.hash) < static_cast<uint64_t>(rhs.hash);
    });
    CheckNoDuplicateValues(hashes);
    return hashes;
  }
};
// Instantiations of the sparse hash tests: long strings with at most a single
// set bit, medium strings with short runs of set bits, and shorter strings
// with longer runs.
using SparseHashTestParams = ::testing::Types<
    SparseHashTestParamRanges</*BeginByteCount=*/0, /*EndByteCount=*/256,
                              /*BeginSetBitCount=*/0, /*EndSetBitCount=*/1>,
    SparseHashTestParamRanges</*BeginByteCount=*/1, /*EndByteCount=*/128,
                              /*BeginSetBitCount=*/2, /*EndSetBitCount=*/4>,
    SparseHashTestParamRanges</*BeginByteCount=*/1, /*EndByteCount=*/64,
                              /*BeginSetBitCount=*/4, /*EndSetBitCount=*/16>>;
TYPED_TEST_SUITE(SparseHashTest, SparseHashTestParams);
  759. TYPED_TEST(SparseHashTest, Collisions) {
  760. auto hashes_storage = this->GetHashedByteStrings();
  761. llvm::ArrayRef hashes = hashes_storage;
  762. ExpectNoHashCollisions(hashes);
  763. int min_7bit_collisions = llvm::NextPowerOf2(hashes.size() - 1) / (1 << 7);
  764. auto low_7bit_collisions = FindBitRangeCollisions<0, 7>(hashes);
  765. EXPECT_THAT(low_7bit_collisions.median, Le(2 * min_7bit_collisions));
  766. EXPECT_THAT(low_7bit_collisions.max, Le(2 * min_7bit_collisions));
  767. auto high_7bit_collisions = FindBitRangeCollisions<64 - 7, 64>(hashes);
  768. EXPECT_THAT(high_7bit_collisions.median, Le(2 * min_7bit_collisions));
  769. EXPECT_THAT(high_7bit_collisions.max, Le(2 * min_7bit_collisions));
  770. }
  771. } // namespace
  772. } // namespace Carbon