// hashing_test.cpp
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. #include "common/hashing.h"
  5. #include <gmock/gmock.h>
  6. #include <gtest/gtest.h>
  7. #include <concepts>
  8. #include "llvm/ADT/Sequence.h"
  9. #include "llvm/ADT/StringExtras.h"
  10. #include "llvm/Support/FormatVariadic.h"
  11. #include "llvm/Support/TypeName.h"
  12. namespace Carbon {
  13. namespace {
  14. using ::testing::Eq;
  15. using ::testing::Le;
  16. using ::testing::Ne;
  17. TEST(HashingTest, HashCodeAPI) {
  18. // Manually compute a few hash codes where we can exercise the underlying API.
  19. HashCode empty = HashValue("");
  20. HashCode a = HashValue("a");
  21. HashCode b = HashValue("b");
  22. ASSERT_THAT(HashValue(""), Eq(empty));
  23. ASSERT_THAT(HashValue("a"), Eq(a));
  24. ASSERT_THAT(HashValue("b"), Eq(b));
  25. ASSERT_THAT(empty, Ne(a));
  26. ASSERT_THAT(empty, Ne(b));
  27. ASSERT_THAT(a, Ne(b));
  28. // Exercise the methods in basic ways across a few sizes. This doesn't check
  29. // much beyond stability across re-computed values, crashing, or hitting UB.
  30. EXPECT_THAT(HashValue("a").ExtractIndex(), Eq(a.ExtractIndex()));
  31. EXPECT_THAT(a.ExtractIndex(), Ne(b.ExtractIndex()));
  32. EXPECT_THAT(a.ExtractIndex(), Ne(empty.ExtractIndex()));
  33. // Note that the index produced with a tag may be different from the index
  34. // alone!
  35. EXPECT_THAT(HashValue("a").ExtractIndexAndTag<2>(),
  36. Eq(a.ExtractIndexAndTag<2>()));
  37. EXPECT_THAT(HashValue("a").ExtractIndexAndTag<16>(),
  38. Eq(a.ExtractIndexAndTag<16>()));
  39. EXPECT_THAT(HashValue("a").ExtractIndexAndTag<7>(),
  40. Eq(a.ExtractIndexAndTag<7>()));
  41. const auto [a_index, a_tag] = a.ExtractIndexAndTag<4>();
  42. const auto [b_index, b_tag] = b.ExtractIndexAndTag<4>();
  43. EXPECT_THAT(a_index, Ne(b_index));
  44. EXPECT_THAT(a_tag, Ne(b_tag));
  45. }
  46. TEST(HashingTest, Integers) {
  47. for (int64_t i : {0, 1, 2, 3, 42, -1, -2, -3, -13}) {
  48. SCOPED_TRACE(llvm::formatv("Hashing: {0}", i).str());
  49. auto test_int_hash = [](auto i) {
  50. using T = decltype(i);
  51. SCOPED_TRACE(
  52. llvm::formatv("Hashing type: {0}", llvm::getTypeName<T>()).str());
  53. HashCode hash = HashValue(i);
  54. // Hashes should be stable within the execution.
  55. EXPECT_THAT(HashValue(i), Eq(hash));
  56. // Zero should match, and other integers shouldn't collide trivially.
  57. HashCode hash_zero = HashValue(static_cast<T>(0));
  58. if (i == 0) {
  59. EXPECT_THAT(hash, Eq(hash_zero));
  60. } else {
  61. EXPECT_THAT(hash, Ne(hash_zero));
  62. }
  63. };
  64. test_int_hash(static_cast<int8_t>(i));
  65. test_int_hash(static_cast<uint8_t>(i));
  66. test_int_hash(static_cast<int16_t>(i));
  67. test_int_hash(static_cast<uint16_t>(i));
  68. test_int_hash(static_cast<int32_t>(i));
  69. test_int_hash(static_cast<uint32_t>(i));
  70. // `i` is already an int64_t variable.
  71. test_int_hash(i);
  72. test_int_hash(static_cast<uint64_t>(i));
  73. }
  74. }
  75. TEST(HashingTest, BasicSeeding) {
  76. auto unseeded_hash = HashValue(42);
  77. EXPECT_THAT(unseeded_hash, Ne(HashValue(42, 1)));
  78. EXPECT_THAT(unseeded_hash, Ne(HashValue(42, 2)));
  79. EXPECT_THAT(unseeded_hash, Ne(HashValue(42, 3)));
  80. EXPECT_THAT(unseeded_hash,
  81. Ne(HashValue(42, static_cast<uint64_t>(unseeded_hash))));
  82. }
  83. TEST(HashingTest, Pointers) {
  84. int object1 = 42;
  85. std::string object2 =
  86. "Hello World! This is a long-ish string so it ends up on the heap!";
  87. HashCode hash_null = HashValue(nullptr);
  88. // Hashes should be stable.
  89. EXPECT_THAT(HashValue(nullptr), Eq(hash_null));
  90. // Hash other kinds of pointers without trivial collisions.
  91. HashCode hash1 = HashValue(&object1);
  92. HashCode hash2 = HashValue(&object2);
  93. HashCode hash3 = HashValue(object2.data());
  94. EXPECT_THAT(hash1, Ne(hash_null));
  95. EXPECT_THAT(hash2, Ne(hash_null));
  96. EXPECT_THAT(hash3, Ne(hash_null));
  97. EXPECT_THAT(hash1, Ne(hash2));
  98. EXPECT_THAT(hash1, Ne(hash3));
  99. EXPECT_THAT(hash2, Ne(hash3));
  100. // Hash values reflect the address and not the type.
  101. EXPECT_THAT(HashValue(static_cast<void*>(nullptr)), Eq(hash_null));
  102. EXPECT_THAT(HashValue(static_cast<int*>(nullptr)), Eq(hash_null));
  103. EXPECT_THAT(HashValue(static_cast<std::string*>(nullptr)), Eq(hash_null));
  104. EXPECT_THAT(HashValue(reinterpret_cast<void*>(&object1)), Eq(hash1));
  105. EXPECT_THAT(HashValue(reinterpret_cast<int*>(&object2)), Eq(hash2));
  106. EXPECT_THAT(HashValue(reinterpret_cast<std::string*>(object2.data())),
  107. Eq(hash3));
  108. }
  109. TEST(HashingTest, PairsAndTuples) {
  110. // Note that we can't compare hash codes across arity, or in general, compare
  111. // hash codes for different types as the type isn't part of the hash. These
  112. // hashes are targeted at use in hash tables which pick a single type that's
  113. // the basis of any comparison.
  114. HashCode hash_00 = HashValue(std::pair(0, 0));
  115. HashCode hash_01 = HashValue(std::pair(0, 1));
  116. HashCode hash_10 = HashValue(std::pair(1, 0));
  117. HashCode hash_11 = HashValue(std::pair(1, 1));
  118. EXPECT_THAT(hash_00, Ne(hash_01));
  119. EXPECT_THAT(hash_00, Ne(hash_10));
  120. EXPECT_THAT(hash_00, Ne(hash_11));
  121. EXPECT_THAT(hash_01, Ne(hash_10));
  122. EXPECT_THAT(hash_01, Ne(hash_11));
  123. EXPECT_THAT(hash_10, Ne(hash_11));
  124. HashCode hash_000 = HashValue(std::tuple(0, 0, 0));
  125. HashCode hash_001 = HashValue(std::tuple(0, 0, 1));
  126. HashCode hash_010 = HashValue(std::tuple(0, 1, 0));
  127. HashCode hash_011 = HashValue(std::tuple(0, 1, 1));
  128. HashCode hash_100 = HashValue(std::tuple(1, 0, 0));
  129. HashCode hash_101 = HashValue(std::tuple(1, 0, 1));
  130. HashCode hash_110 = HashValue(std::tuple(1, 1, 0));
  131. HashCode hash_111 = HashValue(std::tuple(1, 1, 1));
  132. EXPECT_THAT(hash_000, Ne(hash_001));
  133. EXPECT_THAT(hash_000, Ne(hash_010));
  134. EXPECT_THAT(hash_000, Ne(hash_011));
  135. EXPECT_THAT(hash_000, Ne(hash_100));
  136. EXPECT_THAT(hash_000, Ne(hash_101));
  137. EXPECT_THAT(hash_000, Ne(hash_110));
  138. EXPECT_THAT(hash_000, Ne(hash_111));
  139. EXPECT_THAT(hash_001, Ne(hash_010));
  140. EXPECT_THAT(hash_001, Ne(hash_011));
  141. EXPECT_THAT(hash_001, Ne(hash_100));
  142. EXPECT_THAT(hash_001, Ne(hash_101));
  143. EXPECT_THAT(hash_001, Ne(hash_110));
  144. EXPECT_THAT(hash_001, Ne(hash_111));
  145. EXPECT_THAT(hash_010, Ne(hash_011));
  146. EXPECT_THAT(hash_010, Ne(hash_100));
  147. EXPECT_THAT(hash_010, Ne(hash_101));
  148. EXPECT_THAT(hash_010, Ne(hash_110));
  149. EXPECT_THAT(hash_010, Ne(hash_111));
  150. EXPECT_THAT(hash_011, Ne(hash_100));
  151. EXPECT_THAT(hash_011, Ne(hash_101));
  152. EXPECT_THAT(hash_011, Ne(hash_110));
  153. EXPECT_THAT(hash_011, Ne(hash_111));
  154. EXPECT_THAT(hash_100, Ne(hash_101));
  155. EXPECT_THAT(hash_100, Ne(hash_110));
  156. EXPECT_THAT(hash_100, Ne(hash_111));
  157. EXPECT_THAT(hash_101, Ne(hash_110));
  158. EXPECT_THAT(hash_101, Ne(hash_111));
  159. EXPECT_THAT(hash_110, Ne(hash_111));
  160. // Hashing a 2-tuple and a pair should produce identical results, so pairs
  161. // are compatible with code using things like variadic tuple construction.
  162. EXPECT_THAT(HashValue(std::tuple(0, 0)), Eq(hash_00));
  163. EXPECT_THAT(HashValue(std::tuple(0, 1)), Eq(hash_01));
  164. EXPECT_THAT(HashValue(std::tuple(1, 0)), Eq(hash_10));
  165. EXPECT_THAT(HashValue(std::tuple(1, 1)), Eq(hash_11));
  166. // Integers in tuples should also work.
  167. for (int i : {0, 1, 2, 3, 42, -1, -2, -3, -13}) {
  168. SCOPED_TRACE(llvm::formatv("Hashing: ({0}, {0}, {0})", i).str());
  169. auto test_int_tuple_hash = [](auto i) {
  170. using T = decltype(i);
  171. SCOPED_TRACE(
  172. llvm::formatv("Hashing integer type: {0}", llvm::getTypeName<T>())
  173. .str());
  174. std::tuple v = {i, i, i};
  175. HashCode hash = HashValue(v);
  176. // Hashes should be stable within the execution.
  177. EXPECT_THAT(HashValue(v), Eq(hash));
  178. // Zero should match, and other integers shouldn't collide trivially.
  179. T zero = 0;
  180. std::tuple zero_tuple = {zero, zero, zero};
  181. HashCode hash_zero = HashValue(zero_tuple);
  182. if (i == 0) {
  183. EXPECT_THAT(hash, Eq(hash_zero));
  184. } else {
  185. EXPECT_THAT(hash, Ne(hash_zero));
  186. }
  187. };
  188. test_int_tuple_hash(i);
  189. test_int_tuple_hash(static_cast<int8_t>(i));
  190. test_int_tuple_hash(static_cast<uint8_t>(i));
  191. test_int_tuple_hash(static_cast<int16_t>(i));
  192. test_int_tuple_hash(static_cast<uint16_t>(i));
  193. test_int_tuple_hash(static_cast<int32_t>(i));
  194. test_int_tuple_hash(static_cast<uint32_t>(i));
  195. test_int_tuple_hash(static_cast<int64_t>(i));
  196. test_int_tuple_hash(static_cast<uint64_t>(i));
  197. // Heterogeneous integer types should also work, but we only support
  198. // comparing against hashes of tuples with the exact same type.
  199. using T1 = std::tuple<int8_t, uint32_t, int16_t>;
  200. using T2 = std::tuple<uint32_t, int16_t, uint64_t>;
  201. if (i == 0) {
  202. EXPECT_THAT(HashValue(T1{i, i, i}), Eq(HashValue(T1{0, 0, 0})));
  203. EXPECT_THAT(HashValue(T2{i, i, i}), Eq(HashValue(T2{0, 0, 0})));
  204. } else {
  205. EXPECT_THAT(HashValue(T1{i, i, i}), Ne(HashValue(T1{0, 0, 0})));
  206. EXPECT_THAT(HashValue(T2{i, i, i}), Ne(HashValue(T2{0, 0, 0})));
  207. }
  208. }
  209. // Hash values of pointers in pairs and tuples reflect the address and not the
  210. // type. Pairs and 2-tuples give the same hash values.
  211. HashCode hash_2null = HashValue(std::pair(nullptr, nullptr));
  212. EXPECT_THAT(HashValue(std::tuple(static_cast<int*>(nullptr),
  213. static_cast<double*>(nullptr))),
  214. Eq(hash_2null));
  215. // Hash other kinds of pointers without trivial collisions.
  216. int object1 = 42;
  217. std::string object2 = "Hello world!";
  218. HashCode hash_3ptr =
  219. HashValue(std::tuple(&object1, &object2, object2.data()));
  220. EXPECT_THAT(hash_3ptr, Ne(HashValue(std::tuple(nullptr, nullptr, nullptr))));
  221. // Hash values reflect the address and not the type.
  222. EXPECT_THAT(
  223. HashValue(std::tuple(reinterpret_cast<void*>(&object1),
  224. reinterpret_cast<int*>(&object2),
  225. reinterpret_cast<std::string*>(object2.data()))),
  226. Eq(hash_3ptr));
  227. }
  228. TEST(HashingTest, BasicStrings) {
  229. llvm::SmallVector<std::pair<std::string, HashCode>> hashes;
  230. for (int size : {0, 1, 2, 4, 16, 64, 256, 1024}) {
  231. std::string s(size, 'a');
  232. hashes.push_back({s, HashValue(s)});
  233. }
  234. for (const auto& [s1, hash1] : hashes) {
  235. EXPECT_THAT(HashValue(s1), Eq(hash1));
  236. // Also check that we get the same hashes even when using string-wrapping
  237. // types.
  238. EXPECT_THAT(HashValue(std::string_view(s1)), Eq(hash1));
  239. EXPECT_THAT(HashValue(llvm::StringRef(s1)), Eq(hash1));
  240. // And some basic tests that simple things don't collide.
  241. for (const auto& [s2, hash2] : hashes) {
  242. if (s1 != s2) {
  243. EXPECT_THAT(hash1, Ne(hash2))
  244. << "Matching hashes for '" << s1 << "' and '" << s2 << "'";
  245. }
  246. }
  247. }
  248. }
// A simple custom type with two hashed fields and one field deliberately
// excluded from the hash.
struct HashableType {
  int x;
  int y;
  // Not fed to the hasher below, so differences here must not change the hash.
  int ignored = 0;

  // Customization point for Carbon's hashing; see common/hashing.h.
  friend auto CarbonHashValue(const HashableType& value, uint64_t seed)
      -> HashCode {
    Hasher hasher(seed);
    hasher.Hash(value.x, value.y);
    return static_cast<HashCode>(hasher);
  }
};
  261. TEST(HashingTest, CustomType) {
  262. HashableType a = {.x = 1, .y = 2};
  263. HashableType b = {.x = 3, .y = 4};
  264. EXPECT_THAT(HashValue(a), Eq(HashValue(a)));
  265. EXPECT_THAT(HashValue(a), Ne(HashValue(b)));
  266. // Differences in an ignored field have no impact.
  267. HashableType c = {.x = 3, .y = 4, .ignored = 42};
  268. EXPECT_THAT(HashValue(c), Eq(HashValue(b)));
  269. }
// The only significantly bad seed is zero, so pick a non-zero seed with a tiny
// amount of entropy to make sure that none of the testing relies on the entropy
// from this. (42 * 1024 == 43008.)
constexpr uint64_t TestSeed = 42ULL * 1024;
  274. auto ToHexBytes(llvm::StringRef s) -> std::string {
  275. std::string rendered;
  276. llvm::raw_string_ostream os(rendered);
  277. os << "{";
  278. llvm::ListSeparator sep(", ");
  279. for (const char c : s) {
  280. os << sep << llvm::formatv("{0:x2}", static_cast<uint8_t>(c));
  281. }
  282. os << "}";
  283. return rendered;
  284. }
// A hash code paired with the value `v` it was computed from.
template <typename T>
struct HashedValue {
  HashCode hash;
  T v;
};
// The common case in these tests: hashed byte strings.
using HashedString = HashedValue<std::string>;
  291. template <typename T>
  292. auto PrintFullWidthHex(llvm::raw_ostream& os, T value) {
  293. static_assert(sizeof(T) == 1 || sizeof(T) == 2 || sizeof(T) == 4 ||
  294. sizeof(T) == 8);
  295. // Given the nature of a format string and the good formatting, a nested
  296. // conditional seems like the most readable structure.
  297. // NOLINTBEGIN(readability-avoid-nested-conditional-operator)
  298. os << llvm::formatv(sizeof(T) == 1 ? "{0:x2}"
  299. : sizeof(T) == 2 ? "{0:x4}"
  300. : sizeof(T) == 4 ? "{0:x8}"
  301. : "{0:x16}",
  302. static_cast<uint64_t>(value));
  303. // NOLINTEND(readability-avoid-nested-conditional-operator)
  304. }
// Prints a hashed integral value as its hash code plus the full-width hex
// rendering of the value.
template <typename T>
  requires std::integral<T>
auto operator<<(llvm::raw_ostream& os, HashedValue<T> hv)
    -> llvm::raw_ostream& {
  os << "hash " << hv.hash << " for value ";
  PrintFullWidthHex(os, hv.v);
  return os;
}
// Prints a hashed pair of integral values, rendering both elements in
// full-width hex.
template <typename T, typename U>
  requires std::integral<T> && std::integral<U>
auto operator<<(llvm::raw_ostream& os, HashedValue<std::pair<T, U>> hv)
    -> llvm::raw_ostream& {
  os << "hash " << hv.hash << " for pair of ";
  PrintFullWidthHex(os, hv.v.first);
  os << " and ";
  PrintFullWidthHex(os, hv.v.second);
  return os;
}
// Summary of the collisions found within some bit range of a set of hashes.
// Populated by `FindBitRangeCollisions`.
struct Collisions {
  // Total number of colliding inputs observed.
  int total;
  // Median per-input collision count.
  int median;
  // Maximum per-input collision count.
  int max;
};
// Analyzes a list of hashed values to find all of the hash codes which collide
// within a specific bit-range.
//
// With `BitBegin=0` and `BitEnd=64`, this is equivalent to finding full
// collisions. But when the begin and end of the bit range are narrower than the
// 64-bits of the hash code, it allows this function to analyze a specific
// window of bits within the 64-bit hash code to understand how many collisions
// emerge purely within that bit range.
//
// With narrow ranges (we often look at the first N and last N bits for small
// N), collisions are common and so this function summarizes this with the total
// number of collisions and the median number of collisions for an input value.
//
// NOTE(review): the scan below assumes adjacent entries with equal extracted
// bits have distinct values and, when the bits differ, distinct hashes — the
// `CARBON_CHECK`s enforce this, so callers pass hash-sorted, de-duplicated
// input (see `AllByteStringsHashedAndSorted`).
template <int BitBegin, int BitEnd, typename T>
auto FindBitRangeCollisions(llvm::ArrayRef<HashedValue<T>> hashes)
    -> Collisions {
  static_assert(BitBegin < BitEnd);
  constexpr int BitCount = BitEnd - BitBegin;
  // The extracted bit window must fit in the `uint32_t` used below.
  static_assert(BitCount <= 32);
  constexpr int BitShift = BitBegin;
  constexpr uint64_t BitMask = ((1ULL << BitCount) - 1) << BitShift;

  // We collect counts of collisions in a vector. Initially, we just have a zero
  // and all inputs map to that collision count. As we discover collisions,
  // we'll create a dedicated counter for it and count how many inputs collide.
  llvm::SmallVector<int> collision_counts;
  collision_counts.push_back(0);

  // The "map" for collision counts. Each input hashed value has a corresponding
  // index stored here. That index is the index of the collision count in the
  // container above. We resize this to fill it with zeros to start as the zero
  // index above has a collision count of zero.
  //
  // The result of this is that the number of collisions for `hashes[i]` is
  // `collision_counts[collision_map[i]]`.
  llvm::SmallVector<int> collision_map;
  collision_map.resize(hashes.size());

  // First, we extract the bit subsequence we want to examine from each hash and
  // store it with an index back into the hashed values (or the collision map).
  //
  // The result is that, `bits_and_indices[i].bits` has the hash bits of
  // interest from `hashes[bits_and_indices[i].index]`.
  //
  // And because `collision_map` above uses the same indices as `hashes`,
  // `collision_counts[collision_map[bits_and_indices[i].index]]` is the number
  // of collisions for `bits_and_indices[i].bits`.
  struct BitSequenceAndHashIndex {
    // The bit subsequence of a hash input, adjusted into the low bits.
    uint32_t bits;
    // The index of the hash input corresponding to this bit sequence.
    int index;
  };
  llvm::SmallVector<BitSequenceAndHashIndex> bits_and_indices;
  bits_and_indices.reserve(hashes.size());
  for (const auto& [hash, v] : hashes) {
    CARBON_DCHECK(v == hashes[bits_and_indices.size()].v);
    auto hash_bits = (static_cast<uint64_t>(hash) & BitMask) >> BitShift;
    bits_and_indices.push_back(
        {.bits = static_cast<uint32_t>(hash_bits),
         .index = static_cast<int>(bits_and_indices.size())});
  }

  // Now we sort by the extracted bit sequence so we can efficiently scan for
  // colliding bit patterns.
  std::sort(
      bits_and_indices.begin(), bits_and_indices.end(),
      [](const auto& lhs, const auto& rhs) { return lhs.bits < rhs.bits; });

  // Scan the sorted bit sequences we've extracted looking for collisions. We
  // count the total collisions, but we also track the number of individual
  // inputs that collide with each specific bit pattern.
  uint32_t prev_hash_bits = bits_and_indices[0].bits;
  int prev_index = bits_and_indices[0].index;
  bool in_collision = false;
  int total = 0;
  for (const auto& [hash_bits, hash_index] :
       llvm::ArrayRef(bits_and_indices).slice(1)) {
    // Check if we've found a new hash (and thus a new value), reset everything.
    CARBON_CHECK(hashes[prev_index].v != hashes[hash_index].v);
    if (hash_bits != prev_hash_bits) {
      CARBON_CHECK(hashes[prev_index].hash != hashes[hash_index].hash);
      prev_hash_bits = hash_bits;
      prev_index = hash_index;
      in_collision = false;
      continue;
    }
    // Otherwise, we have a colliding bit sequence.
    ++total;
    // If we've already created a collision count to track this, just increment
    // it and map this hash to it.
    if (in_collision) {
      ++collision_counts.back();
      collision_map[hash_index] = collision_counts.size() - 1;
      continue;
    }
    // If this is a new collision, create a dedicated count to track it and
    // begin counting.
    in_collision = true;
    collision_map[prev_index] = collision_counts.size();
    collision_map[hash_index] = collision_counts.size();
    collision_counts.push_back(1);
  }

  // Sort by collision count for each hash.
  std::sort(bits_and_indices.begin(), bits_and_indices.end(),
            [&](const auto& lhs, const auto& rhs) {
              return collision_counts[collision_map[lhs.index]] <
                     collision_counts[collision_map[rhs.index]];
            });

  // And compute the median and max.
  int median = collision_counts
      [collision_map[bits_and_indices[bits_and_indices.size() / 2].index]];
  int max = *std::max_element(collision_counts.begin(), collision_counts.end());
  CARBON_CHECK(max ==
               collision_counts[collision_map[bits_and_indices.back().index]]);
  return {.total = total, .median = median, .max = max};
}
  439. auto CheckNoDuplicateValues(llvm::ArrayRef<HashedString> hashes) -> void {
  440. for (int i = 0, size = hashes.size(); i < size - 1; ++i) {
  441. const auto& [_, value] = hashes[i];
  442. CARBON_CHECK(value != hashes[i + 1].v) << "Duplicate value: " << value;
  443. }
  444. }
  445. template <int N>
  446. auto AllByteStringsHashedAndSorted() {
  447. static_assert(N < 5, "Can only generate all 4-byte strings or shorter.");
  448. llvm::SmallVector<HashedString> hashes;
  449. int64_t count = 1LL << (N * 8);
  450. for (int64_t i : llvm::seq(count)) {
  451. uint8_t bytes[N];
  452. for (int j : llvm::seq(N)) {
  453. bytes[j] = (static_cast<uint64_t>(i) >> (8 * j)) & 0xff;
  454. }
  455. std::string s(std::begin(bytes), std::end(bytes));
  456. hashes.push_back({HashValue(s, TestSeed), s});
  457. }
  458. std::sort(hashes.begin(), hashes.end(),
  459. [](const HashedString& lhs, const HashedString& rhs) {
  460. return static_cast<uint64_t>(lhs.hash) <
  461. static_cast<uint64_t>(rhs.hash);
  462. });
  463. CheckNoDuplicateValues(hashes);
  464. return hashes;
  465. }
  466. auto ExpectNoHashCollisions(llvm::ArrayRef<HashedString> hashes) -> void {
  467. HashCode prev_hash = hashes[0].hash;
  468. llvm::StringRef prev_s = hashes[0].v;
  469. for (const auto& [hash, s] : hashes.slice(1)) {
  470. if (hash != prev_hash) {
  471. prev_hash = hash;
  472. prev_s = s;
  473. continue;
  474. }
  475. FAIL() << "Colliding hash '" << hash << "' of strings "
  476. << ToHexBytes(prev_s) << " and " << ToHexBytes(s);
  477. }
  478. }
  479. TEST(HashingTest, Collisions1ByteSized) {
  480. auto hashes_storage = AllByteStringsHashedAndSorted<1>();
  481. auto hashes = llvm::ArrayRef(hashes_storage);
  482. ExpectNoHashCollisions(hashes);
  483. auto low_32bit_collisions = FindBitRangeCollisions<0, 32>(hashes);
  484. EXPECT_THAT(low_32bit_collisions.total, Eq(0));
  485. auto high_32bit_collisions = FindBitRangeCollisions<32, 64>(hashes);
  486. EXPECT_THAT(high_32bit_collisions.total, Eq(0));
  487. // We expect collisions when only looking at 7-bits of the hash. However,
  488. // modern hash table designs need to use either the low or high 7 bits as tags
  489. // for faster searching. So we add some direct testing that the median and max
  490. // collisions for any given key stay within bounds. We express the bounds in
  491. // terms of the minimum expected "perfect" rate of collisions if uniformly
  492. // distributed.
  493. int min_7bit_collisions = llvm::NextPowerOf2(hashes.size() - 1) / (1 << 7);
  494. auto low_7bit_collisions = FindBitRangeCollisions<0, 7>(hashes);
  495. EXPECT_THAT(low_7bit_collisions.median, Le(8 * min_7bit_collisions));
  496. EXPECT_THAT(low_7bit_collisions.max, Le(8 * min_7bit_collisions));
  497. auto high_7bit_collisions = FindBitRangeCollisions<64 - 7, 64>(hashes);
  498. EXPECT_THAT(high_7bit_collisions.median, Le(2 * min_7bit_collisions));
  499. EXPECT_THAT(high_7bit_collisions.max, Le(4 * min_7bit_collisions));
  500. }
  501. TEST(HashingTest, Collisions2ByteSized) {
  502. auto hashes_storage = AllByteStringsHashedAndSorted<2>();
  503. auto hashes = llvm::ArrayRef(hashes_storage);
  504. ExpectNoHashCollisions(hashes);
  505. auto low_32bit_collisions = FindBitRangeCollisions<0, 32>(hashes);
  506. EXPECT_THAT(low_32bit_collisions.total, Eq(0));
  507. auto high_32bit_collisions = FindBitRangeCollisions<32, 64>(hashes);
  508. EXPECT_THAT(high_32bit_collisions.total, Eq(0));
  509. // Similar to 1-byte keys, we do expect a certain rate of collisions here but
  510. // bound the median and max.
  511. int min_7bit_collisions = llvm::NextPowerOf2(hashes.size() - 1) / (1 << 7);
  512. auto low_7bit_collisions = FindBitRangeCollisions<0, 7>(hashes);
  513. EXPECT_THAT(low_7bit_collisions.median, Le(2 * min_7bit_collisions));
  514. EXPECT_THAT(low_7bit_collisions.max, Le(2 * min_7bit_collisions));
  515. auto high_7bit_collisions = FindBitRangeCollisions<64 - 7, 64>(hashes);
  516. EXPECT_THAT(high_7bit_collisions.median, Le(2 * min_7bit_collisions));
  517. EXPECT_THAT(high_7bit_collisions.max, Le(2 * min_7bit_collisions));
  518. }
// Generate and hash all strings of [BeginByteCount, EndByteCount] bytes,
// with [BeginSetBitCount, EndSetBitCount] contiguous bits at each possible bit
// offset set to one and all other bits set to zero. (Both ranges are used
// inclusively by the test fixture below.)
template <int BeginByteCount, int EndByteCount, int BeginSetBitCount,
          int EndSetBitCount>
struct SparseHashTestParamRanges {
  static_assert(BeginByteCount >= 0);
  static_assert(BeginByteCount < EndByteCount);
  static_assert(BeginSetBitCount >= 0);
  static_assert(BeginSetBitCount < EndSetBitCount);
  // Note that we intentionally allow the end-set-bit-count to result in more
  // set bits than are available -- we truncate the number of set bits to fit
  // within the byte string.
  static_assert(BeginSetBitCount <= BeginByteCount * 8);

  // The range of string sizes in bytes.
  struct ByteCount {
    static constexpr int Begin = BeginByteCount;
    static constexpr int End = EndByteCount;
  };
  // The range of contiguous one-bit run lengths to place in each string.
  struct SetBitCount {
    static constexpr int Begin = BeginSetBitCount;
    static constexpr int End = EndSetBitCount;
  };
};
// Typed test fixture generating the sparse byte strings described by
// `ParamRanges` (a `SparseHashTestParamRanges` instantiation).
template <typename ParamRanges>
struct SparseHashTest : ::testing::Test {
  using ByteCount = typename ParamRanges::ByteCount;
  using SetBitCount = typename ParamRanges::SetBitCount;

  // Builds every configured sparse byte string, hashes each with `TestSeed`,
  // and returns the results sorted by hash value with duplicates checked.
  static auto GetHashedByteStrings() {
    llvm::SmallVector<HashedString> hashes;
    for (int byte_count :
         llvm::seq_inclusive(ByteCount::Begin, ByteCount::End)) {
      int bits = byte_count * 8;
      // Truncate the set-bit-count range to what fits in this string size.
      for (int set_bit_count : llvm::seq_inclusive(
               SetBitCount::Begin, std::min(bits, SetBitCount::End))) {
        if (set_bit_count == 0) {
          // Only one all-zero string exists for each size.
          std::string s(byte_count, '\0');
          hashes.push_back({HashValue(s, TestSeed), std::move(s)});
          continue;
        }
        for (int begin_set_bit : llvm::seq_inclusive(0, bits - set_bit_count)) {
          std::string s(byte_count, '\0');
          int begin_set_bit_byte_index = begin_set_bit / 8;
          int begin_set_bit_bit_index = begin_set_bit % 8;
          int end_set_bit_byte_index = (begin_set_bit + set_bit_count) / 8;
          int end_set_bit_bit_index = (begin_set_bit + set_bit_count) % 8;
          // We build a begin byte and end byte. We set the begin byte, set
          // subsequent bytes up to *and including* the end byte to all ones,
          // and then mask the end byte. For multi-byte runs, the mask just sets
          // the end byte and for single-byte runs the mask computes the
          // intersecting bits.
          //
          // Consider a 4-set-bit count, starting at bit 2. The begin bit index
          // is 2, and the end bit index is 6.
          //
          // Begin byte:  0b11111111 -(shl 2)-----> 0b11111100
          // End byte:    0b11111111 -(shr (8-6))-> 0b00111111
          // Masked byte:                           0b00111100
          //
          // Or a 10-set-bit-count starting at bit 2. The begin bit index is 2,
          // the end byte index is (12 / 8) or 1, and the end bit index is (12 %
          // 8) or 4.
          //
          // Begin byte:  0b11111111 -(shl 2)-----> 0b11111100 ->  6 bits
          // End byte:    0b11111111 -(shr (8-4))-> 0b00001111 ->  4 bits
          //                                                      10 total bits
          //
          uint8_t begin_set_bit_byte = 0xFFU << begin_set_bit_bit_index;
          uint8_t end_set_bit_byte = 0xFFU >> (8 - end_set_bit_bit_index);
          bool has_end_byte_bits = end_set_bit_byte != 0;
          s[begin_set_bit_byte_index] = begin_set_bit_byte;
          for (int i : llvm::seq(begin_set_bit_byte_index + 1,
                                 end_set_bit_byte_index + has_end_byte_bits)) {
            s[i] = '\xFF';
          }
          // If there are no bits set in the end byte, it may be past-the-end
          // and we can't even mask a zero byte safely.
          if (has_end_byte_bits) {
            s[end_set_bit_byte_index] &= end_set_bit_byte;
          }
          hashes.push_back({HashValue(s, TestSeed), std::move(s)});
        }
      }
    }
    std::sort(hashes.begin(), hashes.end(),
              [](const HashedString& lhs, const HashedString& rhs) {
                return static_cast<uint64_t>(lhs.hash) <
                       static_cast<uint64_t>(rhs.hash);
              });
    CheckNoDuplicateValues(hashes);
    return hashes;
  }
};
// The byte-count / set-bit-count range combinations to instantiate the sparse
// hash tests with.
using SparseHashTestParams = ::testing::Types<
    SparseHashTestParamRanges</*BeginByteCount=*/0, /*EndByteCount=*/256,
                              /*BeginSetBitCount=*/0, /*EndSetBitCount=*/1>,
    SparseHashTestParamRanges</*BeginByteCount=*/1, /*EndByteCount=*/128,
                              /*BeginSetBitCount=*/2, /*EndSetBitCount=*/4>,
    SparseHashTestParamRanges</*BeginByteCount=*/1, /*EndByteCount=*/64,
                              /*BeginSetBitCount=*/4, /*EndSetBitCount=*/16>>;
TYPED_TEST_SUITE(SparseHashTest, SparseHashTestParams);
TYPED_TEST(SparseHashTest, Collisions) {
  auto hashes_storage = this->GetHashedByteStrings();
  auto hashes = llvm::ArrayRef(hashes_storage);
  // Sparse inputs must not produce any full 64-bit collisions.
  ExpectNoHashCollisions(hashes);
  // We do expect collisions within a 7-bit window, but bound the median and
  // max relative to the minimum "perfect" rate for a uniform distribution.
  int min_7bit_collisions = llvm::NextPowerOf2(hashes.size() - 1) / (1 << 7);
  auto low_7bit_collisions = FindBitRangeCollisions<0, 7>(hashes);
  EXPECT_THAT(low_7bit_collisions.median, Le(2 * min_7bit_collisions));
  EXPECT_THAT(low_7bit_collisions.max, Le(2 * min_7bit_collisions));
  auto high_7bit_collisions = FindBitRangeCollisions<64 - 7, 64>(hashes);
  EXPECT_THAT(high_7bit_collisions.median, Le(2 * min_7bit_collisions));
  EXPECT_THAT(high_7bit_collisions.max, Le(2 * min_7bit_collisions));
}
  631. } // namespace
  632. } // namespace Carbon