// hashing_test.cpp
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#include "common/hashing.h"

#include <gmock/gmock.h>
#include <gtest/gtest.h>

#include <algorithm>
#include <cstdint>
#include <string>
#include <string_view>
#include <tuple>
#include <type_traits>
#include <utility>

#include "common/check.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/Sequence.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/Support/FormatVariadic.h"
#include "llvm/Support/TypeName.h"
#include "llvm/Support/raw_ostream.h"
  12. namespace Carbon {
  13. namespace {
  14. using ::testing::Eq;
  15. using ::testing::Le;
  16. using ::testing::Ne;
  17. TEST(HashingTest, HashCodeAPI) {
  18. // Manually compute a few hash codes where we can exercise the underlying API.
  19. HashCode empty = HashValue("");
  20. HashCode a = HashValue("a");
  21. HashCode b = HashValue("b");
  22. ASSERT_THAT(HashValue(""), Eq(empty));
  23. ASSERT_THAT(HashValue("a"), Eq(a));
  24. ASSERT_THAT(HashValue("b"), Eq(b));
  25. ASSERT_THAT(empty, Ne(a));
  26. ASSERT_THAT(empty, Ne(b));
  27. ASSERT_THAT(a, Ne(b));
  28. // Exercise the methods in basic ways across a few sizes. This doesn't check
  29. // much beyond stability across re-computed values, crashing, or hitting UB.
  30. EXPECT_THAT(HashValue("a").ExtractIndex(), Eq(a.ExtractIndex()));
  31. EXPECT_THAT(a.ExtractIndex(), Ne(b.ExtractIndex()));
  32. EXPECT_THAT(a.ExtractIndex(), Ne(empty.ExtractIndex()));
  33. // Note that the index produced with a tag may be different from the index
  34. // alone!
  35. EXPECT_THAT(HashValue("a").ExtractIndexAndTag<2>(),
  36. Eq(a.ExtractIndexAndTag<2>()));
  37. EXPECT_THAT(HashValue("a").ExtractIndexAndTag<16>(),
  38. Eq(a.ExtractIndexAndTag<16>()));
  39. EXPECT_THAT(HashValue("a").ExtractIndexAndTag<7>(),
  40. Eq(a.ExtractIndexAndTag<7>()));
  41. const auto [a_index, a_tag] = a.ExtractIndexAndTag<4>();
  42. const auto [b_index, b_tag] = b.ExtractIndexAndTag<4>();
  43. EXPECT_THAT(a_index, Ne(b_index));
  44. EXPECT_THAT(a_tag, Ne(b_tag));
  45. }
  46. TEST(HashingTest, Integers) {
  47. for (int64_t i : {0, 1, 2, 3, 42, -1, -2, -3, -13}) {
  48. SCOPED_TRACE(llvm::formatv("Hashing: {0}", i).str());
  49. auto test_int_hash = [](auto i) {
  50. using T = decltype(i);
  51. SCOPED_TRACE(
  52. llvm::formatv("Hashing type: {0}", llvm::getTypeName<T>()).str());
  53. HashCode hash = HashValue(i);
  54. // Hashes should be stable within the execution.
  55. EXPECT_THAT(HashValue(i), Eq(hash));
  56. // Zero should match, and other integers shouldn't collide trivially.
  57. HashCode hash_zero = HashValue(static_cast<T>(0));
  58. if (i == 0) {
  59. EXPECT_THAT(hash, Eq(hash_zero));
  60. } else {
  61. EXPECT_THAT(hash, Ne(hash_zero));
  62. }
  63. };
  64. test_int_hash(i);
  65. test_int_hash(static_cast<int8_t>(i));
  66. test_int_hash(static_cast<uint8_t>(i));
  67. test_int_hash(static_cast<int16_t>(i));
  68. test_int_hash(static_cast<uint16_t>(i));
  69. test_int_hash(static_cast<int32_t>(i));
  70. test_int_hash(static_cast<uint32_t>(i));
  71. test_int_hash(static_cast<int64_t>(i));
  72. test_int_hash(static_cast<uint64_t>(i));
  73. }
  74. }
  75. TEST(HashingTest, BasicSeeding) {
  76. auto unseeded_hash = HashValue(42);
  77. EXPECT_THAT(unseeded_hash, Ne(HashValue(42, 1)));
  78. EXPECT_THAT(unseeded_hash, Ne(HashValue(42, 2)));
  79. EXPECT_THAT(unseeded_hash, Ne(HashValue(42, 3)));
  80. EXPECT_THAT(unseeded_hash,
  81. Ne(HashValue(42, static_cast<uint64_t>(unseeded_hash))));
  82. }
  83. TEST(HashingTest, Pointers) {
  84. int object1 = 42;
  85. std::string object2 =
  86. "Hello World! This is a long-ish string so it ends up on the heap!";
  87. HashCode hash_null = HashValue(nullptr);
  88. // Hashes should be stable.
  89. EXPECT_THAT(HashValue(nullptr), Eq(hash_null));
  90. // Hash other kinds of pointers without trivial collisions.
  91. HashCode hash1 = HashValue(&object1);
  92. HashCode hash2 = HashValue(&object2);
  93. HashCode hash3 = HashValue(object2.data());
  94. EXPECT_THAT(hash1, Ne(hash_null));
  95. EXPECT_THAT(hash2, Ne(hash_null));
  96. EXPECT_THAT(hash3, Ne(hash_null));
  97. EXPECT_THAT(hash1, Ne(hash2));
  98. EXPECT_THAT(hash1, Ne(hash3));
  99. EXPECT_THAT(hash2, Ne(hash3));
  100. // Hash values reflect the address and not the type.
  101. EXPECT_THAT(HashValue(static_cast<void*>(nullptr)), Eq(hash_null));
  102. EXPECT_THAT(HashValue(static_cast<int*>(nullptr)), Eq(hash_null));
  103. EXPECT_THAT(HashValue(static_cast<std::string*>(nullptr)), Eq(hash_null));
  104. EXPECT_THAT(HashValue(reinterpret_cast<void*>(&object1)), Eq(hash1));
  105. EXPECT_THAT(HashValue(reinterpret_cast<int*>(&object2)), Eq(hash2));
  106. EXPECT_THAT(HashValue(reinterpret_cast<std::string*>(object2.data())),
  107. Eq(hash3));
  108. }
  109. TEST(HashingTest, PairsAndTuples) {
  110. // Note that we can't compare hash codes across arity, or in general, compare
  111. // hash codes for different types as the type isn't part of the hash. These
  112. // hashes are targeted at use in hash tables which pick a single type that's
  113. // the basis of any comparison.
  114. HashCode hash_00 = HashValue(std::pair(0, 0));
  115. HashCode hash_01 = HashValue(std::pair(0, 1));
  116. HashCode hash_10 = HashValue(std::pair(1, 0));
  117. HashCode hash_11 = HashValue(std::pair(1, 1));
  118. EXPECT_THAT(hash_00, Ne(hash_01));
  119. EXPECT_THAT(hash_00, Ne(hash_10));
  120. EXPECT_THAT(hash_00, Ne(hash_11));
  121. EXPECT_THAT(hash_01, Ne(hash_10));
  122. EXPECT_THAT(hash_01, Ne(hash_11));
  123. EXPECT_THAT(hash_10, Ne(hash_11));
  124. HashCode hash_000 = HashValue(std::tuple(0, 0, 0));
  125. HashCode hash_001 = HashValue(std::tuple(0, 0, 1));
  126. HashCode hash_010 = HashValue(std::tuple(0, 1, 0));
  127. HashCode hash_011 = HashValue(std::tuple(0, 1, 1));
  128. HashCode hash_100 = HashValue(std::tuple(1, 0, 0));
  129. HashCode hash_101 = HashValue(std::tuple(1, 0, 1));
  130. HashCode hash_110 = HashValue(std::tuple(1, 1, 0));
  131. HashCode hash_111 = HashValue(std::tuple(1, 1, 1));
  132. EXPECT_THAT(hash_000, Ne(hash_001));
  133. EXPECT_THAT(hash_000, Ne(hash_010));
  134. EXPECT_THAT(hash_000, Ne(hash_011));
  135. EXPECT_THAT(hash_000, Ne(hash_100));
  136. EXPECT_THAT(hash_000, Ne(hash_101));
  137. EXPECT_THAT(hash_000, Ne(hash_110));
  138. EXPECT_THAT(hash_000, Ne(hash_111));
  139. EXPECT_THAT(hash_001, Ne(hash_010));
  140. EXPECT_THAT(hash_001, Ne(hash_011));
  141. EXPECT_THAT(hash_001, Ne(hash_100));
  142. EXPECT_THAT(hash_001, Ne(hash_101));
  143. EXPECT_THAT(hash_001, Ne(hash_110));
  144. EXPECT_THAT(hash_001, Ne(hash_111));
  145. EXPECT_THAT(hash_010, Ne(hash_011));
  146. EXPECT_THAT(hash_010, Ne(hash_100));
  147. EXPECT_THAT(hash_010, Ne(hash_101));
  148. EXPECT_THAT(hash_010, Ne(hash_110));
  149. EXPECT_THAT(hash_010, Ne(hash_111));
  150. EXPECT_THAT(hash_011, Ne(hash_100));
  151. EXPECT_THAT(hash_011, Ne(hash_101));
  152. EXPECT_THAT(hash_011, Ne(hash_110));
  153. EXPECT_THAT(hash_011, Ne(hash_111));
  154. EXPECT_THAT(hash_100, Ne(hash_101));
  155. EXPECT_THAT(hash_100, Ne(hash_110));
  156. EXPECT_THAT(hash_100, Ne(hash_111));
  157. EXPECT_THAT(hash_101, Ne(hash_110));
  158. EXPECT_THAT(hash_101, Ne(hash_111));
  159. EXPECT_THAT(hash_110, Ne(hash_111));
  160. // Hashing a 2-tuple and a pair should produce identical results, so pairs
  161. // are compatible with code using things like variadic tuple construction.
  162. EXPECT_THAT(HashValue(std::tuple(0, 0)), Eq(hash_00));
  163. EXPECT_THAT(HashValue(std::tuple(0, 1)), Eq(hash_01));
  164. EXPECT_THAT(HashValue(std::tuple(1, 0)), Eq(hash_10));
  165. EXPECT_THAT(HashValue(std::tuple(1, 1)), Eq(hash_11));
  166. // Integers in tuples should also work.
  167. for (int i : {0, 1, 2, 3, 42, -1, -2, -3, -13}) {
  168. SCOPED_TRACE(llvm::formatv("Hashing: ({0}, {0}, {0})", i).str());
  169. auto test_int_tuple_hash = [](auto i) {
  170. using T = decltype(i);
  171. SCOPED_TRACE(
  172. llvm::formatv("Hashing integer type: {0}", llvm::getTypeName<T>())
  173. .str());
  174. std::tuple v = {i, i, i};
  175. HashCode hash = HashValue(v);
  176. // Hashes should be stable within the execution.
  177. EXPECT_THAT(HashValue(v), Eq(hash));
  178. // Zero should match, and other integers shouldn't collide trivially.
  179. T zero = 0;
  180. std::tuple zero_tuple = {zero, zero, zero};
  181. HashCode hash_zero = HashValue(zero_tuple);
  182. if (i == 0) {
  183. EXPECT_THAT(hash, Eq(hash_zero));
  184. } else {
  185. EXPECT_THAT(hash, Ne(hash_zero));
  186. }
  187. };
  188. test_int_tuple_hash(i);
  189. test_int_tuple_hash(static_cast<int8_t>(i));
  190. test_int_tuple_hash(static_cast<uint8_t>(i));
  191. test_int_tuple_hash(static_cast<int16_t>(i));
  192. test_int_tuple_hash(static_cast<uint16_t>(i));
  193. test_int_tuple_hash(static_cast<int32_t>(i));
  194. test_int_tuple_hash(static_cast<uint32_t>(i));
  195. test_int_tuple_hash(static_cast<int64_t>(i));
  196. test_int_tuple_hash(static_cast<uint64_t>(i));
  197. // Heterogeneous integer types should also work, but we only support
  198. // comparing against hashes of tuples with the exact same type.
  199. using T1 = std::tuple<int8_t, uint32_t, int16_t>;
  200. using T2 = std::tuple<uint32_t, int16_t, uint64_t>;
  201. if (i == 0) {
  202. EXPECT_THAT(HashValue(T1{i, i, i}), Eq(HashValue(T1{0, 0, 0})));
  203. EXPECT_THAT(HashValue(T2{i, i, i}), Eq(HashValue(T2{0, 0, 0})));
  204. } else {
  205. EXPECT_THAT(HashValue(T1{i, i, i}), Ne(HashValue(T1{0, 0, 0})));
  206. EXPECT_THAT(HashValue(T2{i, i, i}), Ne(HashValue(T2{0, 0, 0})));
  207. }
  208. }
  209. // Hash values of pointers in pairs and tuples reflect the address and not the
  210. // type. Pairs and 2-tuples give the same hash values.
  211. HashCode hash_2null = HashValue(std::pair(nullptr, nullptr));
  212. EXPECT_THAT(HashValue(std::tuple(static_cast<int*>(nullptr),
  213. static_cast<double*>(nullptr))),
  214. Eq(hash_2null));
  215. // Hash other kinds of pointers without trivial collisions.
  216. int object1 = 42;
  217. std::string object2 = "Hello world!";
  218. HashCode hash_3ptr =
  219. HashValue(std::tuple(&object1, &object2, object2.data()));
  220. EXPECT_THAT(hash_3ptr, Ne(HashValue(std::tuple(nullptr, nullptr, nullptr))));
  221. // Hash values reflect the address and not the type.
  222. EXPECT_THAT(
  223. HashValue(std::tuple(reinterpret_cast<void*>(&object1),
  224. reinterpret_cast<int*>(&object2),
  225. reinterpret_cast<std::string*>(object2.data()))),
  226. Eq(hash_3ptr));
  227. }
  228. TEST(HashingTest, BasicStrings) {
  229. llvm::SmallVector<std::pair<std::string, HashCode>> hashes;
  230. for (int size : {0, 1, 2, 4, 16, 64, 256, 1024}) {
  231. std::string s(size, 'a');
  232. hashes.push_back({s, HashValue(s)});
  233. }
  234. for (const auto& [s1, hash1] : hashes) {
  235. EXPECT_THAT(HashValue(s1), Eq(hash1));
  236. // Also check that we get the same hashes even when using string-wrapping
  237. // types.
  238. EXPECT_THAT(HashValue(std::string_view(s1)), Eq(hash1));
  239. EXPECT_THAT(HashValue(llvm::StringRef(s1)), Eq(hash1));
  240. // And some basic tests that simple things don't collide.
  241. for (const auto& [s2, hash2] : hashes) {
  242. if (s1 != s2) {
  243. EXPECT_THAT(hash1, Ne(hash2))
  244. << "Matching hashes for '" << s1 << "' and '" << s2 << "'";
  245. }
  246. }
  247. }
  248. }
  249. struct HashableType {
  250. int x;
  251. int y;
  252. int ignored = 0;
  253. friend auto CarbonHashValue(const HashableType& value, uint64_t seed)
  254. -> HashCode {
  255. Hasher hasher(seed);
  256. hasher.Hash(value.x, value.y);
  257. return static_cast<HashCode>(hasher);
  258. }
  259. };
  260. TEST(HashingTest, CustomType) {
  261. HashableType a = {.x = 1, .y = 2};
  262. HashableType b = {.x = 3, .y = 4};
  263. EXPECT_THAT(HashValue(a), Eq(HashValue(a)));
  264. EXPECT_THAT(HashValue(a), Ne(HashValue(b)));
  265. // Differences in an ignored field have no impact.
  266. HashableType c = {.x = 3, .y = 4, .ignored = 42};
  267. EXPECT_THAT(HashValue(c), Eq(HashValue(b)));
  268. }
  269. // The only significantly bad seed is zero, so pick a non-zero seed with a tiny
  270. // amount of entropy to make sure that none of the testing relies on the entropy
  271. // from this.
  272. constexpr uint64_t TestSeed = 42ULL * 1024;
  273. auto ToHexBytes(llvm::StringRef s) -> std::string {
  274. std::string rendered;
  275. llvm::raw_string_ostream os(rendered);
  276. os << "{";
  277. llvm::ListSeparator sep(", ");
  278. for (const char c : s) {
  279. os << sep << llvm::formatv("{0:x2}", static_cast<uint8_t>(c));
  280. }
  281. os << "}";
  282. return rendered;
  283. }
  284. template <typename T>
  285. struct HashedValue {
  286. HashCode hash;
  287. T v;
  288. };
  289. using HashedString = HashedValue<std::string>;
  290. template <typename T>
  291. auto PrintFullWidthHex(llvm::raw_ostream& os, T value) {
  292. static_assert(sizeof(T) == 1 || sizeof(T) == 2 || sizeof(T) == 4 ||
  293. sizeof(T) == 8);
  294. os << llvm::formatv(sizeof(T) == 1 ? "{0:x2}"
  295. : sizeof(T) == 2 ? "{0:x4}"
  296. : sizeof(T) == 4 ? "{0:x8}"
  297. : "{0:x16}",
  298. static_cast<uint64_t>(value));
  299. }
  300. template <typename T, typename = std::enable_if_t<std::is_integral_v<T>>>
  301. auto operator<<(llvm::raw_ostream& os, HashedValue<T> hv)
  302. -> llvm::raw_ostream& {
  303. os << "hash " << hv.hash << " for value ";
  304. PrintFullWidthHex(os, hv.v);
  305. return os;
  306. }
  307. template <typename T, typename U,
  308. typename = std::enable_if_t<std::is_integral_v<T>>,
  309. typename = std::enable_if_t<std::is_integral_v<U>>>
  310. auto operator<<(llvm::raw_ostream& os, HashedValue<std::pair<T, U>> hv)
  311. -> llvm::raw_ostream& {
  312. os << "hash " << hv.hash << " for pair of ";
  313. PrintFullWidthHex(os, hv.v.first);
  314. os << " and ";
  315. PrintFullWidthHex(os, hv.v.second);
  316. return os;
  317. }
  318. struct Collisions {
  319. int total;
  320. int median;
  321. int max;
  322. };
  323. // Analyzes a list of hashed values to find all of the hash codes which collide
  324. // within a specific bit-range.
  325. //
  326. // With `BitBegin=0` and `BitEnd=64`, this is equivalent to finding full
  327. // collisions. But when the begin and end of the bit range are narrower than the
  328. // 64-bits of the hash code, it allows this function to analyze a specific
  329. // window of bits within the 64-bit hash code to understand how many collisions
  330. // emerge purely within that bit range.
  331. //
  332. // With narrow ranges (we often look at the first N and last N bits for small
  333. // N), collisions are common and so this function summarizes this with the total
  334. // number of collisions and the median number of collisions for an input value.
  335. template <int BitBegin, int BitEnd, typename T>
  336. auto FindBitRangeCollisions(llvm::ArrayRef<HashedValue<T>> hashes)
  337. -> Collisions {
  338. static_assert(BitBegin < BitEnd);
  339. constexpr int BitCount = BitEnd - BitBegin;
  340. static_assert(BitCount <= 32);
  341. constexpr int BitShift = BitBegin;
  342. constexpr uint64_t BitMask = ((1ULL << BitCount) - 1) << BitShift;
  343. // We collect counts of collisions in a vector. Initially, we just have a zero
  344. // and all inputs map to that collision count. As we discover collisions,
  345. // we'll create a dedicated counter for it and count how many inputs collide.
  346. llvm::SmallVector<int> collision_counts;
  347. collision_counts.push_back(0);
  348. // The "map" for collision counts. Each input hashed value has a corresponding
  349. // index stored here. That index is the index of the collision count in the
  350. // container above. We resize this to fill it with zeros to start as the zero
  351. // index above has a collision count of zero.
  352. //
  353. // The result of this is that the number of collisions for `hashes[i]` is
  354. // `collision_counts[collision_map[i]]`.
  355. llvm::SmallVector<int> collision_map;
  356. collision_map.resize(hashes.size());
  357. // First, we extract the bit subsequence we want to examine from each hash and
  358. // store it with an index back into the hashed values (or the collision map).
  359. //
  360. // The result is that, `bits_and_indices[i].bits` has the hash bits of
  361. // interest from `hashes[bits_and_indices[i].index]`.
  362. //
  363. // And because `collision_map` above uses the same indices as `hashes`,
  364. // `collision_counts[collision_map[bits_and_indices[i].index]]` is the number
  365. // of collisions for `bits_and_indices[i].bits`.
  366. struct BitSequenceAndHashIndex {
  367. // The bit subsequence of a hash input, adjusted into the low bits.
  368. uint32_t bits;
  369. // The index of the hash input corresponding to this bit sequence.
  370. int index;
  371. };
  372. llvm::SmallVector<BitSequenceAndHashIndex> bits_and_indices;
  373. bits_and_indices.reserve(hashes.size());
  374. for (const auto& [hash, v] : hashes) {
  375. CARBON_DCHECK(v == hashes[bits_and_indices.size()].v);
  376. auto hash_bits = (static_cast<uint64_t>(hash) & BitMask) >> BitShift;
  377. bits_and_indices.push_back(
  378. {.bits = static_cast<uint32_t>(hash_bits),
  379. .index = static_cast<int>(bits_and_indices.size())});
  380. }
  381. // Now we sort by the extracted bit sequence so we can efficiently scan for
  382. // colliding bit patterns.
  383. std::sort(
  384. bits_and_indices.begin(), bits_and_indices.end(),
  385. [](const auto& lhs, const auto& rhs) { return lhs.bits < rhs.bits; });
  386. // Scan the sorted bit sequences we've extracted looking for collisions. We
  387. // count the total collisions, but we also track the number of individual
  388. // inputs that collide with each specific bit pattern.
  389. uint32_t prev_hash_bits = bits_and_indices[0].bits;
  390. int prev_index = bits_and_indices[0].index;
  391. bool in_collision = false;
  392. int total = 0;
  393. for (const auto& [hash_bits, hash_index] :
  394. llvm::ArrayRef(bits_and_indices).slice(1)) {
  395. // Check if we've found a new hash (and thus a new value), reset everything.
  396. CARBON_CHECK(hashes[prev_index].v != hashes[hash_index].v);
  397. if (hash_bits != prev_hash_bits) {
  398. CARBON_CHECK(hashes[prev_index].hash != hashes[hash_index].hash);
  399. prev_hash_bits = hash_bits;
  400. prev_index = hash_index;
  401. in_collision = false;
  402. continue;
  403. }
  404. // Otherwise, we have a colliding bit sequence.
  405. ++total;
  406. // If we've already created a collision count to track this, just increment
  407. // it and map this hash to it.
  408. if (in_collision) {
  409. ++collision_counts.back();
  410. collision_map[hash_index] = collision_counts.size() - 1;
  411. continue;
  412. }
  413. // If this is a new collision, create a dedicated count to track it and
  414. // begin counting.
  415. in_collision = true;
  416. collision_map[prev_index] = collision_counts.size();
  417. collision_map[hash_index] = collision_counts.size();
  418. collision_counts.push_back(1);
  419. }
  420. // Sort by collision count for each hash.
  421. std::sort(bits_and_indices.begin(), bits_and_indices.end(),
  422. [&](const auto& lhs, const auto& rhs) {
  423. return collision_counts[collision_map[lhs.index]] <
  424. collision_counts[collision_map[rhs.index]];
  425. });
  426. // And compute the median and max.
  427. int median = collision_counts
  428. [collision_map[bits_and_indices[bits_and_indices.size() / 2].index]];
  429. int max = *std::max_element(collision_counts.begin(), collision_counts.end());
  430. CARBON_CHECK(max ==
  431. collision_counts[collision_map[bits_and_indices.back().index]]);
  432. return {.total = total, .median = median, .max = max};
  433. }
  434. auto CheckNoDuplicateValues(llvm::ArrayRef<HashedString> hashes) -> void {
  435. for (int i = 0, size = hashes.size(); i < size - 1; ++i) {
  436. const auto& [_, value] = hashes[i];
  437. CARBON_CHECK(value != hashes[i + 1].v) << "Duplicate value: " << value;
  438. }
  439. }
  440. template <int N>
  441. auto AllByteStringsHashedAndSorted() {
  442. static_assert(N < 5, "Can only generate all 4-byte strings or shorter.");
  443. llvm::SmallVector<HashedString> hashes;
  444. int64_t count = 1LL << (N * 8);
  445. for (int64_t i : llvm::seq(count)) {
  446. uint8_t bytes[N];
  447. for (int j : llvm::seq(N)) {
  448. bytes[j] = (static_cast<uint64_t>(i) >> (8 * j)) & 0xff;
  449. }
  450. std::string s(std::begin(bytes), std::end(bytes));
  451. hashes.push_back({HashValue(s, TestSeed), s});
  452. }
  453. std::sort(hashes.begin(), hashes.end(),
  454. [](const HashedString& lhs, const HashedString& rhs) {
  455. return static_cast<uint64_t>(lhs.hash) <
  456. static_cast<uint64_t>(rhs.hash);
  457. });
  458. CheckNoDuplicateValues(hashes);
  459. return hashes;
  460. }
  461. auto ExpectNoHashCollisions(llvm::ArrayRef<HashedString> hashes) -> void {
  462. HashCode prev_hash = hashes[0].hash;
  463. llvm::StringRef prev_s = hashes[0].v;
  464. for (const auto& [hash, s] : hashes.slice(1)) {
  465. if (hash != prev_hash) {
  466. prev_hash = hash;
  467. prev_s = s;
  468. continue;
  469. }
  470. FAIL() << "Colliding hash '" << hash << "' of strings "
  471. << ToHexBytes(prev_s) << " and " << ToHexBytes(s);
  472. }
  473. }
  474. TEST(HashingTest, Collisions1ByteSized) {
  475. auto hashes_storage = AllByteStringsHashedAndSorted<1>();
  476. auto hashes = llvm::ArrayRef(hashes_storage);
  477. ExpectNoHashCollisions(hashes);
  478. auto low_32bit_collisions = FindBitRangeCollisions<0, 32>(hashes);
  479. EXPECT_THAT(low_32bit_collisions.total, Eq(0));
  480. auto high_32bit_collisions = FindBitRangeCollisions<32, 64>(hashes);
  481. EXPECT_THAT(high_32bit_collisions.total, Eq(0));
  482. // We expect collisions when only looking at 7-bits of the hash. However,
  483. // modern hash table designs need to use either the low or high 7 bits as tags
  484. // for faster searching. So we add some direct testing that the median and max
  485. // collisions for any given key stay within bounds. We express the bounds in
  486. // terms of the minimum expected "perfect" rate of collisions if uniformly
  487. // distributed.
  488. int min_7bit_collisions = llvm::NextPowerOf2(hashes.size() - 1) / (1 << 7);
  489. auto low_7bit_collisions = FindBitRangeCollisions<0, 7>(hashes);
  490. EXPECT_THAT(low_7bit_collisions.median, Le(2 * min_7bit_collisions));
  491. EXPECT_THAT(low_7bit_collisions.max, Le(4 * min_7bit_collisions));
  492. auto high_7bit_collisions = FindBitRangeCollisions<64 - 7, 64>(hashes);
  493. EXPECT_THAT(high_7bit_collisions.median, Le(2 * min_7bit_collisions));
  494. EXPECT_THAT(high_7bit_collisions.max, Le(4 * min_7bit_collisions));
  495. }
  496. TEST(HashingTest, Collisions2ByteSized) {
  497. auto hashes_storage = AllByteStringsHashedAndSorted<2>();
  498. auto hashes = llvm::ArrayRef(hashes_storage);
  499. ExpectNoHashCollisions(hashes);
  500. auto low_32bit_collisions = FindBitRangeCollisions<0, 32>(hashes);
  501. EXPECT_THAT(low_32bit_collisions.total, Eq(0));
  502. auto high_32bit_collisions = FindBitRangeCollisions<32, 64>(hashes);
  503. EXPECT_THAT(high_32bit_collisions.total, Eq(0));
  504. // Similar to 1-byte keys, we do expect a certain rate of collisions here but
  505. // bound the median and max.
  506. int min_7bit_collisions = llvm::NextPowerOf2(hashes.size() - 1) / (1 << 7);
  507. auto low_7bit_collisions = FindBitRangeCollisions<0, 7>(hashes);
  508. EXPECT_THAT(low_7bit_collisions.median, Le(2 * min_7bit_collisions));
  509. EXPECT_THAT(low_7bit_collisions.max, Le(2 * min_7bit_collisions));
  510. auto high_7bit_collisions = FindBitRangeCollisions<64 - 7, 64>(hashes);
  511. EXPECT_THAT(high_7bit_collisions.median, Le(2 * min_7bit_collisions));
  512. EXPECT_THAT(high_7bit_collisions.max, Le(2 * min_7bit_collisions));
  513. }
  514. // Generate and hash all strings of of [BeginByteCount, EndByteCount) bytes,
  515. // with [BeginSetBitCount, EndSetBitCount) contiguous bits at each possible bit
  516. // offset set to one and all other bits set to zero.
  517. template <int BeginByteCount, int EndByteCount, int BeginSetBitCount,
  518. int EndSetBitCount>
  519. struct SparseHashTestParamRanges {
  520. static_assert(BeginByteCount >= 0);
  521. static_assert(BeginByteCount < EndByteCount);
  522. static_assert(BeginSetBitCount >= 0);
  523. static_assert(BeginSetBitCount < EndSetBitCount);
  524. // Note that we intentionally allow the end-set-bit-count to result in more
  525. // set bits than are available -- we truncate the number of set bits to fit
  526. // within the byte string.
  527. static_assert(BeginSetBitCount <= BeginByteCount * 8);
  528. struct ByteCount {
  529. static constexpr int Begin = BeginByteCount;
  530. static constexpr int End = EndByteCount;
  531. };
  532. struct SetBitCount {
  533. static constexpr int Begin = BeginSetBitCount;
  534. static constexpr int End = EndSetBitCount;
  535. };
  536. };
  537. template <typename ParamRanges>
  538. struct SparseHashTest : ::testing::Test {
  539. using ByteCount = typename ParamRanges::ByteCount;
  540. using SetBitCount = typename ParamRanges::SetBitCount;
  541. static auto GetHashedByteStrings() {
  542. llvm::SmallVector<HashedString> hashes;
  543. for (int byte_count :
  544. llvm::seq_inclusive(ByteCount::Begin, ByteCount::End)) {
  545. int bits = byte_count * 8;
  546. for (int set_bit_count : llvm::seq_inclusive(
  547. SetBitCount::Begin, std::min(bits, SetBitCount::End))) {
  548. if (set_bit_count == 0) {
  549. std::string s(byte_count, '\0');
  550. hashes.push_back({HashValue(s, TestSeed), std::move(s)});
  551. continue;
  552. }
  553. for (int begin_set_bit : llvm::seq_inclusive(0, bits - set_bit_count)) {
  554. std::string s(byte_count, '\0');
  555. int begin_set_bit_byte_index = begin_set_bit / 8;
  556. int begin_set_bit_bit_index = begin_set_bit % 8;
  557. int end_set_bit_byte_index = (begin_set_bit + set_bit_count) / 8;
  558. int end_set_bit_bit_index = (begin_set_bit + set_bit_count) % 8;
  559. // We build a begin byte and end byte. We set the begin byte, set
  560. // subsequent bytes up to *and including* the end byte to all ones,
  561. // and then mask the end byte. For multi-byte runs, the mask just sets
  562. // the end byte and for single-byte runs the mask computes the
  563. // intersecting bits.
  564. //
  565. // Consider a 4-set-bit count, starting at bit 2. The begin bit index
  566. // is 2, and the end bit index is 6.
  567. //
  568. // Begin byte: 0b11111111 -(shl 2)-----> 0b11111100
  569. // End byte: 0b11111111 -(shr (8-6))-> 0b00111111
  570. // Masked byte: 0b00111100
  571. //
  572. // Or a 10-set-bit-count starting at bit 2. The begin bit index is 2,
  573. // the end byte index is (12 / 8) or 1, and the end bit index is (12 %
  574. // 8) or 4.
  575. //
  576. // Begin byte: 0b11111111 -(shl 2)-----> 0b11111100 -> 6 bits
  577. // End byte: 0b11111111 -(shr (8-4))-> 0b00001111 -> 4 bits
  578. // 10 total bits
  579. //
  580. uint8_t begin_set_bit_byte = 0xFFU << begin_set_bit_bit_index;
  581. uint8_t end_set_bit_byte = 0xFFU >> (8 - end_set_bit_bit_index);
  582. bool has_end_byte_bits = end_set_bit_byte != 0;
  583. s[begin_set_bit_byte_index] = begin_set_bit_byte;
  584. for (int i : llvm::seq(begin_set_bit_byte_index + 1,
  585. end_set_bit_byte_index + has_end_byte_bits)) {
  586. s[i] = '\xFF';
  587. }
  588. // If there are no bits set in the end byte, it may be past-the-end
  589. // and we can't even mask a zero byte safely.
  590. if (has_end_byte_bits) {
  591. s[end_set_bit_byte_index] &= end_set_bit_byte;
  592. }
  593. hashes.push_back({HashValue(s, TestSeed), std::move(s)});
  594. }
  595. }
  596. }
  597. std::sort(hashes.begin(), hashes.end(),
  598. [](const HashedString& lhs, const HashedString& rhs) {
  599. return static_cast<uint64_t>(lhs.hash) <
  600. static_cast<uint64_t>(rhs.hash);
  601. });
  602. CheckNoDuplicateValues(hashes);
  603. return hashes;
  604. }
  605. };
  606. using SparseHashTestParams = ::testing::Types<
  607. SparseHashTestParamRanges</*BeginByteCount=*/0, /*EndByteCount=*/256,
  608. /*BeginSetBitCount=*/0, /*EndSetBitCount=*/1>,
  609. SparseHashTestParamRanges</*BeginByteCount=*/1, /*EndByteCount=*/128,
  610. /*BeginSetBitCount=*/2, /*EndSetBitCount=*/4>,
  611. SparseHashTestParamRanges</*BeginByteCount=*/1, /*EndByteCount=*/64,
  612. /*BeginSetBitCount=*/4, /*EndSetBitCount=*/16>>;
  613. TYPED_TEST_SUITE(SparseHashTest, SparseHashTestParams);
  614. TYPED_TEST(SparseHashTest, Collisions) {
  615. auto hashes_storage = this->GetHashedByteStrings();
  616. auto hashes = llvm::ArrayRef(hashes_storage);
  617. ExpectNoHashCollisions(hashes);
  618. int min_7bit_collisions = llvm::NextPowerOf2(hashes.size() - 1) / (1 << 7);
  619. auto low_7bit_collisions = FindBitRangeCollisions<0, 7>(hashes);
  620. EXPECT_THAT(low_7bit_collisions.median, Le(2 * min_7bit_collisions));
  621. EXPECT_THAT(low_7bit_collisions.max, Le(2 * min_7bit_collisions));
  622. auto high_7bit_collisions = FindBitRangeCollisions<64 - 7, 64>(hashes);
  623. EXPECT_THAT(high_7bit_collisions.median, Le(2 * min_7bit_collisions));
  624. EXPECT_THAT(high_7bit_collisions.max, Le(2 * min_7bit_collisions));
  625. }
  626. } // namespace
  627. } // namespace Carbon