From 1330d2c3d3b8647b6053ac37e95503cd0278e2e3 Mon Sep 17 00:00:00 2001
From: Tor Egge
Date: Mon, 6 Dec 2021 15:44:26 +0100
Subject: Add EntryRefFilter class.

---
 .../sharded_hash_map/sharded_hash_map_test.cpp     | 51 ++++++++++++----------
 1 file changed, 29 insertions(+), 22 deletions(-)

(limited to 'vespalib/src/tests/datastore/sharded_hash_map/sharded_hash_map_test.cpp')

diff --git a/vespalib/src/tests/datastore/sharded_hash_map/sharded_hash_map_test.cpp b/vespalib/src/tests/datastore/sharded_hash_map/sharded_hash_map_test.cpp
index 138dedbdd38..796e19a97d1 100644
--- a/vespalib/src/tests/datastore/sharded_hash_map/sharded_hash_map_test.cpp
+++ b/vespalib/src/tests/datastore/sharded_hash_map/sharded_hash_map_test.cpp
@@ -1,6 +1,7 @@
 // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
 
 #include
+#include <vespa/vespalib/datastore/entry_ref_filter.h>
 #include
 #include
 #include
@@ -19,6 +20,7 @@
 LOG_SETUP("vespalib_datastore_shared_hash_test");
 
 using vespalib::datastore::EntryRef;
+using vespalib::datastore::EntryRefFilter;
 using vespalib::datastore::ICompactable;
 using RefT = vespalib::datastore::EntryRefT<22>;
 using MyAllocator = vespalib::datastore::UniqueStoreAllocator;
@@ -123,7 +125,7 @@ struct DataStoreShardedHashTest : public ::testing::Test
     void populate_sample_data(uint32_t cnt);
     void populate_sample_values(uint32_t cnt);
     void clear_sample_values(uint32_t cnt);
-    void test_normalize_values(bool filter, bool one_filter);
+    void test_normalize_values(bool use_filter, bool one_filter);
     void test_foreach_value(bool one_filter);
 };
 
@@ -282,20 +284,30 @@ DataStoreShardedHashTest::clear_sample_values(uint32_t cnt)
     }
 }
 
+namespace {
+
+template <typename RefT>
+EntryRefFilter
+make_entry_ref_filter(bool one_filter)
+{
+    if (one_filter) {
+        EntryRefFilter filter(RefT::numBuffers(), RefT::offset_bits);
+        filter.add_buffer(3);
+        return filter;
+    }
+    return EntryRefFilter::create_all_filter(RefT::numBuffers(), RefT::offset_bits);
+}
+
+}
+
 void
-DataStoreShardedHashTest::test_normalize_values(bool filter, bool one_filter)
+DataStoreShardedHashTest::test_normalize_values(bool use_filter, bool one_filter)
 {
     populate_sample_data(large_population);
     populate_sample_values(large_population);
-    if (filter) {
-        std::vector<bool> bfilter;
-        if (one_filter) {
-            bfilter = std::vector<bool>(RefT::numBuffers());
-            bfilter[3] = true;
-        } else {
-            bfilter = std::vector<bool>(RefT::numBuffers(), true);
-        }
-        EXPECT_TRUE(_hash_map.normalize_values([](std::vector<EntryRef> &refs) noexcept { for (auto &ref : refs) { RefT iref(ref); ref = RefT(iref.offset() + 300, iref.bufferId()); } }, bfilter, RefT::offset_bits));
+    if (use_filter) {
+        auto filter = make_entry_ref_filter<RefT>(one_filter);
+        EXPECT_TRUE(_hash_map.normalize_values([](std::vector<EntryRef> &refs) noexcept { for (auto &ref : refs) { RefT iref(ref); ref = RefT(iref.offset() + 300, iref.bufferId()); } }, filter));
     } else {
         EXPECT_TRUE(_hash_map.normalize_values([](EntryRef ref) noexcept { RefT iref(ref); return RefT(iref.offset() + 300, iref.bufferId()); }));
     }
@@ -305,7 +317,7 @@ DataStoreShardedHashTest::test_normalize_values(bool filter, bool one_filter)
         ASSERT_NE(result, nullptr);
         EXPECT_EQ(i, _allocator.get_wrapped(result->first.load_relaxed()).value());
         ASSERT_EQ(select_buffer(i), RefT(result->second.load_relaxed()).bufferId());
-        if (filter && one_filter && select_buffer(i) != 3) {
+        if (use_filter && one_filter && select_buffer(i) != 3) {
            ASSERT_EQ(i + 200, RefT(result->second.load_relaxed()).offset());
         } else {
            ASSERT_EQ(i + 500, RefT(result->second.load_relaxed()).offset());
@@ -320,17 +332,11 @@ DataStoreShardedHashTest::test_foreach_value(bool one_filter)
 {
     populate_sample_data(large_population);
     populate_sample_values(large_population);
-    std::vector<bool> bfilter;
-    if (one_filter) {
-        bfilter = std::vector<bool>(RefT::numBuffers());
-        bfilter[3] = true;
-    } else {
-        bfilter = std::vector<bool>(RefT::numBuffers(), true);
-    }
+    auto filter = make_entry_ref_filter<RefT>(one_filter);
     std::vector<EntryRef> exp_refs;
-    EXPECT_FALSE(_hash_map.normalize_values([&exp_refs](std::vector<EntryRef>& refs) { exp_refs.insert(exp_refs.end(), refs.begin(), refs.end()); }, bfilter, RefT::offset_bits));
+    EXPECT_FALSE(_hash_map.normalize_values([&exp_refs](std::vector<EntryRef>& refs) { exp_refs.insert(exp_refs.end(), refs.begin(), refs.end()); }, filter));
     std::vector<EntryRef> act_refs;
-    _hash_map.foreach_value([&act_refs](const std::vector<EntryRef> &refs) { act_refs.insert(act_refs.end(), refs.begin(), refs.end()); }, bfilter, RefT::offset_bits);
+    _hash_map.foreach_value([&act_refs](const std::vector<EntryRef> &refs) { act_refs.insert(act_refs.end(), refs.begin(), refs.end()); }, filter);
     EXPECT_EQ(exp_refs, act_refs);
     clear_sample_values(large_population);
 }
@@ -396,7 +402,8 @@ TEST_F(DataStoreShardedHashTest, move_keys_works)
     _hash_map.foreach_key([&refs](EntryRef ref) { refs.emplace_back(ref); });
     std::vector<EntryRef> new_refs;
     MyCompactable my_compactable(_allocator, new_refs);
-    _hash_map.move_keys(my_compactable, std::vector<bool>(RefT::numBuffers(), true), RefT::offset_bits);
+    auto filter = make_entry_ref_filter<RefT>(false);
+    _hash_map.move_keys(my_compactable, filter);
     std::vector<EntryRef> verify_new_refs;
     _hash_map.foreach_key([&verify_new_refs](EntryRef ref) { verify_new_refs.emplace_back(ref); });
     EXPECT_EQ(small_population, refs.size());
--
cgit v1.2.3
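
Editor's note: below is a rough, self-contained sketch of the idea behind the EntryRefFilter used in the patch. It is not the vespalib implementation; only the (num_buffers, offset_bits) constructor, add_buffer(), and create_all_filter() appear in the diff, while the class name EntryRefFilterSketch, the has() check, and the 1024-buffer count (implied by EntryRefT<22> on 32-bit refs) are assumptions made for illustration.

// Sketch only: approximates the role an EntryRefFilter plays in the test above.
// An entry ref packs a buffer id above 'offset_bits' low offset bits; the filter
// is a per-buffer bitmap, so iteration callbacks can be limited to chosen buffers.
#include <cassert>
#include <cstdint>
#include <vector>

class EntryRefFilterSketch {
    std::vector<bool> _buffers;   // one flag per buffer id
    uint32_t _offset_bits;        // low bits of a ref hold the offset, high bits the buffer id
public:
    EntryRefFilterSketch(uint32_t num_buffers, uint32_t offset_bits)
        : _buffers(num_buffers, false), _offset_bits(offset_bits) {}
    void add_buffer(uint32_t buffer_id) { _buffers[buffer_id] = true; }
    static EntryRefFilterSketch create_all_filter(uint32_t num_buffers, uint32_t offset_bits) {
        EntryRefFilterSketch f(num_buffers, offset_bits);
        f._buffers.assign(num_buffers, true);
        return f;
    }
    bool has(uint32_t ref) const { return _buffers[ref >> _offset_bits]; }
};

int main() {
    constexpr uint32_t offset_bits = 22;        // mirrors EntryRefT<22> in the test
    constexpr uint32_t num_buffers = 1u << 10;  // hypothetical buffer count (32 - 22 bits)
    EntryRefFilterSketch one(num_buffers, offset_bits);
    one.add_buffer(3);                          // only refs in buffer 3 pass
    assert(one.has((3u << offset_bits) | 42u));
    assert(!one.has((4u << offset_bits) | 42u));
    auto all = EntryRefFilterSketch::create_all_filter(num_buffers, offset_bits);
    assert(all.has((7u << offset_bits) | 1u));  // every buffer passes
    return 0;
}

The net effect of the commit, as visible in the diff, is that normalize_values(), foreach_value(), and move_keys() now take a single filter object instead of a separate std::vector<bool> plus an offset_bits argument.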