From 12ef321b18b2f3a0ff94270d47dec1f91fb8b4ed Mon Sep 17 00:00:00 2001
From: Tor Egge <Tor.Egge@online.no>
Date: Tue, 4 Oct 2022 23:31:58 +0200
Subject: Add vespalib::datastore::CompactingBuffers.

---
 .../tests/btree/btree-stress/btree_stress_test.cpp | 27 ++++++++----------
 .../tests/btree/btree_store/btree_store_test.cpp   | 18 +++++++--------
 2 files changed, 17 insertions(+), 28 deletions(-)

(limited to 'vespalib/src/tests/btree')

diff --git a/vespalib/src/tests/btree/btree-stress/btree_stress_test.cpp b/vespalib/src/tests/btree/btree-stress/btree_stress_test.cpp
index 4716e91c2c4..c68ff07491e 100644
--- a/vespalib/src/tests/btree/btree-stress/btree_stress_test.cpp
+++ b/vespalib/src/tests/btree/btree-stress/btree_stress_test.cpp
@@ -64,8 +64,7 @@ public:
     uint32_t get(EntryRef ref) const { return _store.getEntry(ref); }
     uint32_t get_acquire(const AtomicEntryRef& ref) const { return get(ref.load_acquire()); }
     uint32_t get_relaxed(const AtomicEntryRef& ref) const { return get(ref.load_relaxed()); }
-    std::vector<uint32_t> start_compact();
-    void finish_compact(std::vector<uint32_t> to_hold);
+    std::unique_ptr<vespalib::datastore::CompactingBuffers> start_compact();
     static constexpr bool is_indirect = true;
     static uint32_t get_offset_bits() { return StoreRefType::offset_bits; }
     static uint32_t get_num_buffers() { return StoreRefType::numBuffers(); }
@@ -79,19 +78,13 @@ RealIntStore::RealIntStore()
 
 RealIntStore::~RealIntStore() = default;
 
-std::vector<uint32_t>
+std::unique_ptr<vespalib::datastore::CompactingBuffers>
 RealIntStore::start_compact()
 {
     // Use a compaction strategy that will compact all active buffers
     CompactionStrategy compaction_strategy(0.0, 0.0, get_num_buffers(), 1.0);
     CompactionSpec compaction_spec(true, false);
-    return _store.startCompactWorstBuffers(compaction_spec, compaction_strategy);
-}
-
-void
-RealIntStore::finish_compact(std::vector<uint32_t> to_hold)
-{
-    _store.finishCompact(to_hold);
+    return _store.start_compact_worst_buffers(compaction_spec, compaction_strategy);
 }
 
 EntryRef
@@ -347,9 +340,8 @@ void
 Fixture::compact_keys()
 {
     if constexpr (KeyStore::is_indirect) {
-        auto to_hold = _keys.start_compact();
-        EntryRefFilter filter(_keys.get_num_buffers(), _keys.get_offset_bits());
-        filter.add_buffers(to_hold);
+        auto compacting_buffers = _keys.start_compact();
+        auto filter = compacting_buffers->make_entry_ref_filter();
         auto itr = _tree.begin();
         while (itr.valid()) {
             auto old_ref = itr.getKey().load_relaxed();
@@ -359,7 +351,7 @@ Fixture::compact_keys()
             }
             ++itr;
         }
-        _keys.finish_compact(std::move(to_hold));
+        compacting_buffers->finish();
     }
     _compact_keys.track_compacted();
 }
@@ -369,9 +361,8 @@ void
 Fixture::compact_values()
 {
     if constexpr (ValueStore::is_indirect) {
-        auto to_hold = _values.start_compact();
-        EntryRefFilter filter(_values.get_num_buffers(), _values.get_offset_bits());
-        filter.add_buffers(to_hold);
+        auto compacting_buffers = _values.start_compact();
+        auto filter = compacting_buffers->make_entry_ref_filter();
         auto itr = _tree.begin();
         while (itr.valid()) {
             auto old_ref = itr.getData().load_relaxed();
@@ -381,7 +372,7 @@ Fixture::compact_values()
             }
             ++itr;
         }
-        _values.finish_compact(std::move(to_hold));
+        compacting_buffers->finish();
     }
     _compact_values.track_compacted();
 }
diff --git a/vespalib/src/tests/btree/btree_store/btree_store_test.cpp b/vespalib/src/tests/btree/btree_store/btree_store_test.cpp
index 5e2aa89b59e..4da34c64ed9 100644
--- a/vespalib/src/tests/btree/btree_store/btree_store_test.cpp
+++ b/vespalib/src/tests/btree/btree_store/btree_store_test.cpp
@@ -5,7 +5,9 @@
 #include <vespa/vespalib/btree/btreestore.h>
 #include <vespa/vespalib/btree/btreenodeallocator.hpp>
 #include <vespa/vespalib/btree/btreestore.hpp>
+#include <vespa/vespalib/datastore/compacting_buffers.h>
 #include <vespa/vespalib/datastore/compaction_strategy.h>
+#include <vespa/vespalib/datastore/entry_ref_filter.h>
 #include <vespa/vespalib/gtest/gtest.h>
 
 using vespalib::GenerationHandler;
@@ -114,7 +116,6 @@ void
 BTreeStoreTest::test_compact_sequence(uint32_t sequence_length)
 {
     auto &store = _store;
-    uint32_t entry_ref_offset_bits = TreeStore::RefType::offset_bits;
     EntryRef ref1 = add_sequence(4, 4 + sequence_length);
     EntryRef ref2 = add_sequence(5, 5 + sequence_length);
     std::vector<EntryRef> refs;
@@ -136,13 +137,10 @@ BTreeStoreTest::test_compact_sequence(uint32_t sequence_length)
     for (uint32_t pass = 0; pass < 15; ++pass) {
         CompactionSpec compaction_spec(true, false);
         CompactionStrategy compaction_strategy;
-        auto to_hold = store.start_compact_worst_buffers(compaction_spec, compaction_strategy);
-        std::vector<bool> filter(TreeStore::RefType::numBuffers());
-        for (auto buffer_id : to_hold) {
-            filter[buffer_id] = true;
-        }
+        auto compacting_buffers = store.start_compact_worst_buffers(compaction_spec, compaction_strategy);
+        auto filter = compacting_buffers->make_entry_ref_filter();
         for (auto& ref : refs) {
-            if (ref.valid() && filter[ref.buffer_id(entry_ref_offset_bits)]) {
+            if (ref.valid() && filter.has(ref)) {
                 move_refs.emplace_back(ref);
                 change_writer.emplace_back(ref);
             }
@@ -150,7 +148,7 @@ BTreeStoreTest::test_compact_sequence(uint32_t sequence_length)
         store.move(move_refs);
         change_writer.write(move_refs);
         move_refs.clear();
-        store.finishCompact(to_hold);
+        compacting_buffers->finish();
         inc_generation();
     }
     EXPECT_NE(ref1, refs[0]);
@@ -174,9 +172,9 @@ TEST_F(BTreeStoreTest, require_that_nodes_for_multiple_btrees_are_compacted)
     auto usage_before = store.getMemoryUsage();
     for (uint32_t pass = 0; pass < 15; ++pass) {
         CompactionStrategy compaction_strategy;
-        auto to_hold = store.start_compact_worst_btree_nodes(compaction_strategy);
+        auto compacting_buffers = store.start_compact_worst_btree_nodes(compaction_strategy);
         store.move_btree_nodes(refs);
-        store.finish_compact_worst_btree_nodes(to_hold);
+        compacting_buffers->finish();
         inc_generation();
     }
     EXPECT_EQ(make_exp_sequence(4, 40), get_sequence(refs[0]));
-- 
cgit v1.2.3