From 79e553fb451b09437dad80070597fa317515ae3a Mon Sep 17 00:00:00 2001
From: Tor Egge
Date: Tue, 11 Oct 2022 15:56:25 +0200
Subject: Rename ICompactable::move to move_on_compact.

---
 .../tensor/direct_tensor_store/direct_tensor_store_test.cpp  |  7 ++++---
 .../tensor/tensor_buffer_store/tensor_buffer_store_test.cpp  | 10 +++++-----
 searchlib/src/vespa/searchlib/tensor/dense_tensor_store.cpp  |  3 +--
 searchlib/src/vespa/searchlib/tensor/dense_tensor_store.h    |  2 +-
 searchlib/src/vespa/searchlib/tensor/direct_tensor_store.cpp |  6 ++----
 searchlib/src/vespa/searchlib/tensor/direct_tensor_store.h   |  2 +-
 searchlib/src/vespa/searchlib/tensor/tensor_buffer_store.cpp |  3 +--
 searchlib/src/vespa/searchlib/tensor/tensor_buffer_store.h   |  2 +-
 .../datastore/sharded_hash_map/sharded_hash_map_test.cpp     |  8 ++++----
 .../unique_store_string_allocator_test.cpp                   | 12 ++++++------
 vespalib/src/vespa/vespalib/datastore/compaction_context.cpp |  2 +-
 .../src/vespa/vespalib/datastore/fixed_size_hash_map.cpp     |  4 ++--
 vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.h  |  2 +-
 vespalib/src/vespa/vespalib/datastore/i_compactable.h        |  7 ++++---
 .../src/vespa/vespalib/datastore/i_unique_store_dictionary.h |  2 +-
 vespalib/src/vespa/vespalib/datastore/sharded_hash_map.cpp   |  4 ++--
 vespalib/src/vespa/vespalib/datastore/sharded_hash_map.h     |  2 +-
 vespalib/src/vespa/vespalib/datastore/unique_store.hpp       |  6 +++---
 .../src/vespa/vespalib/datastore/unique_store_allocator.h    |  2 +-
 .../src/vespa/vespalib/datastore/unique_store_allocator.hpp  |  2 +-
 .../src/vespa/vespalib/datastore/unique_store_dictionary.h   |  2 +-
 .../src/vespa/vespalib/datastore/unique_store_dictionary.hpp |  6 +++---
 .../vespa/vespalib/datastore/unique_store_string_allocator.h |  2 +-
 .../vespalib/datastore/unique_store_string_allocator.hpp     |  2 +-
 24 files changed, 49 insertions(+), 51 deletions(-)

diff --git a/searchlib/src/tests/tensor/direct_tensor_store/direct_tensor_store_test.cpp b/searchlib/src/tests/tensor/direct_tensor_store/direct_tensor_store_test.cpp
index 8b21952b2d1..64cb6a6c146 100644
--- a/searchlib/src/tests/tensor/direct_tensor_store/direct_tensor_store_test.cpp
+++ b/searchlib/src/tests/tensor/direct_tensor_store/direct_tensor_store_test.cpp
@@ -107,7 +107,7 @@ TEST_F(DirectTensorStoreTest, hold_adds_entry_to_hold_list)
     EXPECT_GT(mem_2.allocatedBytesOnHold(), mem_1.allocatedBytesOnHold() + tensor_mem_usage.allocatedBytes());
 }

-TEST_F(DirectTensorStoreTest, move_allocates_new_entry_and_puts_old_entry_on_hold)
+TEST_F(DirectTensorStoreTest, move_on_compact_allocates_new_entry_and_leaves_old_entry_alone)
 {
     auto t = make_tensor(5);
     auto* exp = t.get();
@@ -115,12 +115,13 @@ TEST_F(DirectTensorStoreTest, move_allocates_new_entry_and_puts_old_entry_on_hol

     auto ref_1 = store.store_tensor(std::move(t));
     auto mem_1 = store.getMemoryUsage();
-    auto ref_2 = store.move(ref_1);
+    auto ref_2 = store.move_on_compact(ref_1);
     auto mem_2 = store.getMemoryUsage();
     EXPECT_NE(ref_1, ref_2);
     expect_tensor(exp, ref_1);
     expect_tensor(exp, ref_2);
-    EXPECT_GT(mem_2.allocatedBytesOnHold(), mem_1.allocatedBytesOnHold() + tensor_mem_usage.allocatedBytes());
+    EXPECT_EQ(0, mem_2.allocatedBytesOnHold());
+    EXPECT_GT(mem_2.usedBytes(), mem_1.usedBytes() + tensor_mem_usage.allocatedBytes());
 }

 GTEST_MAIN_RUN_ALL_TESTS()
diff --git a/searchlib/src/tests/tensor/tensor_buffer_store/tensor_buffer_store_test.cpp b/searchlib/src/tests/tensor/tensor_buffer_store/tensor_buffer_store_test.cpp
index 101b84e01aa..3bbb6cd334e 100644
--- a/searchlib/src/tests/tensor/tensor_buffer_store/tensor_buffer_store_test.cpp
+++ b/searchlib/src/tests/tensor/tensor_buffer_store/tensor_buffer_store_test.cpp
@@ -29,7 +29,7 @@ protected:
     vespalib::nbostream encode_stored_tensor(EntryRef ref);
     void assert_store_load(const TensorSpec& tensor_spec);
     void assert_store_load_many(const TensorSpec& tensor_spec);
-    void assert_store_move_load(const TensorSpec& tensor_spec);
+    void assert_store_move_on_compact_load(const TensorSpec& tensor_spec);
     void assert_store_encode_store_encoded_load(const TensorSpec& tensor_spec);
 };

@@ -102,10 +102,10 @@ TensorBufferStoreTest::assert_store_load_many(const TensorSpec& tensor_spec)
 }

 void
-TensorBufferStoreTest::assert_store_move_load(const TensorSpec& tensor_spec)
+TensorBufferStoreTest::assert_store_move_on_compact_load(const TensorSpec& tensor_spec)
 {
     auto ref = store_tensor(tensor_spec);
-    auto ref2 = _store.move(ref);
+    auto ref2 = _store.move_on_compact(ref);
     EXPECT_NE(ref, ref2);
     auto loaded_spec = load_tensor_spec(ref2);
     _store.holdTensor(ref2);
@@ -147,10 +147,10 @@ TEST_F(TensorBufferStoreTest, tensor_can_be_stored_and_loaded_many_times)
     }
 }

-TEST_F(TensorBufferStoreTest, stored_tensor_can_be_copied)
+TEST_F(TensorBufferStoreTest, stored_tensor_can_be_moved_on_compact)
 {
     for (auto& tensor_spec : tensor_specs) {
-        assert_store_move_load(tensor_spec);
+        assert_store_move_on_compact_load(tensor_spec);
     }
 }

diff --git a/searchlib/src/vespa/searchlib/tensor/dense_tensor_store.cpp b/searchlib/src/vespa/searchlib/tensor/dense_tensor_store.cpp
index ba7e8526146..60a3546578a 100644
--- a/searchlib/src/vespa/searchlib/tensor/dense_tensor_store.cpp
+++ b/searchlib/src/vespa/searchlib/tensor/dense_tensor_store.cpp
@@ -121,7 +121,7 @@ DenseTensorStore::holdTensor(EntryRef ref)
 }

 TensorStore::EntryRef
-DenseTensorStore::move(EntryRef ref)
+DenseTensorStore::move_on_compact(EntryRef ref)
 {
     if (!ref.valid()) {
         return RefType();
@@ -129,7 +129,6 @@ DenseTensorStore::move(EntryRef ref)
     auto oldraw = getRawBuffer(ref);
     auto newraw = allocRawBuffer();
     memcpy(newraw.data, static_cast(oldraw), getBufSize());
-    _concreteStore.holdElem(ref, _tensorSizeCalc.alignedSize());
     return newraw.ref;
 }

diff --git a/searchlib/src/vespa/searchlib/tensor/dense_tensor_store.h b/searchlib/src/vespa/searchlib/tensor/dense_tensor_store.h
index 1b25bdad464..298e58ee410 100644
--- a/searchlib/src/vespa/searchlib/tensor/dense_tensor_store.h
+++ b/searchlib/src/vespa/searchlib/tensor/dense_tensor_store.h
@@ -63,7 +63,7 @@ public:
     }
     vespalib::datastore::Handle allocRawBuffer();
     void holdTensor(EntryRef ref) override;
-    EntryRef move(EntryRef ref) override;
+    EntryRef move_on_compact(EntryRef ref) override;
     vespalib::MemoryUsage update_stat(const vespalib::datastore::CompactionStrategy& compaction_strategy) override;
     std::unique_ptr start_compact(const vespalib::datastore::CompactionStrategy& compaction_strategy) override;
     EntryRef store_tensor(const vespalib::eval::Value &tensor) override;
diff --git a/searchlib/src/vespa/searchlib/tensor/direct_tensor_store.cpp b/searchlib/src/vespa/searchlib/tensor/direct_tensor_store.cpp
index 1184cca37e7..013e7dedeba 100644
--- a/searchlib/src/vespa/searchlib/tensor/direct_tensor_store.cpp
+++ b/searchlib/src/vespa/searchlib/tensor/direct_tensor_store.cpp
@@ -71,16 +71,14 @@ DirectTensorStore::holdTensor(EntryRef ref)
 }

 EntryRef
-DirectTensorStore::move(EntryRef ref)
+DirectTensorStore::move_on_compact(EntryRef ref)
 {
     if (!ref.valid()) {
         return EntryRef();
     }
     const auto& old_tensor = _tensor_store.getEntry(ref);
     assert(old_tensor);
-    auto new_ref = add_entry(old_tensor);
-    _tensor_store.holdElem(ref, 1, old_tensor->get_memory_usage().allocatedBytes());
-    return new_ref;
+    return add_entry(old_tensor);
 }

 vespalib::MemoryUsage
diff --git a/searchlib/src/vespa/searchlib/tensor/direct_tensor_store.h b/searchlib/src/vespa/searchlib/tensor/direct_tensor_store.h
index ff9540a27b3..c55dda5646a 100644
--- a/searchlib/src/vespa/searchlib/tensor/direct_tensor_store.h
+++ b/searchlib/src/vespa/searchlib/tensor/direct_tensor_store.h
@@ -49,7 +49,7 @@ public:
     EntryRef store_tensor(std::unique_ptr tensor);

     void holdTensor(EntryRef ref) override;
-    EntryRef move(EntryRef ref) override;
+    EntryRef move_on_compact(EntryRef ref) override;
     vespalib::MemoryUsage update_stat(const vespalib::datastore::CompactionStrategy& compaction_strategy) override;
     std::unique_ptr start_compact(const vespalib::datastore::CompactionStrategy& compaction_strategy) override;
     EntryRef store_tensor(const vespalib::eval::Value& tensor) override;
diff --git a/searchlib/src/vespa/searchlib/tensor/tensor_buffer_store.cpp b/searchlib/src/vespa/searchlib/tensor/tensor_buffer_store.cpp
index 800311adfd6..6b96a91ec1c 100644
--- a/searchlib/src/vespa/searchlib/tensor/tensor_buffer_store.cpp
+++ b/searchlib/src/vespa/searchlib/tensor/tensor_buffer_store.cpp
@@ -48,7 +48,7 @@ TensorBufferStore::holdTensor(EntryRef ref)
 }

 EntryRef
-TensorBufferStore::move(EntryRef ref)
+TensorBufferStore::move_on_compact(EntryRef ref)
 {
     if (!ref.valid()) {
         return EntryRef();
@@ -56,7 +56,6 @@ TensorBufferStore::move(EntryRef ref)
     auto buf = _array_store.get(ref);
     auto new_ref = _array_store.add(buf);
     _ops.copied_labels(buf);
-    _array_store.remove(ref);
     return new_ref;
 }

diff --git a/searchlib/src/vespa/searchlib/tensor/tensor_buffer_store.h b/searchlib/src/vespa/searchlib/tensor/tensor_buffer_store.h
index 6611660b410..1b5520233e1 100644
--- a/searchlib/src/vespa/searchlib/tensor/tensor_buffer_store.h
+++ b/searchlib/src/vespa/searchlib/tensor/tensor_buffer_store.h
@@ -27,7 +27,7 @@ public:
     TensorBufferStore(const vespalib::eval::ValueType& tensor_type, std::shared_ptr allocator, uint32_t max_small_subspaces_type_id);
     ~TensorBufferStore();
     void holdTensor(EntryRef ref) override;
-    EntryRef move(EntryRef ref) override;
+    EntryRef move_on_compact(EntryRef ref) override;
     vespalib::MemoryUsage update_stat(const vespalib::datastore::CompactionStrategy& compaction_strategy) override;
     std::unique_ptr start_compact(const vespalib::datastore::CompactionStrategy& compaction_strategy) override;
     EntryRef store_tensor(const vespalib::eval::Value& tensor) override;
diff --git a/vespalib/src/tests/datastore/sharded_hash_map/sharded_hash_map_test.cpp b/vespalib/src/tests/datastore/sharded_hash_map/sharded_hash_map_test.cpp
index 13f9ae251b6..d6970e20e8a 100644
--- a/vespalib/src/tests/datastore/sharded_hash_map/sharded_hash_map_test.cpp
+++ b/vespalib/src/tests/datastore/sharded_hash_map/sharded_hash_map_test.cpp
@@ -73,8 +73,8 @@ public:
     }
     ~MyCompactable() override = default;

-    EntryRef move(EntryRef ref) override {
-        auto new_ref = _allocator.move(ref);
+    EntryRef move_on_compact(EntryRef ref) override {
+        auto new_ref = _allocator.move_on_compact(ref);
         _allocator.hold(ref);
         _new_refs.emplace_back(new_ref);
         return new_ref;
     }
@@ -395,7 +395,7 @@ TEST_F(DataStoreShardedHashTest, foreach_key_works)
     }
 }

-TEST_F(DataStoreShardedHashTest, move_keys_works)
+TEST_F(DataStoreShardedHashTest, move_keys_on_compact_works)
 {
     populate_sample_data(small_population);
     std::vector refs;
@@ -403,7 +403,7 @@ TEST_F(DataStoreShardedHashTest, move_keys_works)
     std::vector new_refs;
     MyCompactable my_compactable(_allocator, new_refs);
     auto filter = make_entry_ref_filter(false);
-    _hash_map.move_keys(my_compactable, filter);
+    _hash_map.move_keys_on_compact(my_compactable, filter);
     std::vector verify_new_refs;
     _hash_map.foreach_key([&verify_new_refs](EntryRef ref) { verify_new_refs.emplace_back(ref); });
     EXPECT_EQ(small_population, refs.size());
diff --git a/vespalib/src/tests/datastore/unique_store_string_allocator/unique_store_string_allocator_test.cpp b/vespalib/src/tests/datastore/unique_store_string_allocator/unique_store_string_allocator_test.cpp
index f68dd4dde66..0b34f587c6d 100644
--- a/vespalib/src/tests/datastore/unique_store_string_allocator/unique_store_string_allocator_test.cpp
+++ b/vespalib/src/tests/datastore/unique_store_string_allocator/unique_store_string_allocator_test.cpp
@@ -51,8 +51,8 @@ struct TestBase : public ::testing::Test {
     void remove(EntryRef ref) {
         allocator.hold(ref);
     }
-    EntryRef move(EntryRef ref) {
-        return allocator.move(ref);
+    EntryRef move_on_compact(EntryRef ref) {
+        return allocator.move_on_compact(ref);
     }
     uint32_t get_buffer_id(EntryRef ref) const {
         return EntryRefType(ref).bufferId();
     }
@@ -104,7 +104,7 @@ TEST_F(StringTest, extra_bytes_used_is_tracked)
     assert_buffer_state(ref, TestBufferStats().used(2).hold(0).dead(2));
     ref = add(spaces1000.c_str());
     assert_buffer_state(ref, TestBufferStats().used(2).hold(0).dead(1).extra_used(1001));
-    EntryRef ref2 = move(ref);
+    EntryRef ref2 = move_on_compact(ref);
     assert_get(ref2, spaces1000.c_str());
     assert_buffer_state(ref, TestBufferStats().used(3).hold(0).dead(1).extra_used(2002));
     remove(ref);
@@ -159,7 +159,7 @@ TEST_F(StringTest, free_list_is_not_used_when_disabled)
     assert_buffer_state(ref2, TestBufferStats().used(3).hold(0).dead(2).extra_used(1001));
 }

-TEST_F(StringTest, free_list_is_never_used_for_move)
+TEST_F(StringTest, free_list_is_never_used_for_move_on_compact)
 {
     // Free lists are default enabled for UniqueStoreStringAllocator
     EntryRef ref1 = add(small.c_str());
@@ -169,8 +169,8 @@ TEST_F(StringTest, free_list_is_never_used_for_move)
     remove(ref3);
     remove(ref4);
     trim_hold_lists();
-    EntryRef ref5 = move(ref1);
-    EntryRef ref6 = move(ref2);
+    EntryRef ref5 = move_on_compact(ref1);
+    EntryRef ref6 = move_on_compact(ref2);
     EXPECT_NE(ref5, ref3);
     EXPECT_NE(ref6, ref4);
     assert_buffer_state(ref1, TestBufferStats().used(48).hold(0).dead(16));
diff --git a/vespalib/src/vespa/vespalib/datastore/compaction_context.cpp b/vespalib/src/vespa/vespalib/datastore/compaction_context.cpp
index 65e028119a2..1ce6401605e 100644
--- a/vespalib/src/vespa/vespalib/datastore/compaction_context.cpp
+++ b/vespalib/src/vespa/vespalib/datastore/compaction_context.cpp
@@ -25,7 +25,7 @@ CompactionContext::compact(vespalib::ArrayRef refs)
     for (auto &atomic_entry_ref : refs) {
         auto ref = atomic_entry_ref.load_relaxed();
         if (ref.valid() && _filter.has(ref)) {
-            EntryRef newRef = _store.move(ref);
+            EntryRef newRef = _store.move_on_compact(ref);
             atomic_entry_ref.store_release(newRef);
         }
     }
diff --git a/vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.cpp b/vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.cpp
index 6f001ce3c94..402905d7aca 100644
--- a/vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.cpp
+++ b/vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.cpp
@@ -183,7 +183,7 @@ FixedSizeHashMap::foreach_key(const std::function& callback) con
 }

 void
-FixedSizeHashMap::move_keys(ICompactable& compactable, const EntryRefFilter &compacting_buffers)
+FixedSizeHashMap::move_keys_on_compact(ICompactable& compactable, const EntryRefFilter &compacting_buffers)
 {
     for (auto& chain_head : _chain_heads) {
         uint32_t node_idx = chain_head.load_relaxed();
@@ -192,7 +192,7 @@ FixedSizeHashMap::move_keys(ICompactable& compactable, const EntryRefFilter &com
             EntryRef old_ref = node.get_kv().first.load_relaxed();
             assert(old_ref.valid());
             if (compacting_buffers.has(old_ref)) {
-                EntryRef new_ref = compactable.move(old_ref);
+                EntryRef new_ref = compactable.move_on_compact(old_ref);
                 node.get_kv().first.store_release(new_ref);
             }
             node_idx = node.get_next_node_idx().load(std::memory_order_relaxed);
diff --git a/vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.h b/vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.h
index c522bcc3c33..dd56b4951bc 100644
--- a/vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.h
+++ b/vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.h
@@ -159,7 +159,7 @@ public:
     size_t size() const noexcept { return _count; }
     MemoryUsage get_memory_usage() const;
     void foreach_key(const std::function& callback) const;
-    void move_keys(ICompactable& compactable, const EntryRefFilter &compacting_buffers);
+    void move_keys_on_compact(ICompactable& compactable, const EntryRefFilter &compacting_buffers);
     /*
      * Scan dictionary and call normalize function for each value. If
      * returned value is different then write back the modified value to
diff --git a/vespalib/src/vespa/vespalib/datastore/i_compactable.h b/vespalib/src/vespa/vespalib/datastore/i_compactable.h
index 069d32bb481..31c082e4371 100644
--- a/vespalib/src/vespa/vespalib/datastore/i_compactable.h
+++ b/vespalib/src/vespa/vespalib/datastore/i_compactable.h
@@ -8,12 +8,13 @@ namespace vespalib::datastore {
  * Interface for moving an entry as part of compaction of data in old
  * buffers into new buffers.
  *
- * Old entry is unchanged and not placed on any hold lists since we
- * expect the old buffers to be freed soon anyway.
+ * A copy of the old entry is created and a reference to the new copy is
+ * returned. The old entry is unchanged and not placed on any hold
+ * lists since we expect the old buffers to be freed soon anyway.
  */
 struct ICompactable {
     virtual ~ICompactable() = default;
-    virtual EntryRef move(EntryRef ref) = 0;
+    virtual EntryRef move_on_compact(EntryRef ref) = 0;
 };

 }
diff --git a/vespalib/src/vespa/vespalib/datastore/i_unique_store_dictionary.h b/vespalib/src/vespa/vespalib/datastore/i_unique_store_dictionary.h
index bb105d41519..48abda45974 100644
--- a/vespalib/src/vespa/vespalib/datastore/i_unique_store_dictionary.h
+++ b/vespalib/src/vespa/vespalib/datastore/i_unique_store_dictionary.h
@@ -30,7 +30,7 @@ public:
     virtual UniqueStoreAddResult add(const EntryComparator& comp, std::function insertEntry) = 0;
     virtual EntryRef find(const EntryComparator& comp) = 0;
     virtual void remove(const EntryComparator& comp, EntryRef ref) = 0;
-    virtual void move_keys(ICompactable& compactable, const EntryRefFilter& compacting_buffers) = 0;
+    virtual void move_keys_on_compact(ICompactable& compactable, const EntryRefFilter& compacting_buffers) = 0;
     virtual uint32_t get_num_uniques() const = 0;
     virtual vespalib::MemoryUsage get_memory_usage() const = 0;
     virtual void build(vespalib::ConstArrayRef, vespalib::ConstArrayRef ref_counts, std::function hold) = 0;
diff --git a/vespalib/src/vespa/vespalib/datastore/sharded_hash_map.cpp b/vespalib/src/vespa/vespalib/datastore/sharded_hash_map.cpp
index 102aa1cefb3..86578f663a1 100644
--- a/vespalib/src/vespa/vespalib/datastore/sharded_hash_map.cpp
+++ b/vespalib/src/vespa/vespalib/datastore/sharded_hash_map.cpp
@@ -171,12 +171,12 @@ ShardedHashMap::foreach_key(std::function callback) const
 }

 void
-ShardedHashMap::move_keys(ICompactable& compactable, const EntryRefFilter& compacting_buffers)
+ShardedHashMap::move_keys_on_compact(ICompactable& compactable, const EntryRefFilter& compacting_buffers)
 {
     for (size_t i = 0; i < num_shards; ++i) {
         auto map = _maps[i].load(std::memory_order_relaxed);
         if (map != nullptr) {
-            map->move_keys(compactable, compacting_buffers);
+            map->move_keys_on_compact(compactable, compacting_buffers);
         }
     }
 }
diff --git a/vespalib/src/vespa/vespalib/datastore/sharded_hash_map.h b/vespalib/src/vespa/vespalib/datastore/sharded_hash_map.h
index e0ba9488351..80d14d187b0 100644
--- a/vespalib/src/vespa/vespalib/datastore/sharded_hash_map.h
+++ b/vespalib/src/vespa/vespalib/datastore/sharded_hash_map.h
@@ -58,7 +58,7 @@ public:
     const EntryComparator &get_default_comparator() const noexcept { return *_comp; }
     MemoryUsage get_memory_usage() const;
     void foreach_key(std::function callback) const;
-    void move_keys(ICompactable& compactable, const EntryRefFilter& compacting_buffers);
+    void move_keys_on_compact(ICompactable& compactable, const EntryRefFilter& compacting_buffers);
     bool normalize_values(std::function normalize);
     bool normalize_values(std::function&)> normalize, const EntryRefFilter& filter);
     void foreach_value(std::function&)> callback, const EntryRefFilter& filter);
diff --git a/vespalib/src/vespa/vespalib/datastore/unique_store.hpp b/vespalib/src/vespa/vespalib/datastore/unique_store.hpp
index 37a56bf2561..63592f82898 100644
--- a/vespalib/src/vespa/vespalib/datastore/unique_store.hpp
+++ b/vespalib/src/vespa/vespalib/datastore/unique_store.hpp
@@ -109,20 +109,20 @@ private:
         }
     }

-    EntryRef move(EntryRef oldRef) override {
+    EntryRef move_on_compact(EntryRef oldRef) override {
         RefT iRef(oldRef);
         uint32_t buffer_id = iRef.bufferId();
         auto &inner_mapping = _mapping[buffer_id];
         assert(iRef.offset() < inner_mapping.size());
         EntryRef &mappedRef = inner_mapping[iRef.offset()];
         assert(!mappedRef.valid());
-        EntryRef newRef = _store.move(oldRef);
+        EntryRef newRef = _store.move_on_compact(oldRef);
         mappedRef = newRef;
         return newRef;
     }

     void fillMapping() {
-        _dict.move_keys(*this, _filter);
+        _dict.move_keys_on_compact(*this, _filter);
     }

 public:
diff --git a/vespalib/src/vespa/vespalib/datastore/unique_store_allocator.h b/vespalib/src/vespa/vespalib/datastore/unique_store_allocator.h
index 04df88ab4b9..0f6d9ddfc9b 100644
--- a/vespalib/src/vespa/vespalib/datastore/unique_store_allocator.h
+++ b/vespalib/src/vespa/vespalib/datastore/unique_store_allocator.h
@@ -35,7 +35,7 @@ public:
     ~UniqueStoreAllocator() override;
     EntryRef allocate(const EntryType& value);
     void hold(EntryRef ref);
-    EntryRef move(EntryRef ref) override;
+    EntryRef move_on_compact(EntryRef ref) override;
     const WrappedEntryType& get_wrapped(EntryRef ref) const {
         RefType iRef(ref);
         return *_store.template getEntry(iRef);
     }
diff --git a/vespalib/src/vespa/vespalib/datastore/unique_store_allocator.hpp b/vespalib/src/vespa/vespalib/datastore/unique_store_allocator.hpp
index 04a229d4ffa..5d96b1e0314 100644
--- a/vespalib/src/vespa/vespalib/datastore/unique_store_allocator.hpp
+++ b/vespalib/src/vespa/vespalib/datastore/unique_store_allocator.hpp
@@ -48,7 +48,7 @@ UniqueStoreAllocator::hold(EntryRef ref)

 template
 EntryRef
-UniqueStoreAllocator::move(EntryRef ref)
+UniqueStoreAllocator::move_on_compact(EntryRef ref)
 {
     return _store.template allocator(0).alloc(get_wrapped(ref)).ref;
 }
diff --git a/vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.h b/vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.h
index 702bae38e7c..7aed81c3a79 100644
--- a/vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.h
+++ b/vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.h
@@ -79,7 +79,7 @@ public:
     UniqueStoreAddResult add(const EntryComparator& comp, std::function insertEntry) override;
     EntryRef find(const EntryComparator& comp) override;
     void remove(const EntryComparator& comp, EntryRef ref) override;
-    void move_keys(ICompactable& compactable, const EntryRefFilter& compacting_buffers) override;
+    void move_keys_on_compact(ICompactable& compactable, const EntryRefFilter& compacting_buffers) override;
     uint32_t get_num_uniques() const override;
     vespalib::MemoryUsage get_memory_usage() const override;
     void build(vespalib::ConstArrayRef, vespalib::ConstArrayRef ref_counts, std::function hold) override;
diff --git a/vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.hpp b/vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.hpp
index 8029b66309d..29c4b6514d7 100644
--- a/vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.hpp
+++ b/vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.hpp
@@ -140,7 +140,7 @@ UniqueStoreDictionary::remove(const

 template
 void
-UniqueStoreDictionary::move_keys(ICompactable &compactable, const EntryRefFilter& compacting_buffers)
+UniqueStoreDictionary::move_keys_on_compact(ICompactable &compactable, const EntryRefFilter& compacting_buffers)
 {
     if constexpr (has_btree_dictionary) {
         auto itr = this->_btree_dict.begin();
@@ -148,7 +148,7 @@ UniqueStoreDictionary::move_keys(ICo
             EntryRef oldRef(itr.getKey().load_relaxed());
             assert(oldRef.valid());
             if (compacting_buffers.has(oldRef)) {
-                EntryRef newRef(compactable.move(oldRef));
+                EntryRef newRef(compactable.move_on_compact(oldRef));
                 this->_btree_dict.thaw(itr);
                 itr.writeKey(AtomicEntryRef(newRef));
                 if constexpr (has_hash_dictionary) {
@@ -160,7 +160,7 @@ UniqueStoreDictionary::move_keys(ICo
             ++itr;
         }
     } else {
-        this->_hash_dict.move_keys(compactable, compacting_buffers);
+        this->_hash_dict.move_keys_on_compact(compactable, compacting_buffers);
     }
 }

diff --git a/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.h b/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.h
index be5fa8f6c1e..8977fd1cce8 100644
--- a/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.h
+++ b/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.h
@@ -111,7 +111,7 @@ public:
     ~UniqueStoreStringAllocator() override;
     EntryRef allocate(const char *value);
     void hold(EntryRef ref);
-    EntryRef move(EntryRef ref) override;
+    EntryRef move_on_compact(EntryRef ref) override;
     const UniqueStoreEntryBase& get_wrapped(EntryRef ref) const {
         RefType iRef(ref);
         auto &state = _store.getBufferState(iRef.bufferId());
diff --git a/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.hpp b/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.hpp
index b5405cd22b5..eeba2f463b9 100644
--- a/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.hpp
+++ b/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.hpp
@@ -71,7 +71,7 @@ UniqueStoreStringAllocator::hold(EntryRef ref)

 template
 EntryRef
-UniqueStoreStringAllocator::move(EntryRef ref)
+UniqueStoreStringAllocator::move_on_compact(EntryRef ref)
 {
     RefT iRef(ref);
     uint32_t type_id = _store.getTypeId(iRef.bufferId());
--
cgit v1.2.3
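
Note (not part of the patch): the sketch below illustrates the contract that the renamed
ICompactable::move_on_compact is expected to follow, as described in the updated comment in
i_compactable.h: copy the referenced entry into a new buffer, return the new reference, and
leave the old entry untouched (no hold list), since the compacted buffers are freed shortly
afterwards. ICompactable, EntryRef and the semantics are taken from the patch; ExampleStore,
its std::vector-backed storage and the slot-0-is-invalid convention are hypothetical
simplifications, not the real vespalib datastore types.

    #include <cstdint>
    #include <string>
    #include <vector>

    // Simplified stand-ins for vespalib::datastore::EntryRef / ICompactable.
    struct EntryRef {
        uint32_t idx = 0;
        bool valid() const { return idx != 0; }
    };

    struct ICompactable {
        virtual ~ICompactable() = default;
        virtual EntryRef move_on_compact(EntryRef ref) = 0;
    };

    class ExampleStore : public ICompactable {
    public:
        EntryRef add(const std::string& value) {
            _entries.push_back(value);
            return EntryRef{static_cast<uint32_t>(_entries.size() - 1)};
        }
        const std::string& get(EntryRef ref) const { return _entries[ref.idx]; }

        // Called by compaction for each live reference that points into a buffer
        // being compacted: append a copy of the old entry and return the new
        // reference. The old entry is not touched and not put on any hold list.
        EntryRef move_on_compact(EntryRef ref) override {
            _entries.push_back(_entries[ref.idx]);
            return EntryRef{static_cast<uint32_t>(_entries.size() - 1)};
        }
    private:
        std::vector<std::string> _entries{""}; // slot 0 reserved, so idx 0 == invalid
    };

In the real code (see compaction_context.cpp and fixed_size_hash_map.cpp above), the caller
then publishes the returned reference with store_release, which is why move_on_compact only
needs to produce the copy and can leave the old entry alone.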