Diffstat (limited to 'vespalib/src/vespa/vespalib/datastore')
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/compaction_context.cpp  2
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.cpp  4
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.h  2
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/i_compactable.h  7
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/i_unique_store_dictionary.h  2
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/sharded_hash_map.cpp  4
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/sharded_hash_map.h  2
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/unique_store.hpp  6
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/unique_store_allocator.h  2
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/unique_store_allocator.hpp  2
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.h  2
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.hpp  6
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.h  2
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.hpp  2
14 files changed, 23 insertions, 22 deletions
diff --git a/vespalib/src/vespa/vespalib/datastore/compaction_context.cpp b/vespalib/src/vespa/vespalib/datastore/compaction_context.cpp
index 65e028119a2..1ce6401605e 100644
--- a/vespalib/src/vespa/vespalib/datastore/compaction_context.cpp
+++ b/vespalib/src/vespa/vespalib/datastore/compaction_context.cpp
@@ -25,7 +25,7 @@ CompactionContext::compact(vespalib::ArrayRef<AtomicEntryRef> refs)
for (auto &atomic_entry_ref : refs) {
auto ref = atomic_entry_ref.load_relaxed();
if (ref.valid() && _filter.has(ref)) {
- EntryRef newRef = _store.move(ref);
+ EntryRef newRef = _store.move_on_compact(ref);
atomic_entry_ref.store_release(newRef);
}
}
diff --git a/vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.cpp b/vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.cpp
index 6f001ce3c94..402905d7aca 100644
--- a/vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.cpp
+++ b/vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.cpp
@@ -183,7 +183,7 @@ FixedSizeHashMap::foreach_key(const std::function<void(EntryRef)>& callback) con
}
void
-FixedSizeHashMap::move_keys(ICompactable& compactable, const EntryRefFilter &compacting_buffers)
+FixedSizeHashMap::move_keys_on_compact(ICompactable& compactable, const EntryRefFilter &compacting_buffers)
{
for (auto& chain_head : _chain_heads) {
uint32_t node_idx = chain_head.load_relaxed();
@@ -192,7 +192,7 @@ FixedSizeHashMap::move_keys(ICompactable& compactable, const EntryRefFilter &com
EntryRef old_ref = node.get_kv().first.load_relaxed();
assert(old_ref.valid());
if (compacting_buffers.has(old_ref)) {
- EntryRef new_ref = compactable.move(old_ref);
+ EntryRef new_ref = compactable.move_on_compact(old_ref);
node.get_kv().first.store_release(new_ref);
}
node_idx = node.get_next_node_idx().load(std::memory_order_relaxed);
diff --git a/vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.h b/vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.h
index c522bcc3c33..dd56b4951bc 100644
--- a/vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.h
+++ b/vespalib/src/vespa/vespalib/datastore/fixed_size_hash_map.h
@@ -159,7 +159,7 @@ public:
size_t size() const noexcept { return _count; }
MemoryUsage get_memory_usage() const;
void foreach_key(const std::function<void(EntryRef)>& callback) const;
- void move_keys(ICompactable& compactable, const EntryRefFilter &compacting_buffers);
+ void move_keys_on_compact(ICompactable& compactable, const EntryRefFilter &compacting_buffers);
/*
* Scan dictionary and call normalize function for each value. If
* returned value is different then write back the modified value to
diff --git a/vespalib/src/vespa/vespalib/datastore/i_compactable.h b/vespalib/src/vespa/vespalib/datastore/i_compactable.h
index 069d32bb481..31c082e4371 100644
--- a/vespalib/src/vespa/vespalib/datastore/i_compactable.h
+++ b/vespalib/src/vespa/vespalib/datastore/i_compactable.h
@@ -8,12 +8,13 @@ namespace vespalib::datastore {
* Interface for moving an entry as part of compaction of data in old
* buffers into new buffers.
*
- * Old entry is unchanged and not placed on any hold lists since we
- * expect the old buffers to be freed soon anyway.
+ * A copy of the old entry is created and a reference to the new copy is
+ * returned. The old entry is unchanged and not placed on any hold
+ * lists since we expect the old buffers to be freed soon anyway.
*/
struct ICompactable {
virtual ~ICompactable() = default;
- virtual EntryRef move(EntryRef ref) = 0;
+ virtual EntryRef move_on_compact(EntryRef ref) = 0;
};
}
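Note: the updated comment above spells out the move_on_compact() contract: an implementation copies the old entry into a new buffer and returns a reference to the copy, while the old entry is left untouched and is not put on any hold list, since its buffer is expected to be freed shortly. The following is a minimal, hypothetical sketch of an implementer, shown only to illustrate that contract; the class name and the alloc_copy() helper are made up, and only ICompactable and EntryRef come from the header changed above.

#include <vespa/vespalib/datastore/i_compactable.h>

namespace example {

using vespalib::datastore::EntryRef;
using vespalib::datastore::ICompactable;

struct ExampleStore : ICompactable {
    // Hypothetical helper: allocate a copy of the entry behind 'ref'
    // in an active (non-compacting) buffer and return its reference.
    EntryRef alloc_copy(EntryRef ref) {
        (void) ref;
        return EntryRef(); // placeholder; a real store would return the new entry's ref
    }

    EntryRef move_on_compact(EntryRef ref) override {
        // Copy only: the old entry is deliberately not freed or placed
        // on a hold list, because its buffer will be freed soon anyway.
        return alloc_copy(ref);
    }
};

} // namespace example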
diff --git a/vespalib/src/vespa/vespalib/datastore/i_unique_store_dictionary.h b/vespalib/src/vespa/vespalib/datastore/i_unique_store_dictionary.h
index bb105d41519..48abda45974 100644
--- a/vespalib/src/vespa/vespalib/datastore/i_unique_store_dictionary.h
+++ b/vespalib/src/vespa/vespalib/datastore/i_unique_store_dictionary.h
@@ -30,7 +30,7 @@ public:
virtual UniqueStoreAddResult add(const EntryComparator& comp, std::function<EntryRef(void)> insertEntry) = 0;
virtual EntryRef find(const EntryComparator& comp) = 0;
virtual void remove(const EntryComparator& comp, EntryRef ref) = 0;
- virtual void move_keys(ICompactable& compactable, const EntryRefFilter& compacting_buffers) = 0;
+ virtual void move_keys_on_compact(ICompactable& compactable, const EntryRefFilter& compacting_buffers) = 0;
virtual uint32_t get_num_uniques() const = 0;
virtual vespalib::MemoryUsage get_memory_usage() const = 0;
virtual void build(vespalib::ConstArrayRef<EntryRef>, vespalib::ConstArrayRef<uint32_t> ref_counts, std::function<void(EntryRef)> hold) = 0;
diff --git a/vespalib/src/vespa/vespalib/datastore/sharded_hash_map.cpp b/vespalib/src/vespa/vespalib/datastore/sharded_hash_map.cpp
index 102aa1cefb3..86578f663a1 100644
--- a/vespalib/src/vespa/vespalib/datastore/sharded_hash_map.cpp
+++ b/vespalib/src/vespa/vespalib/datastore/sharded_hash_map.cpp
@@ -171,12 +171,12 @@ ShardedHashMap::foreach_key(std::function<void(EntryRef)> callback) const
}
void
-ShardedHashMap::move_keys(ICompactable& compactable, const EntryRefFilter& compacting_buffers)
+ShardedHashMap::move_keys_on_compact(ICompactable& compactable, const EntryRefFilter& compacting_buffers)
{
for (size_t i = 0; i < num_shards; ++i) {
auto map = _maps[i].load(std::memory_order_relaxed);
if (map != nullptr) {
- map->move_keys(compactable, compacting_buffers);
+ map->move_keys_on_compact(compactable, compacting_buffers);
}
}
}
diff --git a/vespalib/src/vespa/vespalib/datastore/sharded_hash_map.h b/vespalib/src/vespa/vespalib/datastore/sharded_hash_map.h
index e0ba9488351..80d14d187b0 100644
--- a/vespalib/src/vespa/vespalib/datastore/sharded_hash_map.h
+++ b/vespalib/src/vespa/vespalib/datastore/sharded_hash_map.h
@@ -58,7 +58,7 @@ public:
const EntryComparator &get_default_comparator() const noexcept { return *_comp; }
MemoryUsage get_memory_usage() const;
void foreach_key(std::function<void(EntryRef)> callback) const;
- void move_keys(ICompactable& compactable, const EntryRefFilter& compacting_buffers);
+ void move_keys_on_compact(ICompactable& compactable, const EntryRefFilter& compacting_buffers);
bool normalize_values(std::function<EntryRef(EntryRef)> normalize);
bool normalize_values(std::function<void(std::vector<EntryRef>&)> normalize, const EntryRefFilter& filter);
void foreach_value(std::function<void(const std::vector<EntryRef>&)> callback, const EntryRefFilter& filter);
diff --git a/vespalib/src/vespa/vespalib/datastore/unique_store.hpp b/vespalib/src/vespa/vespalib/datastore/unique_store.hpp
index 37a56bf2561..63592f82898 100644
--- a/vespalib/src/vespa/vespalib/datastore/unique_store.hpp
+++ b/vespalib/src/vespa/vespalib/datastore/unique_store.hpp
@@ -109,20 +109,20 @@ private:
}
}
- EntryRef move(EntryRef oldRef) override {
+ EntryRef move_on_compact(EntryRef oldRef) override {
RefT iRef(oldRef);
uint32_t buffer_id = iRef.bufferId();
auto &inner_mapping = _mapping[buffer_id];
assert(iRef.offset() < inner_mapping.size());
EntryRef &mappedRef = inner_mapping[iRef.offset()];
assert(!mappedRef.valid());
- EntryRef newRef = _store.move(oldRef);
+ EntryRef newRef = _store.move_on_compact(oldRef);
mappedRef = newRef;
return newRef;
}
void fillMapping() {
- _dict.move_keys(*this, _filter);
+ _dict.move_keys_on_compact(*this, _filter);
}
public:
diff --git a/vespalib/src/vespa/vespalib/datastore/unique_store_allocator.h b/vespalib/src/vespa/vespalib/datastore/unique_store_allocator.h
index 04df88ab4b9..0f6d9ddfc9b 100644
--- a/vespalib/src/vespa/vespalib/datastore/unique_store_allocator.h
+++ b/vespalib/src/vespa/vespalib/datastore/unique_store_allocator.h
@@ -35,7 +35,7 @@ public:
~UniqueStoreAllocator() override;
EntryRef allocate(const EntryType& value);
void hold(EntryRef ref);
- EntryRef move(EntryRef ref) override;
+ EntryRef move_on_compact(EntryRef ref) override;
const WrappedEntryType& get_wrapped(EntryRef ref) const {
RefType iRef(ref);
return *_store.template getEntry<WrappedEntryType>(iRef);
diff --git a/vespalib/src/vespa/vespalib/datastore/unique_store_allocator.hpp b/vespalib/src/vespa/vespalib/datastore/unique_store_allocator.hpp
index 04a229d4ffa..5d96b1e0314 100644
--- a/vespalib/src/vespa/vespalib/datastore/unique_store_allocator.hpp
+++ b/vespalib/src/vespa/vespalib/datastore/unique_store_allocator.hpp
@@ -48,7 +48,7 @@ UniqueStoreAllocator<EntryT, RefT>::hold(EntryRef ref)
template <typename EntryT, typename RefT>
EntryRef
-UniqueStoreAllocator<EntryT, RefT>::move(EntryRef ref)
+UniqueStoreAllocator<EntryT, RefT>::move_on_compact(EntryRef ref)
{
return _store.template allocator<WrappedEntryType>(0).alloc(get_wrapped(ref)).ref;
}
diff --git a/vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.h b/vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.h
index 702bae38e7c..7aed81c3a79 100644
--- a/vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.h
+++ b/vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.h
@@ -79,7 +79,7 @@ public:
UniqueStoreAddResult add(const EntryComparator& comp, std::function<EntryRef(void)> insertEntry) override;
EntryRef find(const EntryComparator& comp) override;
void remove(const EntryComparator& comp, EntryRef ref) override;
- void move_keys(ICompactable& compactable, const EntryRefFilter& compacting_buffers) override;
+ void move_keys_on_compact(ICompactable& compactable, const EntryRefFilter& compacting_buffers) override;
uint32_t get_num_uniques() const override;
vespalib::MemoryUsage get_memory_usage() const override;
void build(vespalib::ConstArrayRef<EntryRef>, vespalib::ConstArrayRef<uint32_t> ref_counts, std::function<void(EntryRef)> hold) override;
diff --git a/vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.hpp b/vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.hpp
index 8029b66309d..29c4b6514d7 100644
--- a/vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.hpp
+++ b/vespalib/src/vespa/vespalib/datastore/unique_store_dictionary.hpp
@@ -140,7 +140,7 @@ UniqueStoreDictionary<BTreeDictionaryT, ParentT, HashDictionaryT>::remove(const
template <typename BTreeDictionaryT, typename ParentT, typename HashDictionaryT>
void
-UniqueStoreDictionary<BTreeDictionaryT, ParentT, HashDictionaryT>::move_keys(ICompactable &compactable, const EntryRefFilter& compacting_buffers)
+UniqueStoreDictionary<BTreeDictionaryT, ParentT, HashDictionaryT>::move_keys_on_compact(ICompactable &compactable, const EntryRefFilter& compacting_buffers)
{
if constexpr (has_btree_dictionary) {
auto itr = this->_btree_dict.begin();
@@ -148,7 +148,7 @@ UniqueStoreDictionary<BTreeDictionaryT, ParentT, HashDictionaryT>::move_keys(ICo
EntryRef oldRef(itr.getKey().load_relaxed());
assert(oldRef.valid());
if (compacting_buffers.has(oldRef)) {
- EntryRef newRef(compactable.move(oldRef));
+ EntryRef newRef(compactable.move_on_compact(oldRef));
this->_btree_dict.thaw(itr);
itr.writeKey(AtomicEntryRef(newRef));
if constexpr (has_hash_dictionary) {
@@ -160,7 +160,7 @@ UniqueStoreDictionary<BTreeDictionaryT, ParentT, HashDictionaryT>::move_keys(ICo
++itr;
}
} else {
- this->_hash_dict.move_keys(compactable, compacting_buffers);
+ this->_hash_dict.move_keys_on_compact(compactable, compacting_buffers);
}
}
diff --git a/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.h b/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.h
index be5fa8f6c1e..8977fd1cce8 100644
--- a/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.h
+++ b/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.h
@@ -111,7 +111,7 @@ public:
~UniqueStoreStringAllocator() override;
EntryRef allocate(const char *value);
void hold(EntryRef ref);
- EntryRef move(EntryRef ref) override;
+ EntryRef move_on_compact(EntryRef ref) override;
const UniqueStoreEntryBase& get_wrapped(EntryRef ref) const {
RefType iRef(ref);
auto &state = _store.getBufferState(iRef.bufferId());
diff --git a/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.hpp b/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.hpp
index b5405cd22b5..eeba2f463b9 100644
--- a/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.hpp
+++ b/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.hpp
@@ -71,7 +71,7 @@ UniqueStoreStringAllocator<RefT>::hold(EntryRef ref)
template <typename RefT>
EntryRef
-UniqueStoreStringAllocator<RefT>::move(EntryRef ref)
+UniqueStoreStringAllocator<RefT>::move_on_compact(EntryRef ref)
{
RefT iRef(ref);
uint32_t type_id = _store.getTypeId(iRef.bufferId());