author    Henning Baldersheim <balder@yahoo-inc.com>  2023-03-10 10:02:19 +0000
committer Henning Baldersheim <balder@yahoo-inc.com>  2023-03-10 10:09:35 +0000
commit    9cc13d5339f421e5fb032333157694d7f267b452 (patch)
tree      ee1511df9a440092b192de3c53c6834e1efc918f
parent    fea300fa4b789ce8e27676f8daf4a3fbb0266ef6 (diff)
- rename BufferAndTypeId -> BufferAndMeta and add the array size to it.
- Use _buffers instead of _states to get array size.
-rw-r--r--  vespalib/src/vespa/vespalib/btree/btreestore.h                          | 255
-rw-r--r--  vespalib/src/vespa/vespalib/btree/btreestore.hpp                        |   2
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/bufferstate.h                     |  32
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/datastorebase.cpp                 |  10
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/datastorebase.h                   |  17
-rw-r--r--  vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.h   |   4
6 files changed, 98 insertions(+), 222 deletions(-)
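
In short, the per-buffer metadata that DataStoreBase keeps in its _buffers vector grows from BufferAndTypeId (buffer pointer plus type id) to BufferAndMeta, which additionally caches the array size. Read paths that previously had to consult the larger BufferState objects in _states can now resolve both the type id and the array size from the small _buffers entry. The sketch below is illustrative only (simplified stand-in types, not the real vespalib classes); it shows the metadata being filled in when a buffer goes active and read back on lookup:

    #include <cstdint>
    #include <vector>

    struct BufferAndMeta {          // stand-in for the new fast-path metadata (buffer pointer omitted)
        uint32_t typeId = 0;
        uint32_t arraySize = 0;
    };

    struct BufferState {            // stand-in for the full per-buffer state kept in _states
        uint32_t arraySize = 0;
        uint32_t getArraySize() const { return arraySize; }
    };

    // Mirrors the onActive() change in the diff below: once the state is activated,
    // the type id and array size are copied into the small metadata record.
    void on_active(std::vector<BufferAndMeta>& buffers, std::vector<BufferState>& states,
                   uint32_t bufferId, uint32_t typeId) {
        BufferState& state = states[bufferId];
        BufferAndMeta& meta = buffers[bufferId];
        meta.typeId = typeId;
        meta.arraySize = state.getArraySize();   // cached so readers never touch _states
    }

    // Read path: type id and array size now come from _buffers, not _states.
    uint32_t type_id_of(const std::vector<BufferAndMeta>& buffers, uint32_t bufferId) {
        return buffers[bufferId].typeId;
    }
    uint32_t array_size_of(const std::vector<BufferAndMeta>& buffers, uint32_t bufferId) {
        return buffers[bufferId].arraySize;
    }
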
diff --git a/vespalib/src/vespa/vespalib/btree/btreestore.h b/vespalib/src/vespa/vespalib/btree/btreestore.h
index c228c084e6d..9d98a9ca514 100644
--- a/vespalib/src/vespa/vespalib/btree/btreestore.h
+++ b/vespalib/src/vespa/vespalib/btree/btreestore.h
@@ -95,202 +95,90 @@ protected:
public:
BTreeStore();
-
BTreeStore(bool init);
-
~BTreeStore();
const NodeAllocatorType &getAllocator() const { return _allocator; }
- void
- disableFreeLists() {
+ void disableFreeLists() {
_store.disableFreeLists();
_allocator.disableFreeLists();
}
- void
- disableElemHoldList()
- {
+ void disableElemHoldList() {
_store.disableElemHoldList();
_allocator.disableElemHoldList();
}
- BTreeTypeRefPair
- allocNewBTree() {
+ BTreeTypeRefPair allocNewBTree() {
return _store.allocator<BTreeType>(BUFFERTYPE_BTREE).alloc();
}
- BTreeTypeRefPair
- allocBTree() {
+ BTreeTypeRefPair allocBTree() {
return _store.freeListAllocator<BTreeType, TreeReclaimer>(BUFFERTYPE_BTREE).alloc();
}
- BTreeTypeRefPair
- allocNewBTreeCopy(const BTreeType &rhs) {
+ BTreeTypeRefPair allocNewBTreeCopy(const BTreeType &rhs) {
return _store.allocator<BTreeType>(BUFFERTYPE_BTREE).alloc(rhs);
}
- BTreeTypeRefPair
- allocBTreeCopy(const BTreeType &rhs) {
+ BTreeTypeRefPair allocBTreeCopy(const BTreeType &rhs) {
return _store.freeListAllocator<BTreeType, datastore::DefaultReclaimer<BTreeType> >(BUFFERTYPE_BTREE).alloc(rhs);
}
- KeyDataTypeRefPair
- allocNewKeyData(uint32_t clusterSize);
-
- KeyDataTypeRefPair
- allocKeyData(uint32_t clusterSize);
-
- KeyDataTypeRefPair
- allocNewKeyDataCopy(const KeyDataType *rhs, uint32_t clusterSize);
-
- KeyDataTypeRefPair
- allocKeyDataCopy(const KeyDataType *rhs, uint32_t clusterSize);
-
- const KeyDataType *
- lower_bound(const KeyDataType *b, const KeyDataType *e,
- const KeyType &key, CompareT comp);
-
- void
- makeTree(EntryRef &ref,
- const KeyDataType *array, uint32_t clusterSize);
-
- void
- makeArray(EntryRef &ref, EntryRef leafRef, LeafNodeType *leafNode);
-
- bool
- insert(EntryRef &ref,
- const KeyType &key, const DataType &data,
- CompareT comp = CompareT());
-
- bool
- remove(EntryRef &ref,
- const KeyType &key,
- CompareT comp = CompareT());
-
- uint32_t
- getNewClusterSize(const KeyDataType *o,
- const KeyDataType *oe,
- AddIter a,
- AddIter ae,
- RemoveIter r,
- RemoveIter re,
- CompareT comp);
-
- void
- applyCluster(const KeyDataType *o,
- const KeyDataType *oe,
- KeyDataType *d,
- const KeyDataType *de,
- AddIter a,
- AddIter ae,
- RemoveIter r,
- RemoveIter re,
- CompareT comp);
-
-
- void
- applyModifyTree(BTreeType *tree,
- AddIter a,
- AddIter ae,
- RemoveIter r,
- RemoveIter re,
- CompareT comp);
-
- void
- applyBuildTree(BTreeType *tree,
- AddIter a,
- AddIter ae,
- RemoveIter r,
- RemoveIter re,
- CompareT comp);
-
- void
- applyNewArray(EntryRef &ref,
- AddIter aOrg,
- AddIter ae);
-
- void
- applyNewTree(EntryRef &ref,
- AddIter a,
- AddIter ae,
- CompareT comp);
-
- void
- applyNew(EntryRef &ref,
- AddIter a,
- AddIter ae,
- CompareT comp);
-
-
- bool
- applyCluster(EntryRef &ref,
- uint32_t clusterSize,
- AddIter a,
- AddIter ae,
- RemoveIter r,
- RemoveIter re,
- CompareT comp);
-
- void
- applyTree(BTreeType *tree,
- AddIter a,
- AddIter ae,
- RemoveIter r,
- RemoveIter re,
- CompareT comp);
-
- void
- normalizeTree(EntryRef &ref,
- BTreeType *tree,
- bool wasArray);
+ KeyDataTypeRefPair allocNewKeyData(uint32_t clusterSize);
+ KeyDataTypeRefPair allocKeyData(uint32_t clusterSize);
+ KeyDataTypeRefPair allocNewKeyDataCopy(const KeyDataType *rhs, uint32_t clusterSize);
+ KeyDataTypeRefPair allocKeyDataCopy(const KeyDataType *rhs, uint32_t clusterSize);
+
+ const KeyDataType * lower_bound(const KeyDataType *b, const KeyDataType *e, const KeyType &key, CompareT comp);
+
+ void makeTree(EntryRef &ref, const KeyDataType *array, uint32_t clusterSize);
+ void makeArray(EntryRef &ref, EntryRef leafRef, LeafNodeType *leafNode);
+ bool insert(EntryRef &ref, const KeyType &key, const DataType &data, CompareT comp = CompareT());
+
+ bool remove(EntryRef &ref, const KeyType &key, CompareT comp = CompareT());
+
+ uint32_t getNewClusterSize(const KeyDataType *o, const KeyDataType *oe, AddIter a, AddIter ae,
+ RemoveIter r, RemoveIter re, CompareT comp);
+
+ void applyCluster(const KeyDataType *o, const KeyDataType *oe, KeyDataType *d, const KeyDataType *de,
+ AddIter a, AddIter ae, RemoveIter r, RemoveIter re, CompareT comp);
+
+ void applyModifyTree(BTreeType *tree, AddIter a, AddIter ae, RemoveIter r, RemoveIter re, CompareT comp);
+ void applyBuildTree(BTreeType *tree, AddIter a, AddIter ae, RemoveIter r, RemoveIter re, CompareT comp);
+ void applyNewArray(EntryRef &ref, AddIter aOrg, AddIter ae);
+ void applyNewTree(EntryRef &ref, AddIter a, AddIter ae, CompareT comp);
+ void applyNew(EntryRef &ref, AddIter a, AddIter ae, CompareT comp);
+
+ bool applyCluster(EntryRef &ref, uint32_t clusterSize, AddIter a, AddIter ae,
+ RemoveIter r, RemoveIter re, CompareT comp);
+
+ void applyTree(BTreeType *tree, AddIter a, AddIter ae, RemoveIter r, RemoveIter re, CompareT comp);
+
+ void normalizeTree(EntryRef &ref, BTreeType *tree, bool wasArray);
/**
* Apply multiple changes at once.
*
* additions and removals should be sorted on key without duplicates.
* Overlap between additions and removals indicates updates.
*/
- void
- apply(EntryRef &ref,
- AddIter a,
- AddIter ae,
- RemoveIter r,
- RemoveIter re,
- CompareT comp = CompareT());
-
- void
- clear(const EntryRef ref);
-
- size_t
- size(const EntryRef ref) const;
+ void apply(EntryRef &ref, AddIter a, AddIter ae, RemoveIter r, RemoveIter re, CompareT comp = CompareT());
- size_t
- frozenSize(const EntryRef ref) const;
+ void clear(const EntryRef ref);
+ size_t size(const EntryRef ref) const;
+ size_t frozenSize(const EntryRef ref) const;
+ Iterator begin(const EntryRef ref) const;
+ ConstIterator beginFrozen(const EntryRef ref) const;
- Iterator
- begin(const EntryRef ref) const;
+ void beginFrozen(const EntryRef ref, std::vector<ConstIterator> &where) const;
- ConstIterator
- beginFrozen(const EntryRef ref) const;
-
- void
- beginFrozen(const EntryRef ref, std::vector<ConstIterator> &where) const;
-
- uint32_t
- getTypeId(RefType ref) const
- {
- return _store.getBufferState(ref.bufferId()).getTypeId();
+ uint32_t getTypeId(RefType ref) const {
+ return _store.getBufferMeta(ref.bufferId()).getTypeId();
}
- static bool
- isSmallArray(uint32_t typeId)
- {
- return typeId < clusterLimit;
- }
-
- bool
- isSmallArray(const EntryRef ref) const;
-
+ static bool isSmallArray(uint32_t typeId) { return typeId < clusterLimit; }
+ bool isSmallArray(const EntryRef ref) const;
static bool isBTree(uint32_t typeId) { return typeId == BUFFERTYPE_BTREE; }
bool isBTree(RefType ref) const { return isBTree(getTypeId(ref)); }
@@ -299,9 +187,7 @@ public:
* Cluster size == 0 means we have a tree for the given reference.
* The reference must be valid.
**/
- static uint32_t
- getClusterSize(uint32_t typeId)
- {
+ static uint32_t getClusterSize(uint32_t typeId) {
return (typeId < clusterLimit) ? typeId + 1 : 0;
}
@@ -310,11 +196,7 @@ public:
* Cluster size == 0 means we have a tree for the given reference.
* The reference must be valid.
**/
- uint32_t
- getClusterSize(RefType ref) const
- {
- return getClusterSize(getTypeId(ref));
- }
+ uint32_t getClusterSize(RefType ref) const { return getClusterSize(getTypeId(ref)); }
const BTreeType * getTreeEntry(RefType ref) const {
return _store.getEntry<BTreeType>(ref);
@@ -329,24 +211,18 @@ public:
}
// Inherit doc from DataStoreBase
- void
- reclaim_memory(generation_t oldest_used_gen)
- {
+ void reclaim_memory(generation_t oldest_used_gen) {
_allocator.reclaim_memory(oldest_used_gen);
_store.reclaim_memory(oldest_used_gen);
}
// Inherit doc from DataStoreBase
- void
- assign_generation(generation_t current_gen)
- {
+ void assign_generation(generation_t current_gen) {
_allocator.assign_generation(current_gen);
_store.assign_generation(current_gen);
}
- void
- reclaim_all_memory()
- {
+ void reclaim_all_memory() {
_allocator.reclaim_all_memory();
_store.reclaim_all_memory();
}
@@ -360,30 +236,23 @@ public:
return usage;
}
- void
- clearBuilder()
- {
+ void clearBuilder() {
_builder.clear();
}
- AggregatedType
- getAggregated(const EntryRef ref) const;
+ AggregatedType getAggregated(const EntryRef ref) const;
template <typename FunctionType>
- void
- foreach_unfrozen_key(EntryRef ref, FunctionType func) const;
+ void foreach_unfrozen_key(EntryRef ref, FunctionType func) const;
template <typename FunctionType>
- void
- foreach_frozen_key(EntryRef ref, FunctionType func) const;
+ void foreach_frozen_key(EntryRef ref, FunctionType func) const;
template <typename FunctionType>
- void
- foreach_unfrozen(EntryRef ref, FunctionType func) const;
+ void foreach_unfrozen(EntryRef ref, FunctionType func) const;
template <typename FunctionType>
- void
- foreach_frozen(EntryRef ref, FunctionType func) const;
+ void foreach_frozen(EntryRef ref, FunctionType func) const;
std::unique_ptr<vespalib::datastore::CompactingBuffers> start_compact_worst_btree_nodes(const CompactionStrategy& compaction_strategy);
void move_btree_nodes(const std::vector<EntryRef>& refs);
@@ -394,12 +263,10 @@ public:
private:
static constexpr size_t MIN_BUFFER_ARRAYS = 128u;
template <typename FunctionType, bool Frozen>
- void
- foreach_key(EntryRef ref, FunctionType func) const;
+ void foreach_key(EntryRef ref, FunctionType func) const;
template <typename FunctionType, bool Frozen>
- void
- foreach(EntryRef ref, FunctionType func) const;
+ void foreach(EntryRef ref, FunctionType func) const;
};
template <typename KeyT, typename DataT, typename AggrT, typename CompareT,
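
As a side note on the typeId encoding documented above (cluster size == 0 means the reference holds a full B-tree), the mapping can be summarized in a few lines. This is an illustrative sketch with assumed values; clusterLimit and BUFFERTYPE_BTREE here are stand-ins for the constants defined in btreestore.h:

    #include <cstdint>

    constexpr uint32_t clusterLimit     = 8;            // assumption: number of small-array buffer types
    constexpr uint32_t BUFFERTYPE_BTREE = clusterLimit; // assumption: the B-tree type follows the small arrays

    // typeIds 0..clusterLimit-1 denote small arrays holding typeId + 1 entries;
    // the B-tree buffer type reports cluster size 0, meaning "this reference is a tree".
    constexpr uint32_t getClusterSize(uint32_t typeId) {
        return (typeId < clusterLimit) ? typeId + 1 : 0;
    }
    constexpr bool isSmallArray(uint32_t typeId) { return typeId < clusterLimit; }
    constexpr bool isBTree(uint32_t typeId)      { return typeId == BUFFERTYPE_BTREE; }

    static_assert(getClusterSize(0) == 1);                 // smallest cluster holds one entry
    static_assert(getClusterSize(clusterLimit - 1) == clusterLimit);
    static_assert(getClusterSize(BUFFERTYPE_BTREE) == 0);  // 0 signals a full B-tree
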
diff --git a/vespalib/src/vespa/vespalib/btree/btreestore.hpp b/vespalib/src/vespa/vespalib/btree/btreestore.hpp
index 6b2c4d924cd..a19d0b34aa6 100644
--- a/vespalib/src/vespa/vespalib/btree/btreestore.hpp
+++ b/vespalib/src/vespa/vespalib/btree/btreestore.hpp
@@ -850,7 +850,7 @@ isSmallArray(const EntryRef ref) const
if (!ref.valid())
return true;
RefType iRef(ref);
- uint32_t typeId(_store.getBufferState(iRef.bufferId()).getTypeId());
+ uint32_t typeId(_store.getBufferMeta(iRef.bufferId()).getTypeId());
return typeId < clusterLimit;
}
diff --git a/vespalib/src/vespa/vespalib/datastore/bufferstate.h b/vespalib/src/vespa/vespalib/datastore/bufferstate.h
index 3f023b41c51..aa7f6dfdfa4 100644
--- a/vespalib/src/vespa/vespalib/datastore/bufferstate.h
+++ b/vespalib/src/vespa/vespalib/datastore/bufferstate.h
@@ -40,14 +40,14 @@ public:
private:
InternalBufferStats _stats;
- BufferFreeList _free_list;
+ BufferFreeList _free_list;
std::atomic<BufferTypeBase*> _typeHandler;
- Alloc _buffer;
- uint32_t _arraySize;
- uint16_t _typeId;
+ Alloc _buffer;
+ uint32_t _arraySize;
+ uint16_t _typeId;
std::atomic<State> _state;
- bool _disableElemHoldList : 1;
- bool _compacting : 1;
+ bool _disableElemHoldList : 1;
+ bool _compacting : 1;
public:
/**
@@ -130,7 +130,27 @@ public:
BufferTypeBase *getTypeHandler() { return _typeHandler.load(std::memory_order_relaxed); }
void resume_primary_buffer(uint32_t buffer_id);
+};
+class BufferAndMeta {
+public:
+ BufferAndMeta() : BufferAndMeta(nullptr, 0, 0) { }
+ BufferAndMeta(void* buffer, uint32_t typeId, uint32_t arraySize)
+ : _buffer(buffer),
+ _typeId(typeId),
+ _arraySize(arraySize)
+ { }
+ std::atomic<void*>& get_atomic_buffer() noexcept { return _buffer; }
+ void* get_buffer_relaxed() noexcept { return _buffer.load(std::memory_order_relaxed); }
+ const void* get_buffer_acquire() const noexcept { return _buffer.load(std::memory_order_acquire); }
+ uint32_t getTypeId() const { return _typeId; }
+ uint32_t getArraySize() const { return _arraySize; }
+ void setTypeId(uint32_t typeId) { _typeId = typeId; }
+ void setArraySize(uint32_t arraySize) { _arraySize = arraySize; }
+private:
+ std::atomic<void*> _buffer;
+ uint32_t _typeId;
+ uint32_t _arraySize;
};
}
diff --git a/vespalib/src/vespa/vespalib/datastore/datastorebase.cpp b/vespalib/src/vespa/vespalib/datastore/datastorebase.cpp
index 42234194040..a232bc2ef87 100644
--- a/vespalib/src/vespa/vespalib/datastore/datastorebase.cpp
+++ b/vespalib/src/vespa/vespalib/datastore/datastorebase.cpp
@@ -273,14 +273,14 @@ vespalib::MemoryUsage
DataStoreBase::getMemoryUsage() const {
auto usage = getDynamicMemoryUsage();
size_t extra_allocated = 0;
- extra_allocated += _buffers.capacity() * sizeof(BufferAndTypeId);
+ extra_allocated += _buffers.capacity() * sizeof(BufferAndMeta);
extra_allocated += _primary_buffer_ids.capacity() * sizeof(uint32_t);
extra_allocated += _states.capacity() * sizeof(BufferState);
extra_allocated += _typeHandlers.capacity() * sizeof(BufferTypeBase *);
extra_allocated += _free_lists.capacity() * sizeof(FreeList);
size_t extra_used = 0;
- extra_used += _buffers.size() * sizeof(BufferAndTypeId);
+ extra_used += _buffers.size() * sizeof(BufferAndMeta);
extra_used += _primary_buffer_ids.size() * sizeof(uint32_t);
extra_used += _states.size() * sizeof(BufferState);
extra_used += _typeHandlers.size() * sizeof(BufferTypeBase *);
@@ -398,9 +398,11 @@ DataStoreBase::onActive(uint32_t bufferId, uint32_t typeId, size_t elemsNeeded)
{
assert(typeId < _typeHandlers.size());
assert(bufferId < _numBuffers);
- _buffers[bufferId].setTypeId(typeId);
BufferState &state = getBufferState(bufferId);
- state.onActive(bufferId, typeId, _typeHandlers[typeId], elemsNeeded, _buffers[bufferId].get_atomic_buffer());
+ BufferAndMeta & bufferMeta = _buffers[bufferId];
+ state.onActive(bufferId, typeId, _typeHandlers[typeId], elemsNeeded, bufferMeta.get_atomic_buffer());
+ bufferMeta.setTypeId(typeId);
+ bufferMeta.setArraySize(state.getArraySize());
enableFreeList(bufferId);
}
diff --git a/vespalib/src/vespa/vespalib/datastore/datastorebase.h b/vespalib/src/vespa/vespalib/datastore/datastorebase.h
index 950e1967ee2..9efd91ef138 100644
--- a/vespalib/src/vespa/vespalib/datastore/datastorebase.h
+++ b/vespalib/src/vespa/vespalib/datastore/datastorebase.h
@@ -75,6 +75,7 @@ public:
uint32_t primary_buffer_id(uint32_t typeId) const { return _primary_buffer_ids[typeId]; }
const BufferState &getBufferState(uint32_t bufferId) const { return _states[bufferId]; }
BufferState &getBufferState(uint32_t bufferId) { return _states[bufferId]; }
+ const BufferAndMeta & getBufferMeta(uint32_t bufferId) const { return _buffers[bufferId]; }
uint32_t getNumBuffers() const { return _numBuffers; }
/**
@@ -246,21 +247,7 @@ private:
virtual void reclaim_all_entry_refs() = 0;
- class BufferAndTypeId {
- public:
- BufferAndTypeId() : BufferAndTypeId(nullptr, 0) { }
- BufferAndTypeId(void* buffer, uint32_t typeId) : _buffer(buffer), _typeId(typeId) { }
- std::atomic<void*>& get_atomic_buffer() noexcept { return _buffer; }
- void* get_buffer_relaxed() noexcept { return _buffer.load(std::memory_order_relaxed); }
- const void* get_buffer_acquire() const noexcept { return _buffer.load(std::memory_order_acquire); }
- uint32_t getTypeId() const { return _typeId; }
- void setTypeId(uint32_t typeId) { _typeId = typeId; }
- private:
- std::atomic<void*> _buffer;
- uint32_t _typeId;
- };
-
- std::vector<BufferAndTypeId> _buffers; // For fast mapping with known types
+ std::vector<BufferAndMeta> _buffers; // For fast mapping with known types
// Provides a mapping from typeId -> primary buffer for that type.
// The primary buffer is used for allocations of new element(s) if no available slots are found in free lists.
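
The comment above on the typeId -> primary buffer mapping describes the allocation preference: reuse a free-list slot when one exists, otherwise allocate from the primary buffer registered for that type. A tiny, purely illustrative sketch of that policy, with hypothetical helper names that are not part of DataStoreBase:

    #include <cstdint>
    #include <optional>
    #include <vector>

    // Hypothetical stand-ins used only to illustrate the preference described above.
    struct FreeList {
        std::vector<uint32_t> free_refs;                 // entry refs released earlier
        std::optional<uint32_t> pop() {
            if (free_refs.empty()) return std::nullopt;
            uint32_t ref = free_refs.back();
            free_refs.pop_back();
            return ref;
        }
    };

    struct AllocDecision {
        bool reuse_free_slot;
        uint32_t value;          // entry ref when reusing, primary buffer id otherwise
    };

    // Prefer a reusable slot from the free list; otherwise new element(s) go into the
    // primary buffer registered for this type id.
    AllocDecision choose(FreeList& free_list,
                         const std::vector<uint32_t>& primary_buffer_ids, uint32_t typeId) {
        if (auto ref = free_list.pop()) {
            return {true, *ref};
        }
        return {false, primary_buffer_ids[typeId]};
    }
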
diff --git a/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.h b/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.h
index 265478fbaf5..a85b73f423d 100644
--- a/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.h
+++ b/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.h
@@ -114,7 +114,7 @@ public:
EntryRef move_on_compact(EntryRef ref) override;
const UniqueStoreEntryBase& get_wrapped(EntryRef ref) const {
RefType iRef(ref);
- auto &state = _store.getBufferState(iRef.bufferId());
+ auto &state = _store.getBufferMeta(iRef.bufferId());
auto type_id = state.getTypeId();
if (type_id != 0) {
return *reinterpret_cast<const UniqueStoreEntryBase *>(_store.template getEntryArray<char>(iRef, state.getArraySize()));
@@ -124,7 +124,7 @@ public:
}
const char *get(EntryRef ref) const {
RefType iRef(ref);
- auto &state = _store.getBufferState(iRef.bufferId());
+ auto &state = _store.getBufferMeta(iRef.bufferId());
auto type_id = state.getTypeId();
if (type_id != 0) {
return reinterpret_cast<const UniqueStoreSmallStringEntry *>(_store.template getEntryArray<char>(iRef, state.getArraySize()))->value();