summaryrefslogtreecommitdiffstats
path: root/vespalib
diff options
context:
space:
mode:
authorTor Egge <Tor.Egge@online.no>2023-04-04 10:41:47 +0200
committerTor Egge <Tor.Egge@online.no>2023-04-04 10:41:47 +0200
commit3b01b72973b9decf90f58239b900806a50047961 (patch)
tree4a56b8b153e726242610edb186d9eac8cce7f731 /vespalib
parent0e4087a7f26dcf324fdc378bf437f29fee9af2b2 (diff)
Switch from elements to entries for raw allocator alloc parameter.
Diffstat (limited to 'vespalib')
-rw-r--r--vespalib/src/tests/datastore/datastore/datastore_test.cpp14
-rw-r--r--vespalib/src/vespa/vespalib/btree/btreestore.hpp4
-rw-r--r--vespalib/src/vespa/vespalib/datastore/allocator.h2
-rw-r--r--vespalib/src/vespa/vespalib/datastore/allocator.hpp4
-rw-r--r--vespalib/src/vespa/vespalib/datastore/array_store.hpp2
-rw-r--r--vespalib/src/vespa/vespalib/datastore/free_list_allocator.h2
-rw-r--r--vespalib/src/vespa/vespalib/datastore/free_list_allocator.hpp6
-rw-r--r--vespalib/src/vespa/vespalib/datastore/free_list_raw_allocator.h2
-rw-r--r--vespalib/src/vespa/vespalib/datastore/free_list_raw_allocator.hpp9
-rw-r--r--vespalib/src/vespa/vespalib/datastore/raw_allocator.h6
-rw-r--r--vespalib/src/vespa/vespalib/datastore/raw_allocator.hpp9
-rw-r--r--vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.hpp4
12 files changed, 32 insertions, 32 deletions
diff --git a/vespalib/src/tests/datastore/datastore/datastore_test.cpp b/vespalib/src/tests/datastore/datastore/datastore_test.cpp
index df347267c7e..77e07ce8047 100644
--- a/vespalib/src/tests/datastore/datastore/datastore_test.cpp
+++ b/vespalib/src/tests/datastore/datastore/datastore_test.cpp
@@ -87,7 +87,7 @@ public:
while (sizes.size() < bufs) {
RefType iRef = (_type.getArraySize() == 1) ?
(_store.template allocator<DataType>(_typeId).alloc().ref) :
- (_store.template allocator<DataType>(_typeId).allocArray(_type.getArraySize()).ref);
+ (_store.template allocator<DataType>(_typeId).allocArray().ref);
int bufferId = iRef.bufferId();
if (bufferId != prevBufferId) {
if (prevBufferId >= 0) {
@@ -126,7 +126,7 @@ public:
while (buffers.size() < bufs) {
RefType iRef = (_type.getArraySize() == 1) ?
(_store.template allocator<DataType>(_typeId).alloc().ref) :
- (_store.template allocator<DataType>(_typeId).allocArray(_type.getArraySize()).ref);
+ (_store.template allocator<DataType>(_typeId).allocArray().ref);
int buffer_id = iRef.bufferId();
if (buffers.empty() || buffers.back() != buffer_id) {
buffers.push_back(buffer_id);
@@ -389,21 +389,21 @@ TEST(DataStoreTest, require_that_we_can_use_free_lists_with_raw_allocator)
s.enableFreeLists();
auto allocator = s.freeListRawAllocator<int>(grow_store.typeId());
- auto h1 = allocator.alloc(3);
- auto h2 = allocator.alloc(3);
+ auto h1 = allocator.alloc(1);
+ auto h2 = allocator.alloc(1);
expect_successive_handles(h1, h2);
s.holdElem(h1.ref, 3);
s.holdElem(h2.ref, 3);
s.assign_generation(10);
s.reclaim_entry_refs(11);
- auto h3 = allocator.alloc(3); // reuse h2.ref from free list
+ auto h3 = allocator.alloc(1); // reuse h2.ref from free list
EXPECT_EQ(h2, h3);
- auto h4 = allocator.alloc(3); // reuse h1.ref from free list
+ auto h4 = allocator.alloc(1); // reuse h1.ref from free list
EXPECT_EQ(h1, h4);
- auto h5 = allocator.alloc(3);
+ auto h5 = allocator.alloc(1);
expect_successive_handles(h2, h5);
expect_successive_handles(h3, h5);
}
diff --git a/vespalib/src/vespa/vespalib/btree/btreestore.hpp b/vespalib/src/vespa/vespalib/btree/btreestore.hpp
index a19d0b34aa6..7a5334593b8 100644
--- a/vespalib/src/vespa/vespalib/btree/btreestore.hpp
+++ b/vespalib/src/vespa/vespalib/btree/btreestore.hpp
@@ -74,7 +74,7 @@ allocNewKeyData(uint32_t clusterSize)
{
assert(clusterSize >= 1 && clusterSize <= clusterLimit);
uint32_t typeId = clusterSize - 1;
- return _store.allocator<KeyDataType>(typeId).allocArray(clusterSize);
+ return _store.allocator<KeyDataType>(typeId).allocArray();
}
@@ -87,7 +87,7 @@ allocKeyData(uint32_t clusterSize)
{
assert(clusterSize >= 1 && clusterSize <= clusterLimit);
uint32_t typeId = clusterSize - 1;
- return _store.freeListAllocator<KeyDataType, datastore::DefaultReclaimer<KeyDataType>>(typeId).allocArray(clusterSize);
+ return _store.freeListAllocator<KeyDataType, datastore::DefaultReclaimer<KeyDataType>>(typeId).allocArray();
}
diff --git a/vespalib/src/vespa/vespalib/datastore/allocator.h b/vespalib/src/vespa/vespalib/datastore/allocator.h
index 297270af0f5..30938bdc1c1 100644
--- a/vespalib/src/vespa/vespalib/datastore/allocator.h
+++ b/vespalib/src/vespa/vespalib/datastore/allocator.h
@@ -30,7 +30,7 @@ public:
HandleType alloc(Args && ... args);
HandleType allocArray(ConstArrayRef array);
- HandleType allocArray(size_t size);
+ HandleType allocArray();
};
}
diff --git a/vespalib/src/vespa/vespalib/datastore/allocator.hpp b/vespalib/src/vespa/vespalib/datastore/allocator.hpp
index 85f0e842519..12a4c2f8749 100644
--- a/vespalib/src/vespa/vespalib/datastore/allocator.hpp
+++ b/vespalib/src/vespa/vespalib/datastore/allocator.hpp
@@ -54,13 +54,13 @@ Allocator<EntryT, RefT>::allocArray(ConstArrayRef array)
template <typename EntryT, typename RefT>
typename Allocator<EntryT, RefT>::HandleType
-Allocator<EntryT, RefT>::allocArray(size_t size)
+Allocator<EntryT, RefT>::allocArray()
{
+ auto size = _store.getBufferState(_store.primary_buffer_id(_typeId)).getArraySize();
_store.ensureBufferCapacity(_typeId, size);
uint32_t buffer_id = _store.primary_buffer_id(_typeId);
BufferState &state = _store.getBufferState(buffer_id);
assert(state.isActive());
- assert(state.getArraySize() == size);
size_t oldBufferSize = state.size();
assert((oldBufferSize % size) == 0);
RefT ref((oldBufferSize / size), buffer_id);
diff --git a/vespalib/src/vespa/vespalib/datastore/array_store.hpp b/vespalib/src/vespa/vespalib/datastore/array_store.hpp
index 3ccde2e16a9..e55e7bfc15f 100644
--- a/vespalib/src/vespa/vespalib/datastore/array_store.hpp
+++ b/vespalib/src/vespa/vespalib/datastore/array_store.hpp
@@ -114,7 +114,7 @@ EntryRef
ArrayStore<ElemT, RefT, TypeMapperT>::allocate_small_array(size_t array_size)
{
uint32_t type_id = _mapper.get_type_id(array_size);
- return _store.template freeListRawAllocator<ElemT>(type_id).alloc(array_size).ref;
+ return _store.template freeListRawAllocator<ElemT>(type_id).alloc(1).ref;
}
template <typename ElemT, typename RefT, typename TypeMapperT>
diff --git a/vespalib/src/vespa/vespalib/datastore/free_list_allocator.h b/vespalib/src/vespa/vespalib/datastore/free_list_allocator.h
index cf899a76712..dc2d1ea3c34 100644
--- a/vespalib/src/vespa/vespalib/datastore/free_list_allocator.h
+++ b/vespalib/src/vespa/vespalib/datastore/free_list_allocator.h
@@ -29,7 +29,7 @@ public:
HandleType alloc(Args && ... args);
HandleType allocArray(ConstArrayRef array);
- HandleType allocArray(size_t size);
+ HandleType allocArray();
};
}
diff --git a/vespalib/src/vespa/vespalib/datastore/free_list_allocator.hpp b/vespalib/src/vespa/vespalib/datastore/free_list_allocator.hpp
index b793e4f77a2..0bc26260127 100644
--- a/vespalib/src/vespa/vespalib/datastore/free_list_allocator.hpp
+++ b/vespalib/src/vespa/vespalib/datastore/free_list_allocator.hpp
@@ -82,13 +82,13 @@ FreeListAllocator<EntryT, RefT, ReclaimerT>::allocArray(ConstArrayRef array)
template <typename EntryT, typename RefT, typename ReclaimerT>
typename Allocator<EntryT, RefT>::HandleType
-FreeListAllocator<EntryT, RefT, ReclaimerT>::allocArray(size_t size)
+FreeListAllocator<EntryT, RefT, ReclaimerT>::allocArray()
{
auto& free_list = _store.getFreeList(_typeId);
if (free_list.empty()) {
- return ParentType::allocArray(size);
+ return ParentType::allocArray();
}
- assert(free_list.array_size() == size);
+ auto size = free_list.array_size();
RefT ref = free_list.pop_entry();
EntryT *buf = _store.template getEntryArray<EntryT>(ref, size);
return HandleType(ref, buf);
diff --git a/vespalib/src/vespa/vespalib/datastore/free_list_raw_allocator.h b/vespalib/src/vespa/vespalib/datastore/free_list_raw_allocator.h
index 1b71c22f0ce..29684267546 100644
--- a/vespalib/src/vespa/vespalib/datastore/free_list_raw_allocator.h
+++ b/vespalib/src/vespa/vespalib/datastore/free_list_raw_allocator.h
@@ -27,7 +27,7 @@ private:
public:
FreeListRawAllocator(DataStoreBase &store, uint32_t typeId);
- HandleType alloc(size_t numElems);
+ HandleType alloc(size_t num_entries);
};
}
diff --git a/vespalib/src/vespa/vespalib/datastore/free_list_raw_allocator.hpp b/vespalib/src/vespa/vespalib/datastore/free_list_raw_allocator.hpp
index af832955cb7..c40e3db4dba 100644
--- a/vespalib/src/vespa/vespalib/datastore/free_list_raw_allocator.hpp
+++ b/vespalib/src/vespa/vespalib/datastore/free_list_raw_allocator.hpp
@@ -14,16 +14,17 @@ FreeListRawAllocator<EntryT, RefT>::FreeListRawAllocator(DataStoreBase &store, u
template <typename EntryT, typename RefT>
typename FreeListRawAllocator<EntryT, RefT>::HandleType
-FreeListRawAllocator<EntryT, RefT>::alloc(size_t numElems)
+FreeListRawAllocator<EntryT, RefT>::alloc(size_t num_entries)
{
auto& free_list = _store.getFreeList(_typeId);
if (free_list.empty()) {
- return ParentType::alloc(numElems);
+ return ParentType::alloc(num_entries);
}
- assert(free_list.array_size() == numElems);
+ auto array_size = free_list.array_size();
+ assert(num_entries == 1);
RefT ref = free_list.pop_entry();
// We must scale the offset according to array size as it was divided when the entry ref was created.
- EntryT *entry = _store.template getEntryArray<EntryT>(ref, numElems);
+ EntryT *entry = _store.template getEntryArray<EntryT>(ref, array_size);
return HandleType(ref, entry);
}
diff --git a/vespalib/src/vespa/vespalib/datastore/raw_allocator.h b/vespalib/src/vespa/vespalib/datastore/raw_allocator.h
index c10c8152e72..e7a59fadcf8 100644
--- a/vespalib/src/vespa/vespalib/datastore/raw_allocator.h
+++ b/vespalib/src/vespa/vespalib/datastore/raw_allocator.h
@@ -25,10 +25,10 @@ protected:
public:
RawAllocator(DataStoreBase &store, uint32_t typeId);
- HandleType alloc(size_t numElems) {
- return alloc(numElems, 0);
+ HandleType alloc(size_t num_entries) {
+ return alloc(num_entries, 0);
}
- HandleType alloc(size_t numElems, size_t extraElems);
+ HandleType alloc(size_t num_entries, size_t extra_entries);
};
}
diff --git a/vespalib/src/vespa/vespalib/datastore/raw_allocator.hpp b/vespalib/src/vespa/vespalib/datastore/raw_allocator.hpp
index 04d99588218..a8792e3a307 100644
--- a/vespalib/src/vespa/vespalib/datastore/raw_allocator.hpp
+++ b/vespalib/src/vespa/vespalib/datastore/raw_allocator.hpp
@@ -16,19 +16,18 @@ RawAllocator<EntryT, RefT>::RawAllocator(DataStoreBase &store, uint32_t typeId)
template <typename EntryT, typename RefT>
typename RawAllocator<EntryT, RefT>::HandleType
-RawAllocator<EntryT, RefT>::alloc(size_t numElems, size_t extraElems)
+RawAllocator<EntryT, RefT>::alloc(size_t num_entries, size_t extra_entries)
{
- _store.ensureBufferCapacity(_typeId, numElems + extraElems);
+ size_t arraySize = _store.getBufferState(_store.primary_buffer_id(_typeId)).getArraySize();
+ _store.ensureBufferCapacity(_typeId, (num_entries + extra_entries) * arraySize);
uint32_t buffer_id = _store.primary_buffer_id(_typeId);
BufferState &state = _store.getBufferState(buffer_id);
assert(state.isActive());
size_t oldBufferSize = state.size();
// Must perform scaling ourselves, according to array size
- size_t arraySize = state.getArraySize();
- assert((numElems % arraySize) == 0u);
RefT ref((oldBufferSize / arraySize), buffer_id);
EntryT *buffer = _store.getEntryArray<EntryT>(ref, arraySize);
- state.stats().pushed_back(numElems);
+ state.stats().pushed_back(num_entries * arraySize);
return HandleType(ref, buffer);
}
diff --git a/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.hpp b/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.hpp
index 65cab4850ba..c7cf8208615 100644
--- a/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.hpp
+++ b/vespalib/src/vespa/vespalib/datastore/unique_store_string_allocator.hpp
@@ -42,7 +42,7 @@ UniqueStoreStringAllocator<RefT>::allocate(const char *value)
uint32_t type_id = string_allocator::get_type_id(value_len);
if (type_id != 0) {
size_t array_size = string_allocator::array_sizes[type_id - 1];
- auto handle = _store.template freeListRawAllocator<char>(type_id).alloc(array_size);
+ auto handle = _store.template freeListRawAllocator<char>(type_id).alloc(1);
new (static_cast<void *>(handle.data)) UniqueStoreSmallStringEntry(value, value_len, array_size);
return handle.ref;
} else {
@@ -79,7 +79,7 @@ UniqueStoreStringAllocator<RefT>::move_on_compact(EntryRef ref)
static_assert(std::is_trivially_copyable<UniqueStoreSmallStringEntry>::value,
"UniqueStoreSmallStringEntry must be trivially copyable");
size_t array_size = string_allocator::array_sizes[type_id - 1];
- auto handle = _store.template rawAllocator<char>(type_id).alloc(array_size);
+ auto handle = _store.template rawAllocator<char>(type_id).alloc(1);
auto orig = _store.template getEntryArray<char>(iRef, array_size);
memcpy(handle.data, orig, array_size);
return handle.ref;