-rw-r--r--  searchlib/src/vespa/searchlib/attribute/postingstore.h    |  3
-rw-r--r--  vespalib/src/tests/btree/btree_store/btree_store_test.cpp | 91
-rw-r--r--  vespalib/src/vespa/vespalib/btree/btreestore.h            |  7
-rw-r--r--  vespalib/src/vespa/vespalib/btree/btreestore.hpp          | 44
4 files changed, 100 insertions, 45 deletions
diff --git a/searchlib/src/vespa/searchlib/attribute/postingstore.h b/searchlib/src/vespa/searchlib/attribute/postingstore.h
index 74a147b3ccb..2b119a55158 100644
--- a/searchlib/src/vespa/searchlib/attribute/postingstore.h
+++ b/searchlib/src/vespa/searchlib/attribute/postingstore.h
@@ -89,6 +89,7 @@ public:
using Parent::getWTreeEntry;
using Parent::getTreeEntry;
using Parent::getKeyDataEntry;
+ using Parent::isBTree;
using Parent::clusterLimit;
using Parent::allocBTree;
using Parent::allocBTreeCopy;
@@ -107,8 +108,6 @@ public:
bool removeSparseBitVectors() override;
void consider_remove_sparse_bitvector(std::vector<EntryRef> &refs);
static bool isBitVector(uint32_t typeId) { return typeId == BUFFERTYPE_BITVECTOR; }
- static bool isBTree(uint32_t typeId) { return typeId == BUFFERTYPE_BTREE; }
- bool isBTree(RefType ref) const { return isBTree(getTypeId(ref)); }
void applyNew(EntryRef &ref, AddIter a, AddIter ae);
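The hunk above hoists the isBTree() helpers out of PostingStore and into its parent BTreeStore (see the btreestore.h hunk further down), leaving only a using-declaration behind so existing call sites keep resolving the name. A minimal sketch of that pattern, with placeholder names rather than the real Vespa classes:

    #include <cstdint>

    class Base {
    protected:
        // Helper now lives in the base class; the type id value is assumed.
        static bool isBTree(uint32_t typeId) { return typeId == 1; }
    };

    class Derived : public Base {
    public:
        // Re-export so callers of Derived can keep using the name unqualified.
        using Base::isBTree;
    };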
diff --git a/vespalib/src/tests/btree/btree_store/btree_store_test.cpp b/vespalib/src/tests/btree/btree_store/btree_store_test.cpp
index e7d923d0e87..77cb8e519e4 100644
--- a/vespalib/src/tests/btree/btree_store/btree_store_test.cpp
+++ b/vespalib/src/tests/btree/btree_store/btree_store_test.cpp
@@ -73,61 +73,112 @@ BTreeStoreTest::~BTreeStoreTest()
inc_generation();
}
+namespace {
+
+class ChangeWriter {
+ std::vector<EntryRef*> _old_refs;
+public:
+ ChangeWriter(uint32_t capacity);
+ ~ChangeWriter();
+ void write(const std::vector<EntryRef>& refs);
+ void emplace_back(EntryRef& ref) { _old_refs.emplace_back(&ref); }
+};
+
+ChangeWriter::ChangeWriter(uint32_t capacity)
+ : _old_refs()
+{
+ _old_refs.reserve(capacity);
+}
+
+ChangeWriter::~ChangeWriter() = default;
+
+void
+ChangeWriter::write(const std::vector<EntryRef> &refs)
+{
+ assert(refs.size() == _old_refs.size());
+ auto old_ref_itr = _old_refs.begin();
+ for (auto ref : refs) {
+ **old_ref_itr = ref;
+ ++old_ref_itr;
+ }
+ assert(old_ref_itr == _old_refs.end());
+ _old_refs.clear();
+}
+
+}
+
void
BTreeStoreTest::test_compact_sequence(uint32_t sequence_length)
{
auto &store = _store;
+ uint32_t entry_ref_offset_bits = TreeStore::RefType::offset_bits;
EntryRef ref1 = add_sequence(4, 4 + sequence_length);
EntryRef ref2 = add_sequence(5, 5 + sequence_length);
- EntryRef old_ref1 = ref1;
- EntryRef old_ref2 = ref2;
std::vector<EntryRef> refs;
+ refs.reserve(2);
+ refs.emplace_back(ref1);
+ refs.emplace_back(ref2);
+ std::vector<EntryRef> temp_refs;
for (int i = 0; i < 1000; ++i) {
- refs.emplace_back(add_sequence(i + 6, i + 6 + sequence_length));
+ temp_refs.emplace_back(add_sequence(i + 6, i + 6 + sequence_length));
}
- for (auto& ref : refs) {
+ for (auto& ref : temp_refs) {
store.clear(ref);
}
inc_generation();
+ ChangeWriter change_writer(refs.size());
+ std::vector<EntryRef> move_refs;
+ move_refs.reserve(refs.size());
auto usage_before = store.getMemoryUsage();
for (uint32_t pass = 0; pass < 15; ++pass) {
auto to_hold = store.start_compact_worst_buffers();
- ref1 = store.move(ref1);
- ref2 = store.move(ref2);
+ std::vector<bool> filter(TreeStore::RefType::numBuffers());
+ for (auto buffer_id : to_hold) {
+ filter[buffer_id] = true;
+ }
+ for (auto& ref : refs) {
+ if (ref.valid() && filter[ref.buffer_id(entry_ref_offset_bits)]) {
+ move_refs.emplace_back(ref);
+ change_writer.emplace_back(ref);
+ }
+ }
+ store.move(move_refs);
+ change_writer.write(move_refs);
+ move_refs.clear();
store.finishCompact(to_hold);
inc_generation();
}
- EXPECT_NE(old_ref1, ref1);
- EXPECT_NE(old_ref2, ref2);
- EXPECT_EQ(make_exp_sequence(4, 4 + sequence_length), get_sequence(ref1));
- EXPECT_EQ(make_exp_sequence(5, 5 + sequence_length), get_sequence(ref2));
+ EXPECT_NE(ref1, refs[0]);
+ EXPECT_NE(ref2, refs[1]);
+ EXPECT_EQ(make_exp_sequence(4, 4 + sequence_length), get_sequence(refs[0]));
+ EXPECT_EQ(make_exp_sequence(5, 5 + sequence_length), get_sequence(refs[1]));
auto usage_after = store.getMemoryUsage();
EXPECT_GT(usage_before.deadBytes(), usage_after.deadBytes());
- store.clear(ref1);
- store.clear(ref2);
+ store.clear(refs[0]);
+ store.clear(refs[1]);
}
TEST_F(BTreeStoreTest, require_that_nodes_for_multiple_btrees_are_compacted)
{
auto &store = this->_store;
- EntryRef ref1 = add_sequence(4, 40);
- EntryRef ref2 = add_sequence(100, 130);
+ std::vector<EntryRef> refs;
+ refs.emplace_back(add_sequence(4, 40));
+ refs.emplace_back(add_sequence(100, 130));
store.clear(add_sequence(1000, 20000));
inc_generation();
auto usage_before = store.getMemoryUsage();
for (uint32_t pass = 0; pass < 15; ++pass) {
auto to_hold = store.start_compact_worst_btree_nodes();
- store.move_btree_nodes(ref1);
- store.move_btree_nodes(ref2);
+ store.move_btree_nodes(refs);
store.finish_compact_worst_btree_nodes(to_hold);
inc_generation();
}
- EXPECT_EQ(make_exp_sequence(4, 40), get_sequence(ref1));
- EXPECT_EQ(make_exp_sequence(100, 130), get_sequence(ref2));
+ EXPECT_EQ(make_exp_sequence(4, 40), get_sequence(refs[0]));
+ EXPECT_EQ(make_exp_sequence(100, 130), get_sequence(refs[1]));
auto usage_after = store.getMemoryUsage();
EXPECT_GT(usage_before.deadBytes(), usage_after.deadBytes());
- store.clear(ref1);
- store.clear(ref2);
+ store.clear(refs[0]);
+ store.clear(refs[1]);
}
TEST_F(BTreeStoreTest, require_that_short_arrays_are_compacted)
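For reference, the ChangeWriter added above records the addresses of the refs that are about to move; after the batched store.move(), write() copies each new ref value back into its original slot, in the same order. A self-contained toy sketch of that mechanism (plain std types, no Vespa dependencies; the "+100" move is a stand-in for the real relocation):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    struct Ref { uint32_t v; };

    int main() {
        std::vector<Ref> live = {{10}, {20}, {30}};
        std::vector<Ref*> slots;      // ChangeWriter: where to write back
        std::vector<Ref>  move_refs;  // batch handed to move()
        for (auto& r : live) {
            if (r.v == 20) {          // pretend only this ref's buffer compacts
                move_refs.push_back(r);
                slots.push_back(&r);  // ChangeWriter::emplace_back(ref)
            }
        }
        for (auto& r : move_refs) {   // stand-in for store.move(move_refs)
            r.v += 100;               // rewrites each batched ref in place
        }
        assert(slots.size() == move_refs.size());
        for (size_t i = 0; i < slots.size(); ++i) {
            *slots[i] = move_refs[i]; // ChangeWriter::write(move_refs)
        }
        assert(live[1].v == 120);     // the live ref now points at the copy
        return 0;
    }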
diff --git a/vespalib/src/vespa/vespalib/btree/btreestore.h b/vespalib/src/vespa/vespalib/btree/btreestore.h
index 82913987e44..b4238757e46 100644
--- a/vespalib/src/vespa/vespalib/btree/btreestore.h
+++ b/vespalib/src/vespa/vespalib/btree/btreestore.h
@@ -298,6 +298,9 @@ public:
bool
isSmallArray(const EntryRef ref) const;
+ static bool isBTree(uint32_t typeId) { return typeId == BUFFERTYPE_BTREE; }
+ bool isBTree(RefType ref) const { return isBTree(getTypeId(ref)); }
+
/**
* Returns the cluster size for the type id.
* Cluster size == 0 means we have a tree for the given reference.
@@ -391,10 +394,10 @@ public:
std::vector<uint32_t> start_compact_worst_btree_nodes();
void finish_compact_worst_btree_nodes(const std::vector<uint32_t>& to_hold);
- void move_btree_nodes(EntryRef ref);
+ void move_btree_nodes(const std::vector<EntryRef>& refs);
std::vector<uint32_t> start_compact_worst_buffers();
- EntryRef move(EntryRef ref);
+ void move(std::vector<EntryRef>& refs);
private:
static constexpr size_t MIN_BUFFER_ARRAYS = 128u;
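The header change turns both compaction entry points from per-ref calls into batched ones, and move() now updates its argument refs in place instead of returning a replacement. A hypothetical call-site migration (store, refs, filter, and offset_bits here mirror the test above; this is not a separate real API):

    // Before: move() itself skipped refs that were not being compacted.
    //     ref1 = store.move(ref1);
    //     ref2 = store.move(ref2);
    //
    // After: the caller pre-filters; move() asserts every ref is valid
    // and in a compacting buffer, then rewrites the vector in place.
    std::vector<EntryRef> move_refs;
    for (auto& ref : refs) {
        if (ref.valid() && filter[ref.buffer_id(offset_bits)]) {
            move_refs.emplace_back(ref);
        }
    }
    store.move(move_refs);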
diff --git a/vespalib/src/vespa/vespalib/btree/btreestore.hpp b/vespalib/src/vespa/vespalib/btree/btreestore.hpp
index 15c546a0368..795e526f927 100644
--- a/vespalib/src/vespa/vespalib/btree/btreestore.hpp
+++ b/vespalib/src/vespa/vespalib/btree/btreestore.hpp
@@ -991,15 +991,15 @@ template <typename KeyT, typename DataT, typename AggrT, typename CompareT,
typename TraitsT, typename AggrCalcT>
void
BTreeStore<KeyT, DataT, AggrT, CompareT, TraitsT, AggrCalcT>::
-move_btree_nodes(EntryRef ref)
+move_btree_nodes(const std::vector<EntryRef>& refs)
{
- if (ref.valid()) {
+ for (auto& ref : refs) {
RefType iRef(ref);
- uint32_t clusterSize = getClusterSize(iRef);
- if (clusterSize == 0) {
- BTreeType *tree = getWTreeEntry(iRef);
- tree->move_nodes(_allocator);
- }
+ assert(iRef.valid());
+ uint32_t typeId = getTypeId(iRef);
+ assert(isBTree(typeId));
+ BTreeType *tree = getWTreeEntry(iRef);
+ tree->move_nodes(_allocator);
}
}
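Note the tightened contract in this hunk: the batched move_btree_nodes() no longer checks the cluster size of each ref, it asserts that every ref refers to an actual B-tree. The caller is expected to pre-filter, which is presumably why isBTree() moved down into BTreeStore. A hypothetical caller-side filter:

    std::vector<EntryRef> tree_refs;
    for (auto ref : all_refs) {                  // all_refs: hypothetical input
        if (ref.valid() && store.isBTree(ref)) {
            tree_refs.push_back(ref);            // short arrays have no nodes to move
        }
    }
    store.move_btree_nodes(tree_refs);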
@@ -1015,23 +1015,25 @@ start_compact_worst_buffers()
template <typename KeyT, typename DataT, typename AggrT, typename CompareT,
typename TraitsT, typename AggrCalcT>
-typename BTreeStore<KeyT, DataT, AggrT, CompareT, TraitsT, AggrCalcT>::EntryRef
+void
BTreeStore<KeyT, DataT, AggrT, CompareT, TraitsT, AggrCalcT>::
-move(EntryRef ref)
+move(std::vector<EntryRef> &refs)
{
- if (!ref.valid() || !_store.getCompacting(ref)) {
- return ref;
- }
- RefType iRef(ref);
- uint32_t clusterSize = getClusterSize(iRef);
- if (clusterSize == 0) {
- BTreeType *tree = getWTreeEntry(iRef);
- auto ref_and_ptr = allocBTreeCopy(*tree);
- tree->prepare_hold();
- return ref_and_ptr.ref;
+ for (auto& ref : refs) {
+ RefType iRef(ref);
+ assert(iRef.valid());
+ assert(_store.getCompacting(iRef));
+ uint32_t clusterSize = getClusterSize(iRef);
+ if (clusterSize == 0) {
+ BTreeType *tree = getWTreeEntry(iRef);
+ auto ref_and_ptr = allocBTreeCopy(*tree);
+ tree->prepare_hold();
+ ref = ref_and_ptr.ref;
+ } else {
+ const KeyDataType *shortArray = getKeyDataEntry(iRef, clusterSize);
+ ref = allocKeyDataCopy(shortArray, clusterSize).ref;
+ }
}
- const KeyDataType *shortArray = getKeyDataEntry(iRef, clusterSize);
- return allocKeyDataCopy(shortArray, clusterSize).ref;
}
}
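Taken together, one compaction pass over the value buffers now looks roughly like the loop in the test above. A condensed sketch, assuming the test's store, refs, change_writer, and inc_generation():

    auto to_hold = store.start_compact_worst_buffers();
    std::vector<bool> filter(TreeStore::RefType::numBuffers());
    for (auto buffer_id : to_hold) {
        filter[buffer_id] = true;                // buffers selected for compaction
    }
    std::vector<EntryRef> move_refs;
    for (auto& ref : refs) {
        if (ref.valid() && filter[ref.buffer_id(TreeStore::RefType::offset_bits)]) {
            move_refs.emplace_back(ref);
            change_writer.emplace_back(ref);     // remember where to publish results
        }
    }
    store.move(move_refs);                       // copies entries, rewrites refs in place
    change_writer.write(move_refs);              // publish the new refs to their owners
    store.finishCompact(to_hold);
    inc_generation();                            // lets the held buffers be reclaimed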