author    Tor Egge <Tor.Egge@online.no>    2024-01-18 23:19:25 +0100
committer Tor Egge <Tor.Egge@online.no>    2024-01-18 23:19:25 +0100
commit    67061e4c884604de3bd7cbb14be165e89488650b (patch)
tree      655d3b5af3a9bac7b8a6988b9b6b91627e839c6a /searchlib
parent    254862ddf5d55923232abef00e6c2fff32bf463b (diff)
Rename search::streaming::Hit member function context() to field_id().
Diffstat (limited to 'searchlib')
-rw-r--r--  searchlib/src/tests/query/streaming_query_test.cpp                  | 18
-rw-r--r--  searchlib/src/vespa/searchlib/query/streaming/dot_product_term.cpp  |  2
-rw-r--r--  searchlib/src/vespa/searchlib/query/streaming/hit.h                 |  6
-rw-r--r--  searchlib/src/vespa/searchlib/query/streaming/in_term.cpp           |  6
-rw-r--r--  searchlib/src/vespa/searchlib/query/streaming/query.cpp             | 14
-rw-r--r--  searchlib/src/vespa/searchlib/query/streaming/weighted_set_term.cpp |  2
6 files changed, 24 insertions(+), 24 deletions(-)
diff --git a/searchlib/src/tests/query/streaming_query_test.cpp b/searchlib/src/tests/query/streaming_query_test.cpp
index fe6149e6fba..97b3d88c25e 100644
--- a/searchlib/src/tests/query/streaming_query_test.cpp
+++ b/searchlib/src/tests/query/streaming_query_test.cpp
@@ -23,9 +23,9 @@ using TermType = QueryTerm::Type;
using search::fef::SimpleTermData;
using search::fef::MatchData;
-void assertHit(const Hit & h, size_t expWordpos, size_t expContext, int32_t weight) {
+void assertHit(const Hit & h, size_t expWordpos, size_t exp_field_id, int32_t weight) {
EXPECT_EQ(h.wordpos(), expWordpos);
- EXPECT_EQ(h.context(), expContext);
+ EXPECT_EQ(h.field_id(), exp_field_id);
EXPECT_EQ(h.weight(), weight);
}
@@ -479,11 +479,11 @@ TEST(StreamingQueryTest, test_phrase_evaluate)
p->evaluateHits(hits);
ASSERT_EQ(3u, hits.size());
EXPECT_EQ(hits[0].wordpos(), 2u);
- EXPECT_EQ(hits[0].context(), 0u);
+ EXPECT_EQ(hits[0].field_id(), 0u);
EXPECT_EQ(hits[1].wordpos(), 6u);
- EXPECT_EQ(hits[1].context(), 1u);
+ EXPECT_EQ(hits[1].field_id(), 1u);
EXPECT_EQ(hits[2].wordpos(), 2u);
- EXPECT_EQ(hits[2].context(), 3u);
+ EXPECT_EQ(hits[2].field_id(), 3u);
ASSERT_EQ(4u, p->getFieldInfoSize());
EXPECT_EQ(p->getFieldInfo(0).getHitOffset(), 0u);
EXPECT_EQ(p->getFieldInfo(0).getHitCount(), 1u);
@@ -847,22 +847,22 @@ TEST(StreamingQueryTest, test_same_element_evaluate)
sameElem->evaluateHits(hits);
EXPECT_EQ(4u, hits.size());
EXPECT_EQ(0u, hits[0].wordpos());
- EXPECT_EQ(2u, hits[0].context());
+ EXPECT_EQ(2u, hits[0].field_id());
EXPECT_EQ(0u, hits[0].elemId());
EXPECT_EQ(130, hits[0].weight());
EXPECT_EQ(0u, hits[1].wordpos());
- EXPECT_EQ(2u, hits[1].context());
+ EXPECT_EQ(2u, hits[1].field_id());
EXPECT_EQ(2u, hits[1].elemId());
EXPECT_EQ(140, hits[1].weight());
EXPECT_EQ(0u, hits[2].wordpos());
- EXPECT_EQ(2u, hits[2].context());
+ EXPECT_EQ(2u, hits[2].field_id());
EXPECT_EQ(4u, hits[2].elemId());
EXPECT_EQ(150, hits[2].weight());
EXPECT_EQ(0u, hits[3].wordpos());
- EXPECT_EQ(2u, hits[3].context());
+ EXPECT_EQ(2u, hits[3].field_id());
EXPECT_EQ(5u, hits[3].elemId());
EXPECT_EQ(160, hits[3].weight());
EXPECT_TRUE(sameElem->evaluate());
diff --git a/searchlib/src/vespa/searchlib/query/streaming/dot_product_term.cpp b/searchlib/src/vespa/searchlib/query/streaming/dot_product_term.cpp
index 1871bda564d..b3bfbb0e86b 100644
--- a/searchlib/src/vespa/searchlib/query/streaming/dot_product_term.cpp
+++ b/searchlib/src/vespa/searchlib/query/streaming/dot_product_term.cpp
@@ -24,7 +24,7 @@ DotProductTerm::build_scores(Scores& scores) const
for (const auto& term : _terms) {
auto& hl = term->evaluateHits(hl_store);
for (auto& hit : hl) {
- scores[hit.context()] += ((int64_t)term->weight().percent()) * hit.weight();
+ scores[hit.field_id()] += ((int64_t)term->weight().percent()) * hit.weight();
}
}
}
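The loop above is where the renamed accessor matters for dot products: per-field scores are accumulated under the hit's field id, adding term weight (percent) times hit weight. A reduced sketch of that accumulation, assuming hypothetical stand-in types (SimpleHit, SimpleTerm) in place of the real QueryTerm and HitList:

#include <cstdint>
#include <map>
#include <vector>

// Hypothetical stand-ins for the streaming query types.
struct SimpleHit  { uint32_t field_id; int32_t weight; };
struct SimpleTerm { int32_t weight_percent; std::vector<SimpleHit> hits; };

// Per-field score accumulation, mirroring DotProductTerm::build_scores above.
std::map<uint32_t, int64_t> build_scores(const std::vector<SimpleTerm>& terms) {
    std::map<uint32_t, int64_t> scores;
    for (const auto& term : terms) {
        for (const auto& hit : term.hits) {
            scores[hit.field_id] += int64_t(term.weight_percent) * hit.weight;
        }
    }
    return scores;
}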
diff --git a/searchlib/src/vespa/searchlib/query/streaming/hit.h b/searchlib/src/vespa/searchlib/query/streaming/hit.h
index a798d293491..81d6816ab56 100644
--- a/searchlib/src/vespa/searchlib/query/streaming/hit.h
+++ b/searchlib/src/vespa/searchlib/query/streaming/hit.h
@@ -9,15 +9,15 @@ namespace search::streaming {
class Hit
{
public:
- Hit(uint32_t pos_, uint32_t context_, uint32_t elemId_, int32_t weight_) noexcept
- : _position(pos_ | (context_<<24)),
+ Hit(uint32_t pos_, uint32_t field_id_, uint32_t elemId_, int32_t weight_) noexcept
+ : _position(pos_ | (field_id_<<24)),
_elemId(elemId_),
_weight(weight_)
{ }
int32_t weight() const { return _weight; }
uint32_t pos() const { return _position; }
uint32_t wordpos() const { return _position & 0xffffff; }
- uint32_t context() const { return _position >> 24; }
+ uint32_t field_id() const noexcept { return _position >> 24; }
uint32_t elemId() const { return _elemId; }
bool operator < (const Hit & b) const { return cmp(b) < 0; }
private:
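The rename does not change the packing: as the constructor and accessors above show, the field id occupies the upper 8 bits of the 32-bit _position word and the word position the lower 24 bits. A minimal standalone sketch of that layout (the main() and asserts are illustrative, not part of the patch):

#include <cassert>
#include <cstdint>

// Mirrors the packing used by search::streaming::Hit:
// bits 31..24 hold the field id, bits 23..0 hold the word position.
int main() {
    uint32_t field_id = 3;      // must fit in 8 bits
    uint32_t wordpos  = 2;      // must fit in 24 bits
    uint32_t position = wordpos | (field_id << 24);

    assert((position & 0xffffff) == wordpos);   // Hit::wordpos()
    assert((position >> 24) == field_id);       // Hit::field_id()
    return 0;
}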
diff --git a/searchlib/src/vespa/searchlib/query/streaming/in_term.cpp b/searchlib/src/vespa/searchlib/query/streaming/in_term.cpp
index 3e75f4a5114..c164db69ba1 100644
--- a/searchlib/src/vespa/searchlib/query/streaming/in_term.cpp
+++ b/searchlib/src/vespa/searchlib/query/streaming/in_term.cpp
@@ -29,9 +29,9 @@ InTerm::unpack_match_data(uint32_t docid, const ITermData& td, MatchData& match_
for (const auto& term : _terms) {
auto& hl = term->evaluateHits(hl_store);
for (auto& hit : hl) {
- if (!prev_field_id.has_value() || prev_field_id.value() != hit.context()) {
- prev_field_id = hit.context();
- matching_field_ids.insert(hit.context());
+ if (!prev_field_id.has_value() || prev_field_id.value() != hit.field_id()) {
+ prev_field_id = hit.field_id();
+ matching_field_ids.insert(hit.field_id());
}
}
}
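Hits returned by a term arrive grouped by field, so InTerm only needs to remember the previous field id to skip redundant set inserts while collecting the distinct matching fields. A reduced sketch of that pattern, using a plain vector of field ids rather than the real HitList:

#include <cstdint>
#include <optional>
#include <set>
#include <vector>

// Collect the distinct field ids from a field-ordered hit sequence,
// following the prev_field_id shortcut in InTerm::unpack_match_data.
std::set<uint32_t> matching_field_ids(const std::vector<uint32_t>& hit_field_ids) {
    std::set<uint32_t> ids;
    std::optional<uint32_t> prev_field_id;
    for (uint32_t field_id : hit_field_ids) {
        if (!prev_field_id.has_value() || prev_field_id.value() != field_id) {
            prev_field_id = field_id;
            ids.insert(field_id);
        }
    }
    return ids;
}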
diff --git a/searchlib/src/vespa/searchlib/query/streaming/query.cpp b/searchlib/src/vespa/searchlib/query/streaming/query.cpp
index ca742aabe26..618922eced9 100644
--- a/searchlib/src/vespa/searchlib/query/streaming/query.cpp
+++ b/searchlib/src/vespa/searchlib/query/streaming/query.cpp
@@ -208,7 +208,7 @@ SameElementQueryNode::evaluateHits(HitList & hl) const
currMatchCount++;
if ((currMatchCount+1) == numFields) {
Hit h = nextHL[indexVector[currMatchCount]];
- hl.emplace_back(0, h.context(), h.elemId(), h.weight());
+ hl.emplace_back(0, h.field_id(), h.elemId(), h.weight());
currMatchCount = 0;
indexVector[0]++;
}
@@ -260,26 +260,26 @@ PhraseQueryNode::evaluateHits(HitList & hl) const
const auto & currHit = curr->evaluateHits(tmpHL)[currIndex];
size_t firstPosition = currHit.pos();
uint32_t currElemId = currHit.elemId();
- uint32_t currContext = currHit.context();
+ uint32_t curr_field_id = currHit.field_id();
const HitList & nextHL = next->evaluateHits(tmpHL);
int diff(0);
size_t nextIndexMax = nextHL.size();
while ((nextIndex < nextIndexMax) &&
- ((nextHL[nextIndex].context() < currContext) ||
- ((nextHL[nextIndex].context() == currContext) && (nextHL[nextIndex].elemId() <= currElemId))) &&
+ ((nextHL[nextIndex].field_id() < curr_field_id) ||
+ ((nextHL[nextIndex].field_id() == curr_field_id) && (nextHL[nextIndex].elemId() <= currElemId))) &&
((diff = nextHL[nextIndex].pos()-firstPosition) < 1))
{
nextIndex++;
}
- if ((diff == 1) && (nextHL[nextIndex].context() == currContext) && (nextHL[nextIndex].elemId() == currElemId)) {
+ if ((diff == 1) && (nextHL[nextIndex].field_id() == curr_field_id) && (nextHL[nextIndex].elemId() == currElemId)) {
currPhraseLen++;
if ((currPhraseLen+1) == fullPhraseLen) {
Hit h = nextHL[indexVector[currPhraseLen]];
hl.push_back(h);
- const QueryTerm::FieldInfo & fi = next->getFieldInfo(h.context());
- updateFieldInfo(h.context(), hl.size() - 1, fi.getFieldLength());
+ const QueryTerm::FieldInfo & fi = next->getFieldInfo(h.field_id());
+ updateFieldInfo(h.field_id(), hl.size() - 1, fi.getFieldLength());
currPhraseLen = 0;
indexVector[0]++;
}
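In the phrase evaluation above, a hit from the next term extends the phrase only when it falls in the same field and the same element as the current hit and its position is exactly one greater. A hedged sketch of that predicate (the helper name continues_phrase is ours, not the patch's):

#include <cstdint>

// Illustrative only: true when a hit from the next term can continue the
// current phrase, per the field_id/elemId/position checks in
// PhraseQueryNode::evaluateHits.
bool continues_phrase(uint32_t curr_field_id, uint32_t curr_elem_id, uint32_t curr_pos,
                      uint32_t next_field_id, uint32_t next_elem_id, uint32_t next_pos) {
    return next_field_id == curr_field_id &&
           next_elem_id == curr_elem_id &&
           (next_pos - curr_pos) == 1;
}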
diff --git a/searchlib/src/vespa/searchlib/query/streaming/weighted_set_term.cpp b/searchlib/src/vespa/searchlib/query/streaming/weighted_set_term.cpp
index 90d0be5d43c..d2d706eef3d 100644
--- a/searchlib/src/vespa/searchlib/query/streaming/weighted_set_term.cpp
+++ b/searchlib/src/vespa/searchlib/query/streaming/weighted_set_term.cpp
@@ -25,7 +25,7 @@ WeightedSetTerm::unpack_match_data(uint32_t docid, const ITermData& td, MatchDat
for (const auto& term : _terms) {
auto& hl = term->evaluateHits(hl_store);
for (auto& hit : hl) {
- scores[hit.context()].emplace_back(term->weight().percent());
+ scores[hit.field_id()].emplace_back(term->weight().percent());
}
}
auto num_fields = td.numFields();
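As in DotProductTerm, the grouping key here is the hit's field id, but WeightedSetTerm keeps the individual term weights per field rather than a summed score. A reduced sketch with hypothetical stand-in types, analogous to the one after dot_product_term.cpp above:

#include <cstdint>
#include <map>
#include <vector>

// Hypothetical stand-ins for the streaming query types.
struct WsHit  { uint32_t field_id; };
struct WsTerm { int32_t weight_percent; std::vector<WsHit> hits; };

// Per-field weight lists, mirroring the loop in WeightedSetTerm::unpack_match_data.
std::map<uint32_t, std::vector<int32_t>> collect_scores(const std::vector<WsTerm>& terms) {
    std::map<uint32_t, std::vector<int32_t>> scores;
    for (const auto& term : terms) {
        for (const auto& hit : term.hits) {
            scores[hit.field_id].emplace_back(term.weight_percent);
        }
    }
    return scores;
}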