author     Arne H Juul <arnej@yahooinc.com>    2021-10-13 08:34:43 +0000
committer  Arne H Juul <arnej@yahooinc.com>    2021-10-13 10:44:30 +0000
commit     556f30a0d3afef8ff19fd1bcefe9b8fd11b52002 (patch)
tree       811f732d7c3a7f724830ed3d8fe73754b7b73e59 /searchcore
parent     bce6fccfd1027bb5b18b5bef2fd03c64f91e6024 (diff)
rewrite tests with only root slime docsums
Diffstat (limited to 'searchcore')
-rw-r--r--  searchcore/src/tests/proton/docsummary/docsummary.cpp  548
1 file changed, 242 insertions(+), 306 deletions(-)
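
For context, the rewritten assertions below read each summary directly out of the reply's root Slime object instead of decoding a per-hit binary blob. A minimal sketch of the access pattern the new tests rely on (the helper name is illustrative, not part of the commit):

    // Hypothetical helper mirroring the new test assertions: each hit's summary
    // sits under root()["docsums"][i]["docsum"], errors under root()["errors"].
    bool docsum_field_valid(const DocsumReply &reply, uint32_t hit, const std::string &field) {
        const auto &docsum = reply.root()["docsums"][hit]["docsum"];
        return docsum.valid() && docsum[field].valid();
    }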
diff --git a/searchcore/src/tests/proton/docsummary/docsummary.cpp b/searchcore/src/tests/proton/docsummary/docsummary.cpp
index 47197bcbea1..349076542c3 100644
--- a/searchcore/src/tests/proton/docsummary/docsummary.cpp
+++ b/searchcore/src/tests/proton/docsummary/docsummary.cpp
@@ -34,6 +34,7 @@
#include <vespa/searchlib/transactionlog/nosyncproxy.h>
#include <vespa/searchlib/transactionlog/translogserver.h>
#include <vespa/vespalib/data/slime/slime.h>
+#include <vespa/vespalib/data/slime/json_format.h>
#include <vespa/vespalib/data/simple_buffer.h>
#include <vespa/vespalib/encoding/base64.h>
#include <vespa/vespalib/util/size_literals.h>
@@ -64,6 +65,7 @@ using vespa::config::search::core::ProtonConfig;
using vespa::config::content::core::BucketspacesConfig;
using vespalib::eval::TensorSpec;
using vespalib::eval::SimpleValue;
+using vespalib::Slime;
using namespace vespalib::slime;
typedef std::unique_ptr<GeneralResult> GeneralResultPtr;
@@ -106,8 +108,8 @@ public:
_noTlSyncer(),
_str(_summaryExecutor, "summary",
LogDocumentStore::Config(
- DocumentStore::Config(),
- LogDataStore::Config()),
+ DocumentStore::Config(),
+ LogDataStore::Config()),
GrowStrategy(),
TuneFileSummary(),
_fileHeaderContext,
@@ -133,7 +135,6 @@ public:
}
};
-
namespace {
const char *
@@ -167,7 +168,6 @@ std::string b64encode(const Inspector &value) {
} // namespace
-
class DBContext : public DummyDBOwner
{
public:
@@ -226,7 +226,7 @@ public:
_summaryExecutor, _summaryExecutor, _bucketExecutor, _tls, _dummy, _fileHeaderContext,
std::make_unique<MemoryConfigStore>(),
std::make_shared<vespalib::ThreadStackExecutor>(16, 128_Ki), _hwInfo),
- _ddb->start();
+ _ddb->start();
_ddb->waitForOnlineState();
_aw = std::make_unique<AttributeWriter>(_ddb->getReadySubDB()->getAttributeManager());
_sa = _ddb->getReadySubDB()->getSummaryAdapter();
@@ -313,7 +313,7 @@ getResult(DocumentStoreAdapter & dsa, uint32_t docId)
bool
assertString(const std::string & exp, const std::string & fieldName,
- DocumentStoreAdapter &dsa, uint32_t id)
+ DocumentStoreAdapter &dsa, uint32_t id)
{
GeneralResultPtr res = getResult(dsa, id);
return EXPECT_EQUAL(exp, std::string(res->GetEntry(fieldName.c_str())->_stringval,
@@ -322,62 +322,28 @@ assertString(const std::string & exp, const std::string & fieldName,
void
assertTensor(const vespalib::eval::Value::UP & exp, const std::string & fieldName,
- const DocsumReply & reply, uint32_t id, uint32_t)
+ const DocsumReply & reply, uint32_t id)
{
- const DocsumReply::Docsum & docsum = reply.docsums[id];
- uint32_t classId;
- ASSERT_LESS_EQUAL(sizeof(classId), docsum.data.size());
- memcpy(&classId, docsum.data.c_str(), sizeof(classId));
- ASSERT_EQUAL(::search::docsummary::SLIME_MAGIC_ID, classId);
- vespalib::Slime slime;
- vespalib::Memory serialized(docsum.data.c_str() + sizeof(classId),
- docsum.data.size() - sizeof(classId));
- size_t decodeRes = BinaryFormat::decode(serialized, slime);
- ASSERT_EQUAL(decodeRes, serialized.size);
-
- EXPECT_EQUAL(exp.get() != nullptr, slime.get()[fieldName].valid());
- vespalib::Memory data = slime.get()[fieldName].asData();
- EXPECT_EQUAL(exp.get() == nullptr, data.size == 0u);
if (exp) {
+ EXPECT_TRUE(reply.root()["docsums"].valid());
+ EXPECT_TRUE(reply.root()["docsums"][id].valid());
+ EXPECT_TRUE(reply.root()["docsums"][id]["docsum"].valid());
+ EXPECT_TRUE(reply.root()["docsums"][id]["docsum"][fieldName].valid());
+ vespalib::Memory data = reply.root()["docsums"][id]["docsum"][fieldName].asData();
vespalib::nbostream x(data.data, data.size);
auto tensor = SimpleValue::from_stream(x);
EXPECT_TRUE(tensor.get() != nullptr);
EXPECT_EQUAL(*exp, *tensor);
+ } else {
+ EXPECT_FALSE(reply.root()["docsums"][id][fieldName].valid());
}
}
-vespalib::Slime
-getSlime(const DocsumReply &reply, uint32_t id, bool relaxed)
-{
- const DocsumReply::Docsum & docsum = reply.docsums[id];
- uint32_t classId;
- ASSERT_LESS_EQUAL(sizeof(classId), docsum.data.size());
- memcpy(&classId, docsum.data.c_str(), sizeof(classId));
- ASSERT_EQUAL(search::docsummary::SLIME_MAGIC_ID, classId);
- vespalib::Slime slime;
- vespalib::Memory serialized(docsum.data.c_str() + sizeof(classId),
- docsum.data.size() - sizeof(classId));
- size_t decodeRes = BinaryFormat::decode(serialized, slime);
- ASSERT_EQUAL(decodeRes, serialized.size);
- if (relaxed) {
- vespalib::SimpleBuffer buf;
- JsonFormat::encode(slime, buf, false);
- vespalib::Slime tmpSlime;
- size_t used = JsonFormat::decode(buf.get(), tmpSlime);
- EXPECT_TRUE(used > 0);
- slime = std::move(tmpSlime);
- }
- return slime;
-}
-
-bool
-assertSlime(const std::string &exp, const DocsumReply &reply, uint32_t id, bool relaxed)
-{
- vespalib::Slime slime = getSlime(reply, id, relaxed);
+bool assertSlime(const std::string &exp, const DocsumReply &reply) {
vespalib::Slime expSlime;
size_t used = JsonFormat::decode(exp, expSlime);
EXPECT_TRUE(used > 0);
- return EXPECT_EQUAL(expSlime, slime);
+ return (EXPECT_EQUAL(expSlime.get(), reply.root()));
}
TEST_F("requireThatAdapterHandlesAllFieldTypes", Fixture)
@@ -425,20 +391,19 @@ TEST_F("requireThatAdapterHandlesAllFieldTypes", Fixture)
EXPECT_APPROX(1234.56, res->GetEntry("e")->_doubleval, 10e-5);
EXPECT_APPROX(9876.54, res->GetEntry("f")->_doubleval, 10e-5);
EXPECT_EQUAL("foo", std::string(res->GetEntry("g")->_stringval,
- res->GetEntry("g")->_stringlen));
+ res->GetEntry("g")->_stringlen));
EXPECT_EQUAL("bar", std::string(res->GetEntry("h")->_stringval,
- res->GetEntry("h")->_stringlen));
+ res->GetEntry("h")->_stringlen));
EXPECT_EQUAL("baz", std::string(res->GetEntry("i")->_dataval,
- res->GetEntry("i")->_datalen));
+ res->GetEntry("i")->_datalen));
EXPECT_EQUAL("qux", std::string(res->GetEntry("j")->_dataval,
- res->GetEntry("j")->_datalen));
+ res->GetEntry("j")->_datalen));
EXPECT_EQUAL("<foo>", std::string(res->GetEntry("k")->_stringval,
- res->GetEntry("k")->_stringlen));
+ res->GetEntry("k")->_stringlen));
EXPECT_EQUAL("{foo:10}", std::string(res->GetEntry("l")->_stringval,
- res->GetEntry("l")->_stringlen));
+ res->GetEntry("l")->_stringlen));
}
-
TEST_F("requireThatAdapterHandlesMultipleDocuments", Fixture)
{
Schema s;
@@ -479,7 +444,6 @@ TEST_F("requireThatAdapterHandlesMultipleDocuments", Fixture)
bc._str.flush(flushToken);
}
-
TEST_F("requireThatAdapterHandlesDocumentIdField", Fixture)
{
Schema s;
@@ -495,10 +459,9 @@ TEST_F("requireThatAdapterHandlesDocumentIdField", Fixture)
f.getMarkupFields());
GeneralResultPtr res = getResult(dsa, 0);
EXPECT_EQUAL("id:ns:searchdocument::0", std::string(res->GetEntry("documentid")->_stringval,
- res->GetEntry("documentid")->_stringlen));
+ res->GetEntry("documentid")->_stringlen));
}
-
GlobalId gid1 = DocumentId("id:ns:searchdocument::1").getGlobalId(); // lid 1
GlobalId gid2 = DocumentId("id:ns:searchdocument::2").getGlobalId(); // lid 2
GlobalId gid3 = DocumentId("id:ns:searchdocument::3").getGlobalId(); // lid 3
@@ -549,17 +512,9 @@ TEST("requireThatDocsumRequestIsProcessed")
req.hits.push_back(DocsumRequest::Hit(gid4));
req.hits.push_back(DocsumRequest::Hit(gid9));
DocsumReply::UP rep = dc._ddb->getDocsums(req);
-
- EXPECT_EQUAL(3u, rep->docsums.size());
- EXPECT_EQUAL(gid2, rep->docsums[0].gid);
- EXPECT_TRUE(assertSlime("{a:20}", *rep, 0, false));
- EXPECT_EQUAL(gid4, rep->docsums[1].gid);
- EXPECT_TRUE(assertSlime("{a:40}", *rep, 1, false));
- EXPECT_EQUAL(gid9, rep->docsums[2].gid);
- EXPECT_TRUE(rep->docsums[2].data.get() == nullptr);
+ EXPECT_TRUE(assertSlime("{docsums:[ {docsum:{a:20}}, {docsum:{a:40}}, {} ]}", *rep));
}
-
TEST("requireThatRewritersAreUsed")
{
Schema s;
@@ -582,8 +537,7 @@ TEST("requireThatRewritersAreUsed")
req.resultClassName = "class2";
req.hits.push_back(DocsumRequest::Hit(gid1));
DocsumReply::UP rep = dc._ddb->getDocsums(req);
- EXPECT_EQUAL(1u, rep->docsums.size());
- EXPECT_TRUE(assertSlime("{aa:20}", *rep, 0, false));
+ EXPECT_TRUE(assertSlime("{docsums:[ {docsum:{aa:20}} ]}", *rep));
}
TEST("requireThatSummariesTimeout")
@@ -595,13 +549,13 @@ TEST("requireThatSummariesTimeout")
BuildContext bc(s);
DBContext dc(bc._repo, getDocTypeName());
dc.put(*bc._bld.startDocument("id:ns:searchdocument::1").
- startSummaryField("aa").
- addInt(10).
- endField().
- startSummaryField("ab").
- addInt(20).
- endField().
- endDocument(),
+ startSummaryField("aa").
+ addInt(10).
+ endField().
+ startSummaryField("ab").
+ addInt(20).
+ endField().
+ endDocument(),
1);
DocsumRequest req;
@@ -610,12 +564,12 @@ TEST("requireThatSummariesTimeout")
req.resultClassName = "class2";
req.hits.push_back(DocsumRequest::Hit(gid1));
DocsumReply::UP rep = dc._ddb->getDocsums(req);
- EXPECT_EQUAL(1u, rep->docsums.size());
- vespalib::SimpleBuffer buf;
- vespalib::Slime summary = getSlime(*rep, 0, false);
- JsonFormat::encode(summary, buf, false);
- auto bufstring = buf.get().make_stringref();
- EXPECT_TRUE(std::regex_search(bufstring.data(), bufstring.data() + bufstring.size(), std::regex("Timed out with -[0-9]+us left.")));
+ //fprintf(stderr, "SLIME:\n%s\n", rep->root().toString().c_str());
+ const auto & field = rep->root()["errors"];
+ EXPECT_TRUE(field.valid());
+ EXPECT_EQUAL(field[0]["type"].asString(), "timeout");
+ auto bufstring = field[0]["message"].asString();
+ EXPECT_TRUE(std::regex_search(bufstring.data, bufstring.data + bufstring.size, std::regex("Timed out 1 summaries with -[0-9]+us left.")));
}
void
@@ -629,7 +583,6 @@ addField(Schema & s,
s.addAttributeField(Schema::AttributeField(name, dtype, ctype, tensor_spec));
}
-
TEST("requireThatAttributesAreUsed")
{
Schema s;
@@ -654,7 +607,7 @@ TEST("requireThatAttributesAreUsed")
addInt(10).
endField().
startAttributeField("bb").
- addFloat(10.1).
+ addFloat(10.1250).
endField().
startAttributeField("bc").
addStr("foo").
@@ -669,10 +622,10 @@ TEST("requireThatAttributesAreUsed")
endField().
startAttributeField("be").
startElement().
- addFloat(20.2).
+ addFloat(20.2500).
endElement().
startElement().
- addFloat(30.3).
+ addFloat(30.3125).
endElement().
endField().
startAttributeField("bf").
@@ -693,10 +646,10 @@ TEST("requireThatAttributesAreUsed")
endField().
startAttributeField("bh").
startElement(4).
- addFloat(40.4).
+ addFloat(40.4375).
endElement().
startElement(5).
- addFloat(50.5).
+ addFloat(50.5625).
endElement().
endField().
startAttributeField("bi").
@@ -722,28 +675,22 @@ TEST("requireThatAttributesAreUsed")
req.hits.push_back(DocsumRequest::Hit(gid2));
req.hits.push_back(DocsumRequest::Hit(gid3));
DocsumReply::UP rep = dc._ddb->getDocsums(req);
- uint32_t rclass = 3;
- EXPECT_EQUAL(2u, rep->docsums.size());
-
- // FIXME the expected output ordering of weighted set fields is currently inherently linked
- // to the internal ordering of such attributes. Should be decoupled, as this is very fragile.
- EXPECT_TRUE(assertSlime("{ba:10,bb:10.1,"
+ EXPECT_TRUE(assertSlime("{docsums:[ {docsum:{"
+ "ba:10,bb:10.1250,"
"bc:'foo',"
"bd:[20,30],"
- "be:[20.2,30.3],"
+ "be:[20.2500,30.3125],"
"bf:['bar','baz'],"
"bg:[{item:40,weight:2},{item:50,weight:3}],"
- "bh:[{item:50.5,weight:5},{item:40.4,weight:4}],"
+ "bh:[{item:40.4375,weight:4},{item:50.5625,weight:5}],"
"bi:[{item:'quux',weight:7},{item:'qux',weight:6}],"
- "bj:'0x01020178017901016601674008000000000000'}", *rep, 0, true));
+ "bj:x01020178017901016601674008000000000000}},"
+ "{docsum:{}}]}", *rep));
+
TEST_DO(assertTensor(make_tensor(TensorSpec("tensor(x{},y{})")
.add({{"x", "f"}, {"y", "g"}}, 3)),
- "bj", *rep, 0, rclass));
-
- // empty doc
- EXPECT_TRUE(assertSlime("{}", *rep, 1, false));
- TEST_DO(assertTensor(vespalib::eval::Value::UP(), "bj", *rep, 1, rclass));
+ "bj", *rep, 0));
proton::IAttributeManager::SP attributeManager = dc._ddb->getReadySubDB()->getAttributeManager();
vespalib::ISequencedTaskExecutor &attributeFieldWriter = attributeManager->getAttributeFieldWriter();
@@ -753,7 +700,7 @@ TEST("requireThatAttributesAreUsed")
attributeFieldWriter.execute(attributeFieldWriter.getExecutorIdFromName(bjAttr->getNamePrefix()),
[&]() {
bjTensorAttr->setTensor(3, *make_tensor(TensorSpec("tensor(x{},y{})")
- .add({{"x", "a"}, {"y", "b"}}, 4)));
+ .add({{"x", "a"}, {"y", "b"}}, 4)));
bjTensorAttr->commit();
});
attributeFieldWriter.sync();
@@ -761,18 +708,15 @@ TEST("requireThatAttributesAreUsed")
DocsumReply::UP rep2 = dc._ddb->getDocsums(req);
TEST_DO(assertTensor(make_tensor(TensorSpec("tensor(x{},y{})")
.add({{"x", "a"}, {"y", "b"}}, 4)),
- "bj", *rep2, 1, rclass));
+ "bj", *rep2, 1));
DocsumRequest req3;
req3.resultClassName = "class3";
req3.hits.push_back(DocsumRequest::Hit(gid3));
DocsumReply::UP rep3 = dc._ddb->getDocsums(req3);
-
- EXPECT_TRUE(assertSlime("{bj:'0x01020178017901016101624010000000000000'}",
- *rep3, 0, true));
+ EXPECT_TRUE(assertSlime("{docsums:[{docsum:{bj:x01020178017901016101624010000000000000}}]}", *rep3));
}
-
TEST("requireThatSummaryAdapterHandlesPutAndRemove")
{
Schema s;
@@ -780,10 +724,10 @@ TEST("requireThatSummaryAdapterHandlesPutAndRemove")
BuildContext bc(s);
DBContext dc(bc._repo, getDocTypeName());
Document::UP exp = bc._bld.startDocument("id:ns:searchdocument::1").
- startSummaryField("f1").
- addStr("foo").
- endField().
- endDocument();
+ startSummaryField("f1").
+ addStr("foo").
+ endField().
+ endDocument();
dc._sa->put(1, 1, *exp);
IDocumentStore & store = dc._ddb->getReadySubDB()->getSummaryManager()->getBackingStore();
Document::UP act = store.read(1, *bc._repo);
@@ -794,7 +738,6 @@ TEST("requireThatSummaryAdapterHandlesPutAndRemove")
EXPECT_TRUE(store.read(1, *bc._repo).get() == nullptr);
}
-
const std::string TERM_ORIG = "\357\277\271";
const std::string TERM_INDEX = "\357\277\272";
const std::string TERM_END = "\357\277\273";
@@ -802,7 +745,7 @@ const std::string TERM_SEP = "\037";
const std::string TERM_EMPTY = "";
namespace
{
- const std::string empty;
+const std::string empty;
}
TEST_F("requireThatAnnotationsAreUsed", Fixture)
@@ -815,27 +758,27 @@ TEST_F("requireThatAnnotationsAreUsed", Fixture)
BuildContext bc(s);
DBContext dc(bc._repo, getDocTypeName());
Document::UP exp = bc._bld.startDocument("id:ns:searchdocument::0").
- startIndexField("g").
- addStr("foo").
- addStr("bar").
- addTermAnnotation("baz").
- endField().
- startIndexField("dynamicstring").
- setAutoAnnotate(false).
- addStr("foo").
- addSpan().
- addAlphabeticTokenAnnotation().
- addTermAnnotation().
- addNoWordStr(" ").
- addSpan().
- addSpaceTokenAnnotation().
- addStr("bar").
- addSpan().
- addAlphabeticTokenAnnotation().
- addTermAnnotation("baz").
- setAutoAnnotate(true).
- endField().
- endDocument();
+ startIndexField("g").
+ addStr("foo").
+ addStr("bar").
+ addTermAnnotation("baz").
+ endField().
+ startIndexField("dynamicstring").
+ setAutoAnnotate(false).
+ addStr("foo").
+ addSpan().
+ addAlphabeticTokenAnnotation().
+ addTermAnnotation().
+ addNoWordStr(" ").
+ addSpan().
+ addSpaceTokenAnnotation().
+ addStr("bar").
+ addSpan().
+ addAlphabeticTokenAnnotation().
+ addTermAnnotation("baz").
+ setAutoAnnotate(true).
+ endField().
+ endDocument();
dc._sa->put(1, 1, *exp);
IDocumentStore & store = dc._ddb->getReadySubDB()->getSummaryManager()->getBackingStore();
@@ -850,10 +793,10 @@ TEST_F("requireThatAnnotationsAreUsed", Fixture)
f.getMarkupFields());
EXPECT_TRUE(assertString("foo bar", "g", dsa, 1));
EXPECT_TRUE(assertString(TERM_EMPTY + "foo" + TERM_SEP +
- " " + TERM_SEP +
- TERM_ORIG + "bar" + TERM_INDEX + "baz" + TERM_END +
- TERM_SEP,
- "dynamicstring", dsa, 1));
+ " " + TERM_SEP +
+ TERM_ORIG + "bar" + TERM_INDEX + "baz" + TERM_END +
+ TERM_SEP,
+ "dynamicstring", dsa, 1));
}
TEST_F("requireThatUrisAreUsed", Fixture)
@@ -868,126 +811,126 @@ TEST_F("requireThatUrisAreUsed", Fixture)
BuildContext bc(s);
DBContext dc(bc._repo, getDocTypeName());
Document::UP exp = bc._bld.startDocument("id:ns:searchdocument::0").
- startIndexField("urisingle").
- startSubField("all").
- addUrlTokenizedString("http://www.example.com:81/fluke?ab=2#4").
- endSubField().
- startSubField("scheme").
- addUrlTokenizedString("http").
- endSubField().
- startSubField("host").
- addUrlTokenizedString("www.example.com").
- endSubField().
- startSubField("port").
- addUrlTokenizedString("81").
- endSubField().
- startSubField("path").
- addUrlTokenizedString("/fluke").
- endSubField().
- startSubField("query").
- addUrlTokenizedString("ab=2").
- endSubField().
- startSubField("fragment").
- addUrlTokenizedString("4").
- endSubField().
- endField().
- startIndexField("uriarray").
- startElement(1).
- startSubField("all").
- addUrlTokenizedString("http://www.example.com:82/fluke?ab=2#8").
- endSubField().
- startSubField("scheme").
- addUrlTokenizedString("http").
- endSubField().
- startSubField("host").
- addUrlTokenizedString("www.example.com").
- endSubField().
- startSubField("port").
- addUrlTokenizedString("82").
- endSubField().
- startSubField("path").
- addUrlTokenizedString("/fluke").
- endSubField().
- startSubField("query").
- addUrlTokenizedString("ab=2").
- endSubField().
- startSubField("fragment").
- addUrlTokenizedString("8").
- endSubField().
- endElement().
- startElement(1).
- startSubField("all").
- addUrlTokenizedString("http://www.flickr.com:82/fluke?ab=2#9").
- endSubField().
- startSubField("scheme").
- addUrlTokenizedString("http").
- endSubField().
- startSubField("host").
- addUrlTokenizedString("www.flickr.com").
- endSubField().
- startSubField("port").
- addUrlTokenizedString("82").
- endSubField().
- startSubField("path").
- addUrlTokenizedString("/fluke").
- endSubField().
- startSubField("query").
- addUrlTokenizedString("ab=2").
- endSubField().
- startSubField("fragment").
- addUrlTokenizedString("9").
- endSubField().
- endElement().
- endField().
- startIndexField("uriwset").
- startElement(4).
- startSubField("all").
- addUrlTokenizedString("http://www.example.com:83/fluke?ab=2#12").
- endSubField().
- startSubField("scheme").
- addUrlTokenizedString("http").
- endSubField().
- startSubField("host").
- addUrlTokenizedString("www.example.com").
- endSubField().
- startSubField("port").
- addUrlTokenizedString("83").
- endSubField().
- startSubField("path").
- addUrlTokenizedString("/fluke").
- endSubField().
- startSubField("query").
- addUrlTokenizedString("ab=2").
- endSubField().
- startSubField("fragment").
- addUrlTokenizedString("12").
- endSubField().
- endElement().
- startElement(7).
- startSubField("all").
- addUrlTokenizedString("http://www.flickr.com:85/fluke?ab=2#13").
- endSubField().
- startSubField("scheme").
- addUrlTokenizedString("http").
- endSubField().
- startSubField("host").
- addUrlTokenizedString("www.flickr.com").
- endSubField().
- startSubField("port").
- addUrlTokenizedString("85").
- endSubField().
- startSubField("path").
- addUrlTokenizedString("/fluke").
- endSubField().
- startSubField("query").
- addUrlTokenizedString("ab=2").
- endSubField().
- startSubField("fragment").
- addUrlTokenizedString("13").
- endSubField().
- endElement().
- endField().
- endDocument();
+ startIndexField("urisingle").
+ startSubField("all").
+ addUrlTokenizedString("http://www.example.com:81/fluke?ab=2#4").
+ endSubField().
+ startSubField("scheme").
+ addUrlTokenizedString("http").
+ endSubField().
+ startSubField("host").
+ addUrlTokenizedString("www.example.com").
+ endSubField().
+ startSubField("port").
+ addUrlTokenizedString("81").
+ endSubField().
+ startSubField("path").
+ addUrlTokenizedString("/fluke").
+ endSubField().
+ startSubField("query").
+ addUrlTokenizedString("ab=2").
+ endSubField().
+ startSubField("fragment").
+ addUrlTokenizedString("4").
+ endSubField().
+ endField().
+ startIndexField("uriarray").
+ startElement(1).
+ startSubField("all").
+ addUrlTokenizedString("http://www.example.com:82/fluke?ab=2#8").
+ endSubField().
+ startSubField("scheme").
+ addUrlTokenizedString("http").
+ endSubField().
+ startSubField("host").
+ addUrlTokenizedString("www.example.com").
+ endSubField().
+ startSubField("port").
+ addUrlTokenizedString("82").
+ endSubField().
+ startSubField("path").
+ addUrlTokenizedString("/fluke").
+ endSubField().
+ startSubField("query").
+ addUrlTokenizedString("ab=2").
+ endSubField().
+ startSubField("fragment").
+ addUrlTokenizedString("8").
+ endSubField().
+ endElement().
+ startElement(1).
+ startSubField("all").
+ addUrlTokenizedString("http://www.flickr.com:82/fluke?ab=2#9").
+ endSubField().
+ startSubField("scheme").
+ addUrlTokenizedString("http").
+ endSubField().
+ startSubField("host").
+ addUrlTokenizedString("www.flickr.com").
+ endSubField().
+ startSubField("port").
+ addUrlTokenizedString("82").
+ endSubField().
+ startSubField("path").
+ addUrlTokenizedString("/fluke").
+ endSubField().
+ startSubField("query").
+ addUrlTokenizedString("ab=2").
+ endSubField().
+ startSubField("fragment").
+ addUrlTokenizedString("9").
+ endSubField().
+ endElement().
+ endField().
+ startIndexField("uriwset").
+ startElement(4).
+ startSubField("all").
+ addUrlTokenizedString("http://www.example.com:83/fluke?ab=2#12").
+ endSubField().
+ startSubField("scheme").
+ addUrlTokenizedString("http").
+ endSubField().
+ startSubField("host").
+ addUrlTokenizedString("www.example.com").
+ endSubField().
+ startSubField("port").
+ addUrlTokenizedString("83").
+ endSubField().
+ startSubField("path").
+ addUrlTokenizedString("/fluke").
+ endSubField().
+ startSubField("query").
+ addUrlTokenizedString("ab=2").
+ endSubField().
+ startSubField("fragment").
+ addUrlTokenizedString("12").
+ endSubField().
+ endElement().
+ startElement(7).
+ startSubField("all").
+ addUrlTokenizedString("http://www.flickr.com:85/fluke?ab=2#13").
+ endSubField().
+ startSubField("scheme").
+ addUrlTokenizedString("http").
+ endSubField().
+ startSubField("host").
+ addUrlTokenizedString("www.flickr.com").
+ endSubField().
+ startSubField("port").
+ addUrlTokenizedString("85").
+ endSubField().
+ startSubField("path").
+ addUrlTokenizedString("/fluke").
+ endSubField().
+ startSubField("query").
+ addUrlTokenizedString("ab=2").
+ endSubField().
+ startSubField("fragment").
+ addUrlTokenizedString("13").
+ endSubField().
+ endElement().
+ endField().
+ endDocument();
dc._sa->put(1, 1, *exp);
IDocumentStore & store =
@@ -1022,7 +965,6 @@ TEST_F("requireThatUrisAreUsed", Fixture)
}
}
-
TEST("requireThatPositionsAreUsed")
{
Schema s;
@@ -1033,18 +975,18 @@ TEST("requireThatPositionsAreUsed")
BuildContext bc(s);
DBContext dc(bc._repo, getDocTypeName());
Document::UP exp = bc._bld.startDocument("id:ns:searchdocument::1").
- startAttributeField("sp2").
- addPosition(1002, 1003).
- endField().
- startAttributeField("ap2").
- startElement().addPosition(1006, 1007).endElement().
- startElement().addPosition(1008, 1009).endElement().
- endField().
- startAttributeField("wp2").
- startElement(43).addPosition(1012, 1013).endElement().
- startElement(44).addPosition(1014, 1015).endElement().
- endField().
- endDocument();
+ startAttributeField("sp2").
+ addPosition(1002, 1003).
+ endField().
+ startAttributeField("ap2").
+ startElement().addPosition(1006, 1007).endElement().
+ startElement().addPosition(1008, 1009).endElement().
+ endField().
+ startAttributeField("wp2").
+ startElement(43).addPosition(1012, 1013).endElement().
+ startElement(44).addPosition(1014, 1015).endElement().
+ endField().
+ endDocument();
dc.put(*exp, 1);
IDocumentStore & store = dc._ddb->getReadySubDB()->getSummaryManager()->getBackingStore();
@@ -1056,22 +998,18 @@ TEST("requireThatPositionsAreUsed")
req.resultClassName = "class5";
req.hits.push_back(DocsumRequest::Hit(gid1));
DocsumReply::UP rep = dc._ddb->getDocsums(req);
- // uint32_t rclass = 5;
-
- EXPECT_EQUAL(1u, rep->docsums.size());
- EXPECT_EQUAL(gid1, rep->docsums[0].gid);
- EXPECT_TRUE(assertSlime("{sp2:'1047758'"
+ EXPECT_TRUE(assertSlime("{docsums:["
+ "{docsum:{sp2:'1047758'"
",sp2x:{x:1002, y:1003, latlong:'N0.001003;E0.001002'}"
",ap2:[1047806,1048322]"
",ap2x:[{x:1006, y:1007, latlong:'N0.001007;E0.001006'},"
- "{x:1008, y:1009, latlong:'N0.001009;E0.001008'}]"
+ "{x:1008, y:1009, latlong:'N0.001009;E0.001008'}]"
",wp2:[{item:1048370,weight:43},{item:1048382,weight:44}]"
",wp2x:[{ x:1012, y:1013, latlong:'N0.001013;E0.001012'},"
- "{ x:1014, y:1015, latlong:'N0.001015;E0.001014'}]}",
- *rep, 0, false));
+ "{ x:1014, y:1015, latlong:'N0.001015;E0.001014'}]}"
+ "}]}", *rep));
}
-
TEST_F("requireThatRawFieldsWorks", Fixture)
{
Schema s;
@@ -1102,26 +1040,26 @@ TEST_F("requireThatRawFieldsWorks", Fixture)
BuildContext bc(s);
DBContext dc(bc._repo, getDocTypeName());
Document::UP exp = bc._bld.startDocument("id:ns:searchdocument::0").
- startSummaryField("i").
- addRaw(raw1s.c_str(), raw1s.size()).
- endField().
- startSummaryField("araw").
- startElement().
- addRaw(raw1a0.c_str(), raw1a0.size()).
- endElement().
- startElement().
- addRaw(raw1a1.c_str(), raw1a1.size()).
- endElement().
- endField().
- startSummaryField("wraw").
- startElement(46).
- addRaw(raw1w1.c_str(), raw1w1.size()).
- endElement().
- startElement(45).
- addRaw(raw1w0.c_str(), raw1w0.size()).
- endElement().
- endField().
- endDocument();
+ startSummaryField("i").
+ addRaw(raw1s.c_str(), raw1s.size()).
+ endField().
+ startSummaryField("araw").
+ startElement().
+ addRaw(raw1a0.c_str(), raw1a0.size()).
+ endElement().
+ startElement().
+ addRaw(raw1a1.c_str(), raw1a1.size()).
+ endElement().
+ endField().
+ startSummaryField("wraw").
+ startElement(46).
+ addRaw(raw1w1.c_str(), raw1w1.size()).
+ endElement().
+ startElement(45).
+ addRaw(raw1w0.c_str(), raw1w0.size()).
+ endElement().
+ endField().
+ endDocument();
dc._sa->put(1, 1, *exp);
IDocumentStore & store = dc._ddb->getReadySubDB()->getSummaryManager()->getBackingStore();
@@ -1156,7 +1094,6 @@ TEST_F("requireThatRawFieldsWorks", Fixture)
}
}
-
TEST_F("requireThatFieldCacheRepoCanReturnDefaultFieldCache", Fixture)
{
Schema s;
@@ -1169,7 +1106,6 @@ TEST_F("requireThatFieldCacheRepoCanReturnDefaultFieldCache", Fixture)
EXPECT_EQUAL("a", cache->getField(0)->getName());
}
-
Fixture::Fixture()
: _summaryCfg(),
_resultCfg(),
@@ -1181,7 +1117,7 @@ Fixture::Fixture()
_resultCfg.ReadConfig(*_summaryCfg, cfgId.c_str());
std::string mapCfgId("summarymap");
std::unique_ptr<vespa::config::search::SummarymapConfig> mapCfg = config::ConfigGetter<vespa::config::search::SummarymapConfig>::getConfig(
- mapCfgId, config::FileSpec(TEST_PATH("summarymap.cfg")));
+ mapCfgId, config::FileSpec(TEST_PATH("summarymap.cfg")));
for (size_t i = 0; i < mapCfg->override.size(); ++i) {
const vespa::config::search::SummarymapConfig::Override & o = mapCfg->override[i];
if (o.command == "dynamicteaser") {