path: root/searchcore/src
author      Henning Baldersheim <balder@yahoo-inc.com>  2019-08-16 14:48:11 +0000
committer   Henning Baldersheim <balder@yahoo-inc.com>  2019-08-16 17:22:45 +0000
commit      224b5fbe12ecab75beee6efe24068a9ce7092110 (patch)
tree        3e8c0dd797c583b032b960488b2bf3436ccbd2b9 /searchcore/src
parent      5fb8e66dbd2d6e02a64a054e147ac7214943d563 (diff)
doc: -> id:
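The commit rewrites test document ids from the legacy "doc:<namespace>:<user-specified>" scheme to the "id:<namespace>:<document-type>:<key/value-pairs>:<user-specified>" scheme. A minimal sketch of the two forms, not part of the commit, assuming the documentid.h header path and reusing the placeholder namespace and type names seen in the tests below:

    // Sketch only: legacy vs. current document id syntax used in this rename.
    #include <vespa/document/base/documentid.h>
    using document::DocumentId;

    int main() {
        DocumentId legacy("doc:test:1");       // old scheme: doc:<namespace>:<user-specified>
        DocumentId current("id:ns:test::1");   // new scheme: id:<namespace>:<doctype>:<options>:<user-specified>
        // The two forms hash to different global ids, so tests must migrate every id consistently.
        return legacy.getGlobalId() == current.getGlobalId() ? 1 : 0;
    }

The hunks below apply this rename mechanically across the tests; the only non-test files touched are feeddebugger.h and documentretrieverbase.cpp.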
Diffstat (limited to 'searchcore/src')
-rw-r--r--  searchcore/src/tests/proton/attribute/attribute_test.cpp                    34
-rw-r--r--  searchcore/src/tests/proton/common/cachedselect_test.cpp                    22
-rw-r--r--  searchcore/src/tests/proton/common/selectpruner_test.cpp                    67
-rw-r--r--  searchcore/src/tests/proton/docsummary/docsummary.cpp                       59
-rw-r--r--  searchcore/src/tests/proton/docsummary/summaryfieldconverter_test.cpp       12
-rw-r--r--  searchcore/src/tests/proton/document_iterator/document_iterator_test.cpp   270
-rw-r--r--  searchcore/src/tests/proton/documentdb/feedhandler/feedhandler_test.cpp      8
-rw-r--r--  searchcore/src/tests/proton/documentdb/feedview/feedview_test.cpp           14
-rw-r--r--  searchcore/src/tests/proton/documentmetastore/documentmetastore_test.cpp     4
-rw-r--r--  searchcore/src/tests/proton/feed_and_search/feed_and_search.cpp              2
-rw-r--r--  searchcore/src/tests/proton/feedoperation/feedoperation_test.cpp             6
-rw-r--r--  searchcore/src/tests/proton/index/fusionrunner_test.cpp                      2
-rw-r--r--  searchcore/src/tests/proton/index/index_writer/index_writer_test.cpp         2
-rw-r--r--  searchcore/src/tests/proton/index/indexmanager_test.cpp                      2
-rw-r--r--  searchcore/src/tests/proton/matching/matching_test.cpp                      46
-rw-r--r--  searchcore/src/tests/proton/server/documentretriever_test.cpp                9
-rw-r--r--  searchcore/src/tests/proton/server/feeddebugger_test.cpp                    10
-rw-r--r--  searchcore/src/tests/proton/server/feedstates_test.cpp                       2
-rw-r--r--  searchcore/src/vespa/searchcore/proton/common/feeddebugger.h                 2
-rw-r--r--  searchcore/src/vespa/searchcore/proton/server/documentretrieverbase.cpp     14
20 files changed, 271 insertions, 316 deletions
diff --git a/searchcore/src/tests/proton/attribute/attribute_test.cpp b/searchcore/src/tests/proton/attribute/attribute_test.cpp
index 3734d2fe1dc..3da27cde10e 100644
--- a/searchcore/src/tests/proton/attribute/attribute_test.cpp
+++ b/searchcore/src/tests/proton/attribute/attribute_test.cpp
@@ -204,7 +204,7 @@ TEST_F("require that attribute writer handles put", Fixture)
attribute::ConstCharContent sbuf;
{ // empty document should give default values
EXPECT_EQUAL(1u, a1->getNumDocs());
- f.put(1, *idb.startDocument("doc::1").endDocument(), 1);
+ f.put(1, *idb.startDocument("id:ns:searchdocument::1").endDocument(), 1);
EXPECT_EQUAL(2u, a1->getNumDocs());
EXPECT_EQUAL(2u, a2->getNumDocs());
EXPECT_EQUAL(2u, a3->getNumDocs());
@@ -226,7 +226,7 @@ TEST_F("require that attribute writer handles put", Fixture)
EXPECT_EQUAL(strcmp("", sbuf[0]), 0);
}
{ // document with single value & multi value attribute
- Document::UP doc = idb.startDocument("doc::2").
+ Document::UP doc = idb.startDocument("id:ns:searchdocument::2").
startAttributeField("a1").addInt(10).endField().
startAttributeField("a2").startElement().addInt(20).endElement().
startElement().addInt(30).endElement().endField().endDocument();
@@ -246,7 +246,7 @@ TEST_F("require that attribute writer handles put", Fixture)
EXPECT_EQUAL(30u, ibuf[1]);
}
{ // replace existing document
- Document::UP doc = idb.startDocument("doc::2").
+ Document::UP doc = idb.startDocument("id:ns:searchdocument::2").
startAttributeField("a1").addInt(100).endField().
startAttributeField("a2").startElement().addInt(200).endElement().
startElement().addInt(300).endElement().
@@ -281,7 +281,7 @@ TEST_F("require that attribute writer handles predicate put", Fixture)
// empty document should give default values
EXPECT_EQUAL(1u, a1->getNumDocs());
- f.put(1, *idb.startDocument("doc::1").endDocument(), 1);
+ f.put(1, *idb.startDocument("id:ns:searchdocument::1").endDocument(), 1);
EXPECT_EQUAL(2u, a1->getNumDocs());
EXPECT_EQUAL(1u, a1->getStatus().getLastSyncToken());
EXPECT_EQUAL(0u, index.getZeroConstraintDocs().size());
@@ -289,7 +289,7 @@ TEST_F("require that attribute writer handles predicate put", Fixture)
// document with single value attribute
PredicateSlimeBuilder builder;
Document::UP doc =
- idb.startDocument("doc::2").startAttributeField("a1")
+ idb.startDocument("id:ns:searchdocument::2").startAttributeField("a1")
.addPredicate(builder.true_predicate().build())
.endField().endDocument();
f.put(2, *doc, 2);
@@ -301,7 +301,7 @@ TEST_F("require that attribute writer handles predicate put", Fixture)
EXPECT_FALSE(it.valid());
// replace existing document
- doc = idb.startDocument("doc::2").startAttributeField("a1")
+ doc = idb.startDocument("id:ns:searchdocument::2").startAttributeField("a1")
.addPredicate(builder.feature("foo").value("bar").build())
.endField().endDocument();
f.put(3, *doc, 2);
@@ -374,7 +374,7 @@ TEST_F("require that visibilitydelay is honoured", Fixture)
DocBuilder idb(s);
EXPECT_EQUAL(1u, a1->getNumDocs());
EXPECT_EQUAL(0u, a1->getStatus().getLastSyncToken());
- Document::UP doc = idb.startDocument("doc::1")
+ Document::UP doc = idb.startDocument("id:ns:searchdocument::1")
.startAttributeField("a1").addStr("10").endField()
.endDocument();
f.put(3, *doc, 1);
@@ -398,11 +398,11 @@ TEST_F("require that visibilitydelay is honoured", Fixture)
EXPECT_EQUAL(8u, a1->getStatus().getLastSyncToken());
verifyAttributeContent(*a1, 2, "10");
- awDelayed.put(9, *idb.startDocument("doc::1").startAttributeField("a1").addStr("11").endField().endDocument(),
+ awDelayed.put(9, *idb.startDocument("id:ns:searchdocument::1").startAttributeField("a1").addStr("11").endField().endDocument(),
2, false, emptyCallback);
- awDelayed.put(10, *idb.startDocument("doc::1").startAttributeField("a1").addStr("20").endField().endDocument(),
+ awDelayed.put(10, *idb.startDocument("id:ns:searchdocument::1").startAttributeField("a1").addStr("20").endField().endDocument(),
2, false, emptyCallback);
- awDelayed.put(11, *idb.startDocument("doc::1").startAttributeField("a1").addStr("30").endField().endDocument(),
+ awDelayed.put(11, *idb.startDocument("id:ns:searchdocument::1").startAttributeField("a1").addStr("30").endField().endDocument(),
2, false, emptyCallback);
EXPECT_EQUAL(8u, a1->getStatus().getLastSyncToken());
verifyAttributeContent(*a1, 2, "10");
@@ -422,7 +422,7 @@ TEST_F("require that attribute writer handles predicate remove", Fixture)
DocBuilder idb(s);
PredicateSlimeBuilder builder;
Document::UP doc =
- idb.startDocument("doc::1").startAttributeField("a1")
+ idb.startDocument("id:ns:searchdocument::1").startAttributeField("a1")
.addPredicate(builder.true_predicate().build())
.endField().endDocument();
f.put(1, *doc, 1);
@@ -447,7 +447,7 @@ TEST_F("require that attribute writer handles update", Fixture)
schema.addAttributeField(Schema::AttributeField("a2", schema::DataType::INT32, CollectionType::SINGLE));
DocBuilder idb(schema);
const document::DocumentType &dt(idb.getDocumentType());
- DocumentUpdate upd(*idb.getDocumentTypeRepo(), dt, DocumentId("doc::1"));
+ DocumentUpdate upd(*idb.getDocumentTypeRepo(), dt, DocumentId("id:ns:searchdocument::1"));
upd.addUpdate(FieldUpdate(upd.getType().getField("a1"))
.addUpdate(ArithmeticValueUpdate(ArithmeticValueUpdate::Add, 5)));
upd.addUpdate(FieldUpdate(upd.getType().getField("a2"))
@@ -484,14 +484,14 @@ TEST_F("require that attribute writer handles predicate update", Fixture)
DocBuilder idb(schema);
PredicateSlimeBuilder builder;
Document::UP doc =
- idb.startDocument("doc::1").startAttributeField("a1")
+ idb.startDocument("id:ns:searchdocument::1").startAttributeField("a1")
.addPredicate(builder.true_predicate().build())
.endField().endDocument();
f.put(1, *doc, 1);
EXPECT_EQUAL(2u, a1->getNumDocs());
const document::DocumentType &dt(idb.getDocumentType());
- DocumentUpdate upd(*idb.getDocumentTypeRepo(), dt, DocumentId("doc::1"));
+ DocumentUpdate upd(*idb.getDocumentTypeRepo(), dt, DocumentId("id:ns:searchdocument::1"));
PredicateFieldValue new_value(builder.feature("foo").value("bar").build());
upd.addUpdate(FieldUpdate(upd.getType().getField("a1"))
.addUpdate(AssignValueUpdate(new_value)));
@@ -633,7 +633,7 @@ createTensorSchema() {
Document::UP
createTensorPutDoc(DocBuilder &builder, const Tensor &tensor) {
- return builder.startDocument("doc::1").
+ return builder.startDocument("id:ns:searchdocument::1").
startAttributeField("a1").
addTensor(tensor.clone()).endField().endDocument();
}
@@ -678,7 +678,7 @@ TEST_F("require that attribute writer handles tensor assign update", Fixture)
EXPECT_TRUE(tensor->equals(*tensor2));
const document::DocumentType &dt(builder.getDocumentType());
- DocumentUpdate upd(*builder.getDocumentTypeRepo(), dt, DocumentId("doc::1"));
+ DocumentUpdate upd(*builder.getDocumentTypeRepo(), dt, DocumentId("id:ns:searchdocument::1"));
auto new_tensor = make_tensor(TensorSpec("tensor(x{},y{})")
.add({{"x", "8"}, {"y", "9"}}, 11));
TensorDataType xySparseTensorDataType(vespalib::eval::ValueType::from_spec("tensor(x{},y{})"));
@@ -728,7 +728,7 @@ putAttributes(Fixture &f, std::vector<uint32_t> expExecuteHistory)
EXPECT_EQUAL(1u, a1->getNumDocs());
EXPECT_EQUAL(1u, a2->getNumDocs());
EXPECT_EQUAL(1u, a3->getNumDocs());
- f.put(1, *idb.startDocument("doc::1").
+ f.put(1, *idb.startDocument("id:ns:searchdocument::1").
startAttributeField("a1").addInt(10).endField().
startAttributeField("a2").addInt(15).endField().
startAttributeField("a3").addInt(20).endField().
diff --git a/searchcore/src/tests/proton/common/cachedselect_test.cpp b/searchcore/src/tests/proton/common/cachedselect_test.cpp
index dcba8fda1c6..df414439bce 100644
--- a/searchcore/src/tests/proton/common/cachedselect_test.cpp
+++ b/searchcore/src/tests/proton/common/cachedselect_test.cpp
@@ -466,10 +466,10 @@ TEST_F("Test that basic select works", TestFixture)
{
MyDB &db(*f._db);
- db.addDoc(1u, "doc:test:1", "hello", "null", 45, 37);
- db.addDoc(2u, "doc:test:2", "gotcha", "foo", 3, 25);
- db.addDoc(3u, "doc:test:3", "gotcha", "foo", noIntVal, noIntVal);
- db.addDoc(4u, "doc:test:4", "null", "foo", noIntVal, noIntVal);
+ db.addDoc(1u, "id:ns:test::1", "hello", "null", 45, 37);
+ db.addDoc(2u, "id:ns:test::2", "gotcha", "foo", 3, 25);
+ db.addDoc(3u, "id:ns:test::3", "gotcha", "foo", noIntVal, noIntVal);
+ db.addDoc(4u, "id:ns:test::4", "null", "foo", noIntVal, noIntVal);
CachedSelect::SP cs;
@@ -566,9 +566,9 @@ struct PreDocSelectFixture : public TestFixture {
PreDocSelectFixture()
: TestFixture()
{
- db().addDoc(1u, "doc:test:1", "foo", "null", 3, 5);
- db().addDoc(2u, "doc:test:1", "bar", "null", 3, 5);
- db().addDoc(3u, "doc:test:2", "foo", "null", 7, 5);
+ db().addDoc(1u, "id:ns:test::1", "foo", "null", 3, 5);
+ db().addDoc(2u, "id:ns:test::1", "bar", "null", 3, 5);
+ db().addDoc(3u, "id:ns:test::2", "foo", "null", 7, 5);
}
};
@@ -602,10 +602,10 @@ TEST_F("Test performance when using attributes", TestFixture)
{
MyDB &db(*f._db);
- db.addDoc(1u, "doc:test:1", "hello", "null", 45, 37);
- db.addDoc(2u, "doc:test:2", "gotcha", "foo", 3, 25);
- db.addDoc(3u, "doc:test:3", "gotcha", "foo", noIntVal, noIntVal);
- db.addDoc(4u, "doc:test:4", "null", "foo", noIntVal, noIntVal);
+ db.addDoc(1u, "id:ns:test::1", "hello", "null", 45, 37);
+ db.addDoc(2u, "id:ns:test::2", "gotcha", "foo", 3, 25);
+ db.addDoc(3u, "id:ns:test::3", "gotcha", "foo", noIntVal, noIntVal);
+ db.addDoc(4u, "id:ns:test::4", "null", "foo", noIntVal, noIntVal);
CachedSelect::SP cs;
cs = f.testParse("test.aa < 45", "test");
diff --git a/searchcore/src/tests/proton/common/selectpruner_test.cpp b/searchcore/src/tests/proton/common/selectpruner_test.cpp
index a7feb865d96..5b1fa3ed4bf 100644
--- a/searchcore/src/tests/proton/common/selectpruner_test.cpp
+++ b/searchcore/src/tests/proton/common/selectpruner_test.cpp
@@ -36,8 +36,7 @@ using search::AttributeFactory;
typedef Node::UP NodeUP;
-namespace
-{
+namespace {
const int32_t doc_type_id = 787121340;
const string type_name = "test";
@@ -57,9 +56,6 @@ const string invalid_name("test_2.ac > 3999");
const string invalid2_name("test_2.ac > 4999");
const string empty("");
-const document::DocumentId docId("doc:test:1");
-
-
std::unique_ptr<const DocumentTypeRepo>
makeDocTypeRepo()
{
@@ -135,23 +131,12 @@ public:
bool _hasDocuments;
TestFixture();
-
~TestFixture();
- void
- testParse(const string &selection);
-
- void
- testParseFail(const string &selection);
-
- void
- testPrune(const string &selection,
- const string &exp);
-
- void
- testPrune(const string &selection,
- const string &exp,
- const string &docTypeName);
+ void testParse(const string &selection);
+ void testParseFail(const string &selection);
+ void testPrune(const string &selection, const string &exp);
+ void testPrune(const string &selection, const string &exp, const string &docTypeName);
};
@@ -169,28 +154,22 @@ TestFixture::TestFixture()
}
-TestFixture::~TestFixture()
-{
-}
+TestFixture::~TestFixture() = default;
void
TestFixture::testParse(const string &selection)
{
const DocumentTypeRepo &repo(*_repoUP);
- document::select::Parser parser(repo,
- document::BucketIdFactory());
+ document::select::Parser parser(repo,document::BucketIdFactory());
NodeUP select;
try {
- LOG(info,
- "Trying to parse '%s'",
- selection.c_str());
+ LOG(info, "Trying to parse '%s'", selection.c_str());
select = parser.parse(selection);
} catch (document::select::ParsingFailedException &e) {
- LOG(info,
- "Parse failed: %s", e.what());
+ LOG(info, "Parse failed: %s", e.what());
select.reset(0);
}
ASSERT_TRUE(select.get() != NULL);
@@ -201,20 +180,15 @@ void
TestFixture::testParseFail(const string &selection)
{
const DocumentTypeRepo &repo(*_repoUP);
- document::select::Parser parser(repo,
- document::BucketIdFactory());
+ document::select::Parser parser(repo,document::BucketIdFactory());
NodeUP select;
try {
- LOG(info,
- "Trying to parse '%s'",
- selection.c_str());
+ LOG(info, "Trying to parse '%s'", selection.c_str());
select = parser.parse(selection);
} catch (document::select::ParsingFailedException &e) {
- LOG(info,
- "Parse failed: %s",
- e.getMessage().c_str());
+ LOG(info, "Parse failed: %s", e.getMessage().c_str());
select.reset(0);
}
ASSERT_TRUE(select.get() == NULL);
@@ -222,25 +196,18 @@ TestFixture::testParseFail(const string &selection)
void
-TestFixture::testPrune(const string &selection,
- const string &exp,
- const string &docTypeName)
+TestFixture::testPrune(const string &selection, const string &exp, const string &docTypeName)
{
const DocumentTypeRepo &repo(*_repoUP);
- document::select::Parser parser(repo,
- document::BucketIdFactory());
+ document::select::Parser parser(repo,document::BucketIdFactory());
NodeUP select;
try {
- LOG(info,
- "Trying to parse '%s' with docType=%s",
- selection.c_str(),
- docTypeName.c_str());
+ LOG(info, "Trying to parse '%s' with docType=%s", selection.c_str(), docTypeName.c_str());
select = parser.parse(selection);
} catch (document::select::ParsingFailedException &e) {
- LOG(info,
- "Parse failed: %s", e.what());
+ LOG(info, "Parse failed: %s", e.what());
select.reset(0);
}
ASSERT_TRUE(select.get() != NULL);
@@ -249,7 +216,7 @@ TestFixture::testPrune(const string &selection,
LOG(info, "ParseTree: '%s'", os.str().c_str());
const DocumentType *docType = repo.getDocumentType(docTypeName);
ASSERT_TRUE(docType != NULL);
- Document::UP emptyDoc(new Document(*docType, docId));
+ Document::UP emptyDoc(new Document(*docType, document::DocumentId("id:ns:" + docTypeName + "::1")));
emptyDoc->setRepo(repo);
SelectPruner pruner(docTypeName, &_amgr, *emptyDoc, repo, _hasFields, _hasDocuments);
pruner.process(*select);
diff --git a/searchcore/src/tests/proton/docsummary/docsummary.cpp b/searchcore/src/tests/proton/docsummary/docsummary.cpp
index e8152161faa..0a9f3127844 100644
--- a/searchcore/src/tests/proton/docsummary/docsummary.cpp
+++ b/searchcore/src/tests/proton/docsummary/docsummary.cpp
@@ -429,7 +429,7 @@ Test::requireThatAdapterHandlesAllFieldTypes()
s.addSummaryField(Schema::SummaryField("l", schema::DataType::STRING));
BuildContext bc(s);
- bc._bld.startDocument("doc::0");
+ bc._bld.startDocument("id:ns:searchdocument::0");
bc._bld.startSummaryField("a").addInt(255).endField();
bc._bld.startSummaryField("b").addInt(32767).endField();
bc._bld.startSummaryField("c").addInt(2147483647).endField();
@@ -478,12 +478,12 @@ Test::requireThatAdapterHandlesMultipleDocuments()
s.addSummaryField(Schema::SummaryField("a", schema::DataType::INT32));
BuildContext bc(s);
- bc._bld.startDocument("doc::0").
+ bc._bld.startDocument("id:ns:searchdocument::0").
startSummaryField("a").
addInt(1000).
endField();
bc.endDocument(0);
- bc._bld.startDocument("doc::1").
+ bc._bld.startDocument("id:ns:searchdocument::1").
startSummaryField("a").
addInt(2000).endField();
bc.endDocument(1);
@@ -519,7 +519,7 @@ Test::requireThatAdapterHandlesDocumentIdField()
Schema s;
s.addSummaryField(Schema::SummaryField("documentid", schema::DataType::STRING));
BuildContext bc(s);
- bc._bld.startDocument("doc::0").
+ bc._bld.startDocument("id:ns:searchdocument::0").
startSummaryField("documentid").
addStr("foo").
endField();
@@ -528,16 +528,16 @@ Test::requireThatAdapterHandlesDocumentIdField()
bc.createFieldCacheRepo(getResultConfig())->getFieldCache("class4"),
getMarkupFields());
GeneralResultPtr res = getResult(dsa, 0);
- EXPECT_EQUAL("doc::0", std::string(res->GetEntry("documentid")->_stringval,
+ EXPECT_EQUAL("id:ns:searchdocument::0", std::string(res->GetEntry("documentid")->_stringval,
res->GetEntry("documentid")->_stringlen));
}
-GlobalId gid1 = DocumentId("doc::1").getGlobalId(); // lid 1
-GlobalId gid2 = DocumentId("doc::2").getGlobalId(); // lid 2
-GlobalId gid3 = DocumentId("doc::3").getGlobalId(); // lid 3
-GlobalId gid4 = DocumentId("doc::4").getGlobalId(); // lid 4
-GlobalId gid9 = DocumentId("doc::9").getGlobalId(); // not existing
+GlobalId gid1 = DocumentId("id:ns:searchdocument::1").getGlobalId(); // lid 1
+GlobalId gid2 = DocumentId("id:ns:searchdocument::2").getGlobalId(); // lid 2
+GlobalId gid3 = DocumentId("id:ns:searchdocument::3").getGlobalId(); // lid 3
+GlobalId gid4 = DocumentId("id:ns:searchdocument::4").getGlobalId(); // lid 4
+GlobalId gid9 = DocumentId("id:ns:searchdocument::9").getGlobalId(); // not existing
void
Test::requireThatDocsumRequestIsProcessed()
@@ -547,31 +547,31 @@ Test::requireThatDocsumRequestIsProcessed()
BuildContext bc(s);
DBContext dc(bc._repo, getDocTypeName());
- dc.put(*bc._bld.startDocument("doc::1").
+ dc.put(*bc._bld.startDocument("id:ns:searchdocument::1").
startSummaryField("a").
addInt(10).
endField().
endDocument(),
1);
- dc.put(*bc._bld.startDocument("doc::2").
+ dc.put(*bc._bld.startDocument("id:ns:searchdocument::2").
startSummaryField("a").
addInt(20).
endField().
endDocument(),
2);
- dc.put(*bc._bld.startDocument("doc::3").
+ dc.put(*bc._bld.startDocument("id:ns:searchdocument::3").
startSummaryField("a").
addInt(30).
endField().
endDocument(),
3);
- dc.put(*bc._bld.startDocument("doc::4").
+ dc.put(*bc._bld.startDocument("id:ns:searchdocument::4").
startSummaryField("a").
addInt(40).
endField().
endDocument(),
4);
- dc.put(*bc._bld.startDocument("doc::5").
+ dc.put(*bc._bld.startDocument("id:ns:searchdocument::5").
startSummaryField("a").
addInt(50).
endField().
@@ -607,7 +607,7 @@ Test::requireThatRewritersAreUsed()
BuildContext bc(s);
DBContext dc(bc._repo, getDocTypeName());
- dc.put(*bc._bld.startDocument("doc::1").
+ dc.put(*bc._bld.startDocument("id:ns:searchdocument::1").
startSummaryField("aa").
addInt(10).
endField().
@@ -634,7 +634,7 @@ Test::requireThatSummariesTimeout()
BuildContext bc(s);
DBContext dc(bc._repo, getDocTypeName());
- dc.put(*bc._bld.startDocument("doc::1").
+ dc.put(*bc._bld.startDocument("id:ns:searchdocument::1").
startSummaryField("aa").
addInt(10).
endField().
@@ -686,10 +686,10 @@ Test::requireThatAttributesAreUsed()
BuildContext bc(s);
DBContext dc(bc._repo, getDocTypeName());
- dc.put(*bc._bld.startDocument("doc::1").
+ dc.put(*bc._bld.startDocument("id:ns:searchdocument::1").
endDocument(),
1); // empty doc
- dc.put(*bc._bld.startDocument("doc::2").
+ dc.put(*bc._bld.startDocument("id:ns:searchdocument::2").
startAttributeField("ba").
addInt(10).
endField().
@@ -753,7 +753,7 @@ Test::requireThatAttributesAreUsed()
endField().
endDocument(),
2);
- dc.put(*bc._bld.startDocument("doc::3").
+ dc.put(*bc._bld.startDocument("id:ns:searchdocument::3").
endDocument(),
3); // empty doc
@@ -818,7 +818,7 @@ Test::requireThatSummaryAdapterHandlesPutAndRemove()
s.addSummaryField(Schema::SummaryField("f1", schema::DataType::STRING, CollectionType::SINGLE));
BuildContext bc(s);
DBContext dc(bc._repo, getDocTypeName());
- Document::UP exp = bc._bld.startDocument("doc::1").
+ Document::UP exp = bc._bld.startDocument("id:ns:searchdocument::1").
startSummaryField("f1").
addStr("foo").
endField().
@@ -854,7 +854,7 @@ Test::requireThatAnnotationsAreUsed()
s.addSummaryField(Schema::SummaryField("dynamicstring", schema::DataType::STRING, CollectionType::SINGLE));
BuildContext bc(s);
DBContext dc(bc._repo, getDocTypeName());
- Document::UP exp = bc._bld.startDocument("doc::0").
+ Document::UP exp = bc._bld.startDocument("id:ns:searchdocument::0").
startIndexField("g").
addStr("foo").
addStr("bar").
@@ -908,7 +908,7 @@ Test::requireThatUrisAreUsed()
s.addSummaryField(Schema::SummaryField("uriwset", schema::DataType::STRING, CollectionType::WEIGHTEDSET));
BuildContext bc(s);
DBContext dc(bc._repo, getDocTypeName());
- Document::UP exp = bc._bld.startDocument("doc::0").
+ Document::UP exp = bc._bld.startDocument("id:ns:searchdocument::0").
startIndexField("urisingle").
startSubField("all").
addUrlTokenizedString("http://www.example.com:81/fluke?ab=2#4").
@@ -1074,7 +1074,7 @@ Test::requireThatPositionsAreUsed()
BuildContext bc(s);
DBContext dc(bc._repo, getDocTypeName());
- Document::UP exp = bc._bld.startDocument("doc::1").
+ Document::UP exp = bc._bld.startDocument("id:ns:searchdocument::1").
startAttributeField("sp2").
addPosition(1002, 1003).
endField().
@@ -1146,7 +1146,7 @@ Test::requireThatRawFieldsWorks()
BuildContext bc(s);
DBContext dc(bc._repo, getDocTypeName());
- Document::UP exp = bc._bld.startDocument("doc::0").
+ Document::UP exp = bc._bld.startDocument("id:ns:searchdocument::0").
startSummaryField("i").
addRaw(raw1s.c_str(), raw1s.size()).
endField().
@@ -1178,8 +1178,7 @@ Test::requireThatRawFieldsWorks()
bc.createFieldCacheRepo(getResultConfig())->getFieldCache("class0"),
getMarkupFields());
- ASSERT_TRUE(assertString(raw1s,
- "i", dsa, 1));
+ ASSERT_TRUE(assertString(raw1s, "i", dsa, 1));
GeneralResultPtr res = getResult(dsa, 1);
{
@@ -1237,14 +1236,12 @@ Test::Test()
continue;
// Assume just one argument: source field that must contain markup
_markupFields.insert(markupField);
- LOG(info,
- "Field %s has markup",
- markupField.c_str());
+ LOG(info, "Field %s has markup", markupField.c_str());
}
}
}
-Test::~Test() {}
+Test::~Test() = default;
int
Test::Main()
diff --git a/searchcore/src/tests/proton/docsummary/summaryfieldconverter_test.cpp b/searchcore/src/tests/proton/docsummary/summaryfieldconverter_test.cpp
index cc6eef14fd6..b295926c64a 100644
--- a/searchcore/src/tests/proton/docsummary/summaryfieldconverter_test.cpp
+++ b/searchcore/src/tests/proton/docsummary/summaryfieldconverter_test.cpp
@@ -350,7 +350,7 @@ StringFieldValue Test::makeAnnotatedChineseString() {
}
Document Test::makeDocument() {
- Document doc(getDocType(), DocumentId("doc:scheme:"));
+ Document doc(getDocType(), DocumentId("id:ns:indexingdocument::"));
doc.setRepo(*_documentRepo);
doc.setValue("string", makeAnnotatedString());
@@ -667,7 +667,7 @@ Test::requireThatPredicateIsPrinted()
Cursor &arr = obj.setArray(Predicate::SET);
arr.addString("bar");
- Document doc(getDocType(), DocumentId("doc:scheme:"));
+ Document doc(getDocType(), DocumentId("id:ns:indexingdocument::"));
doc.setRepo(*_documentRepo);
doc.setValue("predicate", PredicateFieldValue(std::move(input)));
@@ -687,7 +687,7 @@ Test::requireThatTensorIsNotConverted()
TensorFieldValue tensorFieldValue(tensorDataType);
tensorFieldValue = make_tensor(TensorSpec("tensor(x{},y{})")
.add({{"x", "4"}, {"y", "5"}}, 7));
- Document doc(getDocType(), DocumentId("doc:scheme:"));
+ Document doc(getDocType(), DocumentId("id:ns:indexingdocument::"));
doc.setRepo(*_documentRepo);
doc.setValue("tensor", tensorFieldValue);
@@ -712,7 +712,7 @@ const ReferenceDataType& Test::getAsRefType(const string& name) const {
}
void Test::requireThatNonEmptyReferenceIsConvertedToStringWithId() {
- Document doc(getDocType(), DocumentId("doc:scheme:"));
+ Document doc(getDocType(), DocumentId("id:ns:indexingdocument::"));
doc.setRepo(*_documentRepo);
doc.setValue("ref", ReferenceFieldValue(
getAsRefType("Reference<target_dummy_document>"),
@@ -723,7 +723,7 @@ void Test::requireThatNonEmptyReferenceIsConvertedToStringWithId() {
}
void Test::requireThatEmptyReferenceIsConvertedToEmptyString() {
- Document doc(getDocType(), DocumentId("doc:scheme:"));
+ Document doc(getDocType(), DocumentId("id:ns:indexingdocument::"));
doc.setRepo(*_documentRepo);
doc.setValue("ref", ReferenceFieldValue(
getAsRefType("Reference<target_dummy_document>")));
@@ -735,7 +735,7 @@ void Test::requireThatEmptyReferenceIsConvertedToEmptyString() {
// Own test for this to ensure that SlimeFiller code path is executed,
// as this only triggers for composite field types.
void Test::requireThatReferenceInCompositeTypeEmitsSlimeData() {
- Document doc(getDocType(), DocumentId("doc:scheme:"));
+ Document doc(getDocType(), DocumentId("id:ns:indexingdocument::"));
doc.setRepo(*_documentRepo);
StructFieldValue sfv(getDataType("indexingdocument.header.nested"));
diff --git a/searchcore/src/tests/proton/document_iterator/document_iterator_test.cpp b/searchcore/src/tests/proton/document_iterator/document_iterator_test.cpp
index ad5ac55c5e9..9342ddd4b8a 100644
--- a/searchcore/src/tests/proton/document_iterator/document_iterator_test.cpp
+++ b/searchcore/src/tests/proton/document_iterator/document_iterator_test.cpp
@@ -416,17 +416,17 @@ void checkEntry(const IterateResult &res, size_t idx, const Document &doc, const
TEST("require that custom retrievers work as expected") {
IDocumentRetriever::SP dr =
- cat(cat(doc("doc:foo:1", Timestamp(2), bucket(5)),
- rem("doc:foo:2", Timestamp(3), bucket(5))),
- cat(doc("doc:foo:3", Timestamp(7), bucket(6)),
+ cat(cat(doc("id:ns:document::1", Timestamp(2), bucket(5)),
+ rem("id:ns:document::2", Timestamp(3), bucket(5))),
+ cat(doc("id:ns:document::3", Timestamp(7), bucket(6)),
nil()));
- EXPECT_FALSE(dr->getDocumentMetaData(DocumentId("doc:foo:bogus")).valid());
+ EXPECT_FALSE(dr->getDocumentMetaData(DocumentId("id:ns:document::bogus")).valid());
EXPECT_TRUE(dr->getDocument(1).get() == 0);
EXPECT_TRUE(dr->getDocument(2).get() == 0);
EXPECT_TRUE(dr->getDocument(3).get() != 0);
- TEST_DO(checkDoc(*dr, "doc:foo:1", 2, 5, false));
- TEST_DO(checkDoc(*dr, "doc:foo:2", 3, 5, true));
- TEST_DO(checkDoc(*dr, "doc:foo:3", 7, 6, false));
+ TEST_DO(checkDoc(*dr, "id:ns:document::1", 2, 5, false));
+ TEST_DO(checkDoc(*dr, "id:ns:document::2", 3, 5, true));
+ TEST_DO(checkDoc(*dr, "id:ns:document::3", 7, 6, false));
DocumentMetaData::Vector b5;
DocumentMetaData::Vector b6;
dr->getBucketMetaData(bucket(5), b5);
@@ -456,19 +456,19 @@ TEST("require that a list of empty retrievers can be iterated") {
TEST("require that normal documents can be iterated") {
DocumentIterator itr(bucket(5), document::AllFields(), selectAll(), newestV(), -1, false);
- itr.add(doc("doc:foo:1", Timestamp(2), bucket(5)));
- itr.add(cat(doc("doc:foo:2", Timestamp(3), bucket(5)),
- doc("doc:foo:3", Timestamp(4), bucket(5))));
+ itr.add(doc("id:ns:document::1", Timestamp(2), bucket(5)));
+ itr.add(cat(doc("id:ns:document::2", Timestamp(3), bucket(5)),
+ doc("id:ns:document::3", Timestamp(4), bucket(5))));
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
EXPECT_EQUAL(3u, res.getEntries().size());
- TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("doc:foo:1")), Timestamp(2)));
- TEST_DO(checkEntry(res, 1, Document(*DataType::DOCUMENT, DocumentId("doc:foo:2")), Timestamp(3)));
- TEST_DO(checkEntry(res, 2, Document(*DataType::DOCUMENT, DocumentId("doc:foo:3")), Timestamp(4)));
+ TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::1")), Timestamp(2)));
+ TEST_DO(checkEntry(res, 1, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::2")), Timestamp(3)));
+ TEST_DO(checkEntry(res, 2, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::3")), Timestamp(4)));
}
void verifyIterateIgnoringStopSignal(DocumentIterator & itr) {
- itr.add(doc("doc:foo:1", Timestamp(2), bucket(5)));
+ itr.add(doc("id:ns:document::1", Timestamp(2), bucket(5)));
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
EXPECT_EQUAL(1u, res.getEntries().size());
@@ -488,14 +488,14 @@ TEST("require that iterator ignoring maxbytes stops at the end, and does not aut
}
void verifyReadConsistency(DocumentIterator & itr, Committer & committer) {
- IDocumentRetriever::SP retriever = doc("doc:foo:1", Timestamp(2), bucket(5));
+ IDocumentRetriever::SP retriever = doc("id:ns:document::1", Timestamp(2), bucket(5));
IDocumentRetriever::SP commitAndWaitRetriever(new CommitAndWaitDocumentRetriever(retriever, committer));
itr.add(commitAndWaitRetriever);
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
EXPECT_EQUAL(1u, res.getEntries().size());
- TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("doc:foo:1")), Timestamp(2)));
+ TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::1")), Timestamp(2)));
EXPECT_EQUAL(0u, committer._commitCount);
}
@@ -516,7 +516,7 @@ TEST("require that readconsistency::strong does commit") {
}
TEST("require that docid limit is honoured") {
- IDocumentRetriever::SP retriever = doc("doc:foo:1", Timestamp(2), bucket(5));
+ IDocumentRetriever::SP retriever = doc("id:ns:document::1", Timestamp(2), bucket(5));
UnitDR & udr = dynamic_cast<UnitDR &>(*retriever);
udr.docid = 7;
DocumentIterator itr(bucket(5), document::AllFields(), selectAll(), newestV(), -1, false);
@@ -524,7 +524,7 @@ TEST("require that docid limit is honoured") {
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
EXPECT_EQUAL(1u, res.getEntries().size());
- TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("doc:foo:1")), Timestamp(2)));
+ TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::1")), Timestamp(2)));
udr.setDocIdLimit(7);
DocumentIterator limited(bucket(5), document::AllFields(), selectAll(), newestV(), -1, false);
@@ -536,46 +536,46 @@ TEST("require that docid limit is honoured") {
TEST("require that remove entries can be iterated") {
DocumentIterator itr(bucket(5), document::AllFields(), selectAll(), newestV(), -1, false);
- itr.add(rem("doc:foo:1", Timestamp(2), bucket(5)));
- itr.add(cat(rem("doc:foo:2", Timestamp(3), bucket(5)),
- rem("doc:foo:3", Timestamp(4), bucket(5))));
+ itr.add(rem("id:ns:document::1", Timestamp(2), bucket(5)));
+ itr.add(cat(rem("id:ns:document::2", Timestamp(3), bucket(5)),
+ rem("id:ns:document::3", Timestamp(4), bucket(5))));
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
EXPECT_EQUAL(3u, res.getEntries().size());
- TEST_DO(checkEntry(res, 0, DocumentId("doc:foo:1"), Timestamp(2)));
- TEST_DO(checkEntry(res, 1, DocumentId("doc:foo:2"), Timestamp(3)));
- TEST_DO(checkEntry(res, 2, DocumentId("doc:foo:3"), Timestamp(4)));
+ TEST_DO(checkEntry(res, 0, DocumentId("id:ns:document::1"), Timestamp(2)));
+ TEST_DO(checkEntry(res, 1, DocumentId("id:ns:document::2"), Timestamp(3)));
+ TEST_DO(checkEntry(res, 2, DocumentId("id:ns:document::3"), Timestamp(4)));
}
TEST("require that remove entries can be ignored") {
DocumentIterator itr(bucket(5), document::AllFields(), selectAll(), docV(), -1, false);
- itr.add(rem("doc:foo:1", Timestamp(2), bucket(5)));
- itr.add(cat(doc("doc:foo:2", Timestamp(3), bucket(5)),
- rem("doc:foo:3", Timestamp(4), bucket(5))));
+ itr.add(rem("id:ns:document::1", Timestamp(2), bucket(5)));
+ itr.add(cat(doc("id:ns:document::2", Timestamp(3), bucket(5)),
+ rem("id:ns:document::3", Timestamp(4), bucket(5))));
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
EXPECT_EQUAL(1u, res.getEntries().size());
- TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("doc:foo:2")), Timestamp(3)));
+ TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::2")), Timestamp(3)));
}
TEST("require that iterating all versions returns both documents and removes") {
DocumentIterator itr(bucket(5), document::AllFields(), selectAll(), allV(), -1, false);
- itr.add(rem("doc:foo:1", Timestamp(2), bucket(5)));
- itr.add(cat(doc("doc:foo:2", Timestamp(3), bucket(5)),
- rem("doc:foo:3", Timestamp(4), bucket(5))));
+ itr.add(rem("id:ns:document::1", Timestamp(2), bucket(5)));
+ itr.add(cat(doc("id:ns:document::2", Timestamp(3), bucket(5)),
+ rem("id:ns:document::3", Timestamp(4), bucket(5))));
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
EXPECT_EQUAL(3u, res.getEntries().size());
- TEST_DO(checkEntry(res, 0, DocumentId("doc:foo:1"), Timestamp(2)));
- TEST_DO(checkEntry(res, 1, Document(*DataType::DOCUMENT, DocumentId("doc:foo:2")), Timestamp(3)));
- TEST_DO(checkEntry(res, 2, DocumentId("doc:foo:3"), Timestamp(4)));
+ TEST_DO(checkEntry(res, 0, DocumentId("id:ns:document::1"), Timestamp(2)));
+ TEST_DO(checkEntry(res, 1, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::2")), Timestamp(3)));
+ TEST_DO(checkEntry(res, 2, DocumentId("id:ns:document::3"), Timestamp(4)));
}
TEST("require that using an empty field set returns meta-data only") {
DocumentIterator itr(bucket(5), document::NoFields(), selectAll(), newestV(), -1, false);
- itr.add(doc("doc:foo:1", Timestamp(2), bucket(5)));
- itr.add(cat(doc("doc:foo:2", Timestamp(3), bucket(5)),
- rem("doc:foo:3", Timestamp(4), bucket(5))));
+ itr.add(doc("id:ns:document::1", Timestamp(2), bucket(5)));
+ itr.add(cat(doc("id:ns:document::2", Timestamp(3), bucket(5)),
+ rem("id:ns:document::3", Timestamp(4), bucket(5))));
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
EXPECT_EQUAL(3u, res.getEntries().size());
@@ -586,30 +586,30 @@ TEST("require that using an empty field set returns meta-data only") {
TEST("require that entries in other buckets are skipped") {
DocumentIterator itr(bucket(5), document::AllFields(), selectAll(), newestV(), -1, false);
- itr.add(rem("doc:foo:1", Timestamp(2), bucket(6)));
- itr.add(cat(doc("doc:foo:2", Timestamp(3), bucket(5)),
- doc("doc:foo:3", Timestamp(4), bucket(6))));
+ itr.add(rem("id:ns:document::1", Timestamp(2), bucket(6)));
+ itr.add(cat(doc("id:ns:document::2", Timestamp(3), bucket(5)),
+ doc("id:ns:document::3", Timestamp(4), bucket(6))));
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
EXPECT_EQUAL(1u, res.getEntries().size());
- TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("doc:foo:2")), Timestamp(3)));
+ TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::2")), Timestamp(3)));
}
TEST("require that maxBytes splits iteration results") {
DocumentIterator itr(bucket(5), document::AllFields(), selectAll(), newestV(), -1, false);
- itr.add(doc("doc:foo:1", Timestamp(2), bucket(5)));
- itr.add(cat(rem("doc:foo:2", Timestamp(3), bucket(5)),
- doc("doc:foo:3", Timestamp(4), bucket(5))));
- IterateResult res1 = itr.iterate(getSize(Document(*DataType::DOCUMENT, DocumentId("doc:foo:1"))) +
- getSize(DocumentId("doc:foo:2")));
+ itr.add(doc("id:ns:document::1", Timestamp(2), bucket(5)));
+ itr.add(cat(rem("id:ns:document::2", Timestamp(3), bucket(5)),
+ doc("id:ns:document::3", Timestamp(4), bucket(5))));
+ IterateResult res1 = itr.iterate(getSize(Document(*DataType::DOCUMENT, DocumentId("id:ns:document::1"))) +
+ getSize(DocumentId("id:ns:document::2")));
EXPECT_TRUE(!res1.isCompleted());
EXPECT_EQUAL(2u, res1.getEntries().size());
- TEST_DO(checkEntry(res1, 0, Document(*DataType::DOCUMENT, DocumentId("doc:foo:1")), Timestamp(2)));
- TEST_DO(checkEntry(res1, 1, DocumentId("doc:foo:2"), Timestamp(3)));
+ TEST_DO(checkEntry(res1, 0, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::1")), Timestamp(2)));
+ TEST_DO(checkEntry(res1, 1, DocumentId("id:ns:document::2"), Timestamp(3)));
IterateResult res2 = itr.iterate(largeNum);
EXPECT_TRUE(res2.isCompleted());
- TEST_DO(checkEntry(res2, 0, Document(*DataType::DOCUMENT, DocumentId("doc:foo:3")), Timestamp(4)));
+ TEST_DO(checkEntry(res2, 0, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::3")), Timestamp(4)));
IterateResult res3 = itr.iterate(largeNum);
EXPECT_TRUE(res3.isCompleted());
@@ -618,9 +618,9 @@ TEST("require that maxBytes splits iteration results") {
TEST("require that maxBytes splits iteration results for meta-data only iteration") {
DocumentIterator itr(bucket(5), document::NoFields(), selectAll(), newestV(), -1, false);
- itr.add(doc("doc:foo:1", Timestamp(2), bucket(5)));
- itr.add(cat(rem("doc:foo:2", Timestamp(3), bucket(5)),
- doc("doc:foo:3", Timestamp(4), bucket(5))));
+ itr.add(doc("id:ns:document::1", Timestamp(2), bucket(5)));
+ itr.add(cat(rem("id:ns:document::2", Timestamp(3), bucket(5)),
+ doc("id:ns:document::3", Timestamp(4), bucket(5))));
IterateResult res1 = itr.iterate(getSize() + getSize());
EXPECT_TRUE(!res1.isCompleted());
EXPECT_EQUAL(2u, res1.getEntries().size());
@@ -638,122 +638,122 @@ TEST("require that maxBytes splits iteration results for meta-data only iteratio
TEST("require that at least one document is returned by visit") {
DocumentIterator itr(bucket(5), document::AllFields(), selectAll(), newestV(), -1, false);
- itr.add(doc("doc:foo:1", Timestamp(2), bucket(5)));
- itr.add(cat(rem("doc:foo:2", Timestamp(3), bucket(5)),
- doc("doc:foo:3", Timestamp(4), bucket(5))));
+ itr.add(doc("id:ns:document::1", Timestamp(2), bucket(5)));
+ itr.add(cat(rem("id:ns:document::2", Timestamp(3), bucket(5)),
+ doc("id:ns:document::3", Timestamp(4), bucket(5))));
IterateResult res1 = itr.iterate(0);
EXPECT_TRUE(1u <= res1.getEntries().size());
- TEST_DO(checkEntry(res1, 0, Document(*DataType::DOCUMENT,DocumentId("doc:foo:1")), Timestamp(2)));
+ TEST_DO(checkEntry(res1, 0, Document(*DataType::DOCUMENT,DocumentId("id:ns:document::1")), Timestamp(2)));
}
TEST("require that documents outside the timestamp limits are ignored") {
DocumentIterator itr(bucket(5), document::AllFields(), selectTimestampRange(100, 200), newestV(), -1, false);
- itr.add(doc("doc:foo:1", Timestamp(99), bucket(5)));
- itr.add(doc("doc:foo:2", Timestamp(100), bucket(5)));
- itr.add(doc("doc:foo:3", Timestamp(200), bucket(5)));
- itr.add(doc("doc:foo:4", Timestamp(201), bucket(5)));
- itr.add(rem("doc:foo:5", Timestamp(99), bucket(5)));
- itr.add(rem("doc:foo:6", Timestamp(100), bucket(5)));
- itr.add(rem("doc:foo:7", Timestamp(200), bucket(5)));
- itr.add(rem("doc:foo:8", Timestamp(201), bucket(5)));
+ itr.add(doc("id:ns:document::1", Timestamp(99), bucket(5)));
+ itr.add(doc("id:ns:document::2", Timestamp(100), bucket(5)));
+ itr.add(doc("id:ns:document::3", Timestamp(200), bucket(5)));
+ itr.add(doc("id:ns:document::4", Timestamp(201), bucket(5)));
+ itr.add(rem("id:ns:document::5", Timestamp(99), bucket(5)));
+ itr.add(rem("id:ns:document::6", Timestamp(100), bucket(5)));
+ itr.add(rem("id:ns:document::7", Timestamp(200), bucket(5)));
+ itr.add(rem("id:ns:document::8", Timestamp(201), bucket(5)));
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
EXPECT_EQUAL(4u, res.getEntries().size());
- TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("doc:foo:2")), Timestamp(100)));
- TEST_DO(checkEntry(res, 1, Document(*DataType::DOCUMENT, DocumentId("doc:foo:3")), Timestamp(200)));
- TEST_DO(checkEntry(res, 2, DocumentId("doc:foo:6"), Timestamp(100)));
- TEST_DO(checkEntry(res, 3, DocumentId("doc:foo:7"), Timestamp(200)));
+ TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::2")), Timestamp(100)));
+ TEST_DO(checkEntry(res, 1, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::3")), Timestamp(200)));
+ TEST_DO(checkEntry(res, 2, DocumentId("id:ns:document::6"), Timestamp(100)));
+ TEST_DO(checkEntry(res, 3, DocumentId("id:ns:document::7"), Timestamp(200)));
}
TEST("require that timestamp subset returns the appropriate documents") {
DocumentIterator itr(bucket(5), document::AllFields(), selectTimestampSet(200, 350, 400), newestV(), -1, false);
- itr.add(doc("doc:foo:1", Timestamp(500), bucket(5)));
- itr.add(doc("doc:foo:2", Timestamp(400), bucket(5)));
- itr.add(doc("doc:foo:3", Timestamp(300), bucket(5)));
- itr.add(doc("doc:foo:4", Timestamp(200), bucket(5)));
- itr.add(rem("doc:foo:5", Timestamp(250), bucket(5)));
- itr.add(rem("doc:foo:6", Timestamp(350), bucket(5)));
- itr.add(rem("doc:foo:7", Timestamp(450), bucket(5)));
- itr.add(rem("doc:foo:8", Timestamp(550), bucket(5)));
+ itr.add(doc("id:ns:document::1", Timestamp(500), bucket(5)));
+ itr.add(doc("id:ns:document::2", Timestamp(400), bucket(5)));
+ itr.add(doc("id:ns:document::3", Timestamp(300), bucket(5)));
+ itr.add(doc("id:ns:document::4", Timestamp(200), bucket(5)));
+ itr.add(rem("id:ns:document::5", Timestamp(250), bucket(5)));
+ itr.add(rem("id:ns:document::6", Timestamp(350), bucket(5)));
+ itr.add(rem("id:ns:document::7", Timestamp(450), bucket(5)));
+ itr.add(rem("id:ns:document::8", Timestamp(550), bucket(5)));
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
EXPECT_EQUAL(3u, res.getEntries().size());
- TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("doc:foo:2")), Timestamp(400)));
- TEST_DO(checkEntry(res, 1, Document(*DataType::DOCUMENT, DocumentId("doc:foo:4")), Timestamp(200)));
- TEST_DO(checkEntry(res, 2, DocumentId("doc:foo:6"), Timestamp(350)));
+ TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::2")), Timestamp(400)));
+ TEST_DO(checkEntry(res, 1, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::4")), Timestamp(200)));
+ TEST_DO(checkEntry(res, 2, DocumentId("id:ns:document::6"), Timestamp(350)));
}
TEST("require that document selection will filter results") {
- DocumentIterator itr(bucket(5), document::AllFields(), selectDocs("id=\"doc:foo:xxx*\""), newestV(), -1, false);
- itr.add(doc("doc:foo:xxx1", Timestamp(99), bucket(5)));
- itr.add(doc("doc:foo:yyy1", Timestamp(100), bucket(5)));
- itr.add(doc("doc:foo:xxx2", Timestamp(200), bucket(5)));
- itr.add(doc("doc:foo:yyy2", Timestamp(201), bucket(5)));
- itr.add(rem("doc:foo:xxx3", Timestamp(99), bucket(5)));
- itr.add(rem("doc:foo:yyy3", Timestamp(100), bucket(5)));
- itr.add(rem("doc:foo:xxx4", Timestamp(200), bucket(5)));
- itr.add(rem("doc:foo:yyy4", Timestamp(201), bucket(5)));
+ DocumentIterator itr(bucket(5), document::AllFields(), selectDocs("id=\"id:ns:document::xxx*\""), newestV(), -1, false);
+ itr.add(doc("id:ns:document::xxx1", Timestamp(99), bucket(5)));
+ itr.add(doc("id:ns:document::yyy1", Timestamp(100), bucket(5)));
+ itr.add(doc("id:ns:document::xxx2", Timestamp(200), bucket(5)));
+ itr.add(doc("id:ns:document::yyy2", Timestamp(201), bucket(5)));
+ itr.add(rem("id:ns:document::xxx3", Timestamp(99), bucket(5)));
+ itr.add(rem("id:ns:document::yyy3", Timestamp(100), bucket(5)));
+ itr.add(rem("id:ns:document::xxx4", Timestamp(200), bucket(5)));
+ itr.add(rem("id:ns:document::yyy4", Timestamp(201), bucket(5)));
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
EXPECT_EQUAL(4u, res.getEntries().size());
- TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("doc:foo:xxx1")), Timestamp(99)));
- TEST_DO(checkEntry(res, 1, Document(*DataType::DOCUMENT, DocumentId("doc:foo:xxx2")), Timestamp(200)));
- TEST_DO(checkEntry(res, 2, DocumentId("doc:foo:xxx3"), Timestamp(99)));
- TEST_DO(checkEntry(res, 3, DocumentId("doc:foo:xxx4"), Timestamp(200)));
+ TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::xxx1")), Timestamp(99)));
+ TEST_DO(checkEntry(res, 1, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::xxx2")), Timestamp(200)));
+ TEST_DO(checkEntry(res, 2, DocumentId("id:ns:document::xxx3"), Timestamp(99)));
+ TEST_DO(checkEntry(res, 3, DocumentId("id:ns:document::xxx4"), Timestamp(200)));
}
TEST("require that document selection handles 'field == null'") {
DocumentIterator itr(bucket(5), document::AllFields(), selectDocs("foo.aa == null"), newestV(), -1, false);
- itr.add(doc_with_null_fields("doc:foo:xxx1", Timestamp(99), bucket(5)));
- itr.add(doc_with_null_fields("doc:foo:xxx2", Timestamp(100), bucket(5)));
+ itr.add(doc_with_null_fields("id:ns:foo::xxx1", Timestamp(99), bucket(5)));
+ itr.add(doc_with_null_fields("id:ns:foo::xxx2", Timestamp(100), bucket(5)));
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
ASSERT_EQUAL(2u, res.getEntries().size());
- Document expected1(getAttrDocType(), DocumentId("doc:foo:xxx1"));
+ Document expected1(getAttrDocType(), DocumentId("id:ns:foo::xxx1"));
TEST_DO(checkEntry(res, 0, expected1, Timestamp(99)));
- Document expected2(getAttrDocType(), DocumentId("doc:foo:xxx2"));
+ Document expected2(getAttrDocType(), DocumentId("id:ns:foo::xxx2"));
TEST_DO(checkEntry(res, 1, expected2, Timestamp(100)));
}
TEST("require that invalid document selection returns no documents") {
DocumentIterator itr(bucket(5), document::AllFields(), selectDocs("=="), newestV(), -1, false);
- itr.add(doc("doc:foo:xxx1", Timestamp(99), bucket(5)));
- itr.add(doc("doc:foo:yyy1", Timestamp(100), bucket(5)));
- itr.add(doc("doc:foo:xxx2", Timestamp(200), bucket(5)));
- itr.add(doc("doc:foo:yyy2", Timestamp(201), bucket(5)));
- itr.add(rem("doc:foo:xxx3", Timestamp(99), bucket(5)));
- itr.add(rem("doc:foo:yyy3", Timestamp(100), bucket(5)));
- itr.add(rem("doc:foo:xxx4", Timestamp(200), bucket(5)));
- itr.add(rem("doc:foo:yyy4", Timestamp(201), bucket(5)));
+ itr.add(doc("id:ns:document::xxx1", Timestamp(99), bucket(5)));
+ itr.add(doc("id:ns:document::yyy1", Timestamp(100), bucket(5)));
+ itr.add(doc("id:ns:document::xxx2", Timestamp(200), bucket(5)));
+ itr.add(doc("id:ns:document::yyy2", Timestamp(201), bucket(5)));
+ itr.add(rem("id:ns:document::xxx3", Timestamp(99), bucket(5)));
+ itr.add(rem("id:ns:document::yyy3", Timestamp(100), bucket(5)));
+ itr.add(rem("id:ns:document::xxx4", Timestamp(200), bucket(5)));
+ itr.add(rem("id:ns:document::yyy4", Timestamp(201), bucket(5)));
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
EXPECT_EQUAL(0u, res.getEntries().size());
}
TEST("require that document selection and timestamp range works together") {
- DocumentIterator itr(bucket(5), document::AllFields(), selectDocsWithinRange("id=\"doc:foo:xxx*\"", 100, 200), newestV(), -1, false);
- itr.add(doc("doc:foo:xxx1", Timestamp(99), bucket(5)));
- itr.add(doc("doc:foo:yyy1", Timestamp(100), bucket(5)));
- itr.add(doc("doc:foo:xxx2", Timestamp(200), bucket(5)));
- itr.add(doc("doc:foo:yyy2", Timestamp(201), bucket(5)));
- itr.add(rem("doc:foo:xxx3", Timestamp(99), bucket(5)));
- itr.add(rem("doc:foo:yyy3", Timestamp(100), bucket(5)));
- itr.add(rem("doc:foo:xxx4", Timestamp(200), bucket(5)));
- itr.add(rem("doc:foo:yyy4", Timestamp(201), bucket(5)));
+ DocumentIterator itr(bucket(5), document::AllFields(), selectDocsWithinRange("id=\"id:ns:document::xxx*\"", 100, 200), newestV(), -1, false);
+ itr.add(doc("id:ns:document::xxx1", Timestamp(99), bucket(5)));
+ itr.add(doc("id:ns:document::yyy1", Timestamp(100), bucket(5)));
+ itr.add(doc("id:ns:document::xxx2", Timestamp(200), bucket(5)));
+ itr.add(doc("id:ns:document::yyy2", Timestamp(201), bucket(5)));
+ itr.add(rem("id:ns:document::xxx3", Timestamp(99), bucket(5)));
+ itr.add(rem("id:ns:document::yyy3", Timestamp(100), bucket(5)));
+ itr.add(rem("id:ns:document::xxx4", Timestamp(200), bucket(5)));
+ itr.add(rem("id:ns:document::yyy4", Timestamp(201), bucket(5)));
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
EXPECT_EQUAL(2u, res.getEntries().size());
- TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("doc:foo:xxx2")), Timestamp(200)));
- TEST_DO(checkEntry(res, 1, DocumentId("doc:foo:xxx4"), Timestamp(200)));
+ TEST_DO(checkEntry(res, 0, Document(*DataType::DOCUMENT, DocumentId("id:ns:document::xxx2")), Timestamp(200)));
+ TEST_DO(checkEntry(res, 1, DocumentId("id:ns:document::xxx4"), Timestamp(200)));
}
TEST("require that fieldset limits fields returned") {
DocumentIterator itr(bucket(5), document::HeaderFields(), selectAll(), newestV(), -1, false);
- itr.add(doc_with_fields("doc:foo:xxx1", Timestamp(1), bucket(5)));
+ itr.add(doc_with_fields("id:ns:foo::xxx1", Timestamp(1), bucket(5)));
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
EXPECT_EQUAL(1u, res.getEntries().size());
- Document expected(getDocType(), DocumentId("doc:foo:xxx1"));
+ Document expected(getDocType(), DocumentId("id:ns:foo::xxx1"));
expected.set("header", "foo");
TEST_DO(checkEntry(res, 0, expected, Timestamp(1)));
}
@@ -798,26 +798,26 @@ TEST("require that attributes are used")
{
UnitDR::reset();
DocumentIterator itr(bucket(5), document::AllFields(), selectDocs("foo.aa == 45"), docV(), -1, false);
- itr.add(doc_with_attr_fields("doc:foo:xx1", Timestamp(1), bucket(5),
+ itr.add(doc_with_attr_fields("id:ns:foo::xx1", Timestamp(1), bucket(5),
27, 28, 27, 2.7, 2.8, "x27", "x28"));
- itr.add(doc_with_attr_fields("doc:foo:xx2", Timestamp(2), bucket(5),
+ itr.add(doc_with_attr_fields("id:ns:foo::xx2", Timestamp(2), bucket(5),
27, 28, 45, 2.7, 4.5, "x27", "x45"));
- itr.add(doc_with_attr_fields("doc:foo:xx3", Timestamp(3), bucket(5),
+ itr.add(doc_with_attr_fields("id:ns:foo::xx3", Timestamp(3), bucket(5),
45, 46, 27, 4.5, 2.7, "x45", "x27"));
- itr.add(doc_with_attr_fields("doc:foo:xx4", Timestamp(4), bucket(5),
+ itr.add(doc_with_attr_fields("id:ns:foo::xx4", Timestamp(4), bucket(5),
45, 46, 45, 4.5, 4.5, "x45", "x45"));
IterateResult res = itr.iterate(largeNum);
EXPECT_TRUE(res.isCompleted());
EXPECT_EQUAL(2u, res.getEntries().size());
- Document expected1(getAttrDocType(), DocumentId("doc:foo:xx2"));
+ Document expected1(getAttrDocType(), DocumentId("id:ns:foo::xx2"));
expected1.set("header", "foo");
expected1.set("body", "bar");
expected1.set("aa", 27);
expected1.set("ab", 28);
expected1.set("dd", 2.7);
expected1.set("ss", "x27");
- Document expected2(getAttrDocType(), DocumentId("doc:foo:xx4"));
+ Document expected2(getAttrDocType(), DocumentId("id:ns:foo::xx4"));
expected2.set("header", "foo");
expected2.set("body", "bar");
expected2.set("aa", 45);
@@ -828,26 +828,26 @@ TEST("require that attributes are used")
TEST_DO(checkEntry(res, 1, expected2, Timestamp(4)));
DocumentIterator itr2(bucket(5), document::AllFields(), selectDocs("foo.dd == 4.5"), docV(), -1, false);
- itr2.add(doc_with_attr_fields("doc:foo:xx5", Timestamp(5), bucket(5),
+ itr2.add(doc_with_attr_fields("id:ns:foo::xx5", Timestamp(5), bucket(5),
27, 28, 27, 2.7, 2.8, "x27", "x28"));
- itr2.add(doc_with_attr_fields("doc:foo:xx6", Timestamp(6), bucket(5),
+ itr2.add(doc_with_attr_fields("id:ns:foo::xx6", Timestamp(6), bucket(5),
27, 28, 45, 2.7, 4.5, "x27", "x45"));
- itr2.add(doc_with_attr_fields("doc:foo:xx7", Timestamp(7), bucket(5),
+ itr2.add(doc_with_attr_fields("id:ns:foo::xx7", Timestamp(7), bucket(5),
45, 46, 27, 4.5, 2.7, "x45", "x27"));
- itr2.add(doc_with_attr_fields("doc:foo:xx8", Timestamp(8), bucket(5),
+ itr2.add(doc_with_attr_fields("id:ns:foo::xx8", Timestamp(8), bucket(5),
45, 46, 45, 4.5, 4.5, "x45", "x45"));
IterateResult res2 = itr2.iterate(largeNum);
EXPECT_TRUE(res2.isCompleted());
EXPECT_EQUAL(2u, res2.getEntries().size());
- Document expected3(getAttrDocType(), DocumentId("doc:foo:xx6"));
+ Document expected3(getAttrDocType(), DocumentId("id:ns:foo::xx6"));
expected3.set("header", "foo");
expected3.set("body", "bar");
expected3.set("aa", 27);
expected3.set("ab", 28);
expected3.set("dd", 2.7);
expected3.set("ss", "x27");
- Document expected4(getAttrDocType(), DocumentId("doc:foo:xx8"));
+ Document expected4(getAttrDocType(), DocumentId("id:ns:foo::xx8"));
expected4.set("header", "foo");
expected4.set("body", "bar");
expected4.set("aa", 45);
@@ -858,26 +858,26 @@ TEST("require that attributes are used")
TEST_DO(checkEntry(res2, 1, expected4, Timestamp(8)));
DocumentIterator itr3(bucket(5), document::AllFields(), selectDocs("foo.ss == \"x45\""), docV(), -1, false);
- itr3.add(doc_with_attr_fields("doc:foo:xx9", Timestamp(9), bucket(5),
+ itr3.add(doc_with_attr_fields("id:ns:foo::xx9", Timestamp(9), bucket(5),
27, 28, 27, 2.7, 2.8, "x27", "x28"));
- itr3.add(doc_with_attr_fields("doc:foo:xx10", Timestamp(10), bucket(5),
+ itr3.add(doc_with_attr_fields("id:ns:foo::xx10", Timestamp(10), bucket(5),
27, 28, 45, 2.7, 4.5, "x27", "x45"));
- itr3.add(doc_with_attr_fields("doc:foo:xx11", Timestamp(11), bucket(5),
+ itr3.add(doc_with_attr_fields("id:ns:foo::xx11", Timestamp(11), bucket(5),
45, 46, 27, 4.5, 2.7, "x45", "x27"));
- itr3.add(doc_with_attr_fields("doc:foo:xx12", Timestamp(12), bucket(5),
+ itr3.add(doc_with_attr_fields("id:ns:foo::xx12", Timestamp(12), bucket(5),
45, 46, 45, 4.5, 4.5, "x45", "x45"));
IterateResult res3 = itr3.iterate(largeNum);
EXPECT_TRUE(res3.isCompleted());
EXPECT_EQUAL(2u, res3.getEntries().size());
- Document expected5(getAttrDocType(), DocumentId("doc:foo:xx10"));
+ Document expected5(getAttrDocType(), DocumentId("id:ns:foo::xx10"));
expected5.set("header", "foo");
expected5.set("body", "bar");
expected5.set("aa", 27);
expected5.set("ab", 28);
expected5.set("dd", 2.7);
expected5.set("ss", "x27");
- Document expected6(getAttrDocType(), DocumentId("doc:foo:xx12"));
+ Document expected6(getAttrDocType(), DocumentId("id:ns:foo::xx12"));
expected6.set("header", "foo");
expected6.set("body", "bar");
expected6.set("aa", 45);
diff --git a/searchcore/src/tests/proton/documentdb/feedhandler/feedhandler_test.cpp b/searchcore/src/tests/proton/documentdb/feedhandler/feedhandler_test.cpp
index 4b3b68a85ea..f99668a13f8 100644
--- a/searchcore/src/tests/proton/documentdb/feedhandler/feedhandler_test.cpp
+++ b/searchcore/src/tests/proton/documentdb/feedhandler/feedhandler_test.cpp
@@ -537,7 +537,7 @@ TEST_F("require that heartBeat calls FeedView's heartBeat",
TEST_F("require that outdated remove is ignored", FeedHandlerFixture)
{
- DocumentContext doc_context("doc:test:foo", *f.schema.builder);
+ DocumentContext doc_context("id:ns:searchdocument::foo", *f.schema.builder);
FeedOperation::UP op(new RemoveOperation(doc_context.bucketId, Timestamp(10), doc_context.doc->getId()));
static_cast<DocumentOperation &>(*op).setPrevDbDocumentId(DbDocumentId(4));
static_cast<DocumentOperation &>(*op).setPrevTimestamp(Timestamp(10000));
@@ -549,7 +549,7 @@ TEST_F("require that outdated remove is ignored", FeedHandlerFixture)
TEST_F("require that outdated put is ignored", FeedHandlerFixture)
{
- DocumentContext doc_context("doc:test:foo", *f.schema.builder);
+ DocumentContext doc_context("id:ns:searchdocument::foo", *f.schema.builder);
FeedOperation::UP op(new PutOperation(doc_context.bucketId,
Timestamp(10), doc_context.doc));
static_cast<DocumentOperation &>(*op).setPrevTimestamp(Timestamp(10000));
@@ -570,7 +570,7 @@ addLidToRemove(RemoveDocumentsOperation &op)
TEST_F("require that handleMove calls FeedView", FeedHandlerFixture)
{
- DocumentContext doc_context("doc:test:foo", *f.schema.builder);
+ DocumentContext doc_context("id:ns:searchdocument::foo", *f.schema.builder);
MoveOperation op(doc_context.bucketId, Timestamp(2), doc_context.doc, DbDocumentId(0, 2), 1);
op.setDbDocumentId(DbDocumentId(1, 2));
f.runAsMaster([&]() { f.handler.handleMove(op, IDestructorCallback::SP()); });
@@ -806,7 +806,7 @@ TEST_F("require that tensor update with wrong tensor type fails", FeedHandlerFix
TEST_F("require that put with different document type repo is ok", FeedHandlerFixture)
{
TwoFieldsSchemaContext schema;
- DocumentContext doc_context("doc:test:foo", *schema.builder);
+ DocumentContext doc_context("id:ns:searchdocument::foo", *schema.builder);
auto op = std::make_unique<PutOperation>(doc_context.bucketId,
Timestamp(10), doc_context.doc);
FeedTokenContext token_context;
diff --git a/searchcore/src/tests/proton/documentdb/feedview/feedview_test.cpp b/searchcore/src/tests/proton/documentdb/feedview/feedview_test.cpp
index b39b70572e0..144f4ca4ff7 100644
--- a/searchcore/src/tests/proton/documentdb/feedview/feedview_test.cpp
+++ b/searchcore/src/tests/proton/documentdb/feedview/feedview_test.cpp
@@ -577,7 +577,7 @@ struct FixtureBase
}
DocumentContext doc1(uint64_t timestamp = 10) {
- return doc("doc:test:1", timestamp);
+ return doc("id:ns:searchdocument::1", timestamp);
}
void performPut(FeedToken token, PutOperation &op) {
@@ -661,7 +661,7 @@ struct FixtureBase
uint32_t id = first + i;
uint64_t ts = tsfirst + i;
vespalib::asciistream os;
- os << "doc:test:" << id;
+ os << "id:ns:searchdocument::" << id;
docs.push_back(doc(os.str(), ts));
}
return docs;
@@ -822,7 +822,7 @@ TEST_F("require that put() calls attribute adapter", SearchableFeedViewFixture)
f.putAndWait(dc);
EXPECT_EQUAL(1u, f.maw._putSerial);
- EXPECT_EQUAL(DocumentId("doc:test:1"), f.maw._putDocId);
+ EXPECT_EQUAL(DocumentId("id:ns:searchdocument::1"), f.maw._putDocId);
EXPECT_EQUAL(1u, f.maw._putLid);
EXPECT_EQUAL(2u, f._docIdLimit.get());
}
@@ -861,7 +861,7 @@ TEST_F("require that update() calls attribute adapter", SearchableFeedViewFixtur
f.putAndWait(dc1);
f.updateAndWait(dc2);
- assertAttributeUpdate(2u, DocumentId("doc:test:1"), 1u, f.maw);
+ assertAttributeUpdate(2u, DocumentId("id:ns:searchdocument::1"), 1u, f.maw);
}
TEST_F("require that remove() updates document meta store with bucket info",
@@ -1064,7 +1064,7 @@ void putDocumentAndUpdate(Fixture &f, const vespalib::string &fieldName)
f.putAndWait(dc1);
EXPECT_EQUAL(1u, f.msa._store._lastSyncToken);
- DocumentContext dc2("doc:test:1", 20, f.getBuilder());
+ DocumentContext dc2("id:ns:searchdocument::1", 20, f.getBuilder());
dc2.addFieldUpdate(f.getBuilder(), fieldName);
f.updateAndWait(dc2);
}
@@ -1076,7 +1076,7 @@ void requireThatUpdateOnlyUpdatesAttributeAndNotDocumentStore(Fixture &f,
putDocumentAndUpdate(f, fieldName);
EXPECT_EQUAL(1u, f.msa._store._lastSyncToken); // document store not updated
- assertAttributeUpdate(2u, DocumentId("doc:test:1"), 1, f.maw);
+ assertAttributeUpdate(2u, DocumentId("id:ns:searchdocument::1"), 1, f.maw);
}
template <typename Fixture>
@@ -1086,7 +1086,7 @@ void requireThatUpdateUpdatesAttributeAndDocumentStore(Fixture &f,
putDocumentAndUpdate(f, fieldName);
EXPECT_EQUAL(2u, f.msa._store._lastSyncToken); // document store updated
- assertAttributeUpdate(2u, DocumentId("doc:test:1"), 1, f.maw);
+ assertAttributeUpdate(2u, DocumentId("id:ns:searchdocument::1"), 1, f.maw);
}
TEST_F("require that update() to fast-access attribute only updates attribute and not document store",
diff --git a/searchcore/src/tests/proton/documentmetastore/documentmetastore_test.cpp b/searchcore/src/tests/proton/documentmetastore/documentmetastore_test.cpp
index f6f0c2b0806..2fc6cc87631 100644
--- a/searchcore/src/tests/proton/documentmetastore/documentmetastore_test.cpp
+++ b/searchcore/src/tests/proton/documentmetastore/documentmetastore_test.cpp
@@ -545,14 +545,14 @@ TEST(DocumentMetaStoreTest, lid_and_gid_space_is_reused)
GlobalId
createGid(uint32_t lid)
{
- DocumentId docId(vespalib::make_string("doc:id:%u", lid));
+ DocumentId docId(vespalib::make_string("id:ns:testdoc::%u", lid));
return docId.getGlobalId();
}
GlobalId
createGid(uint32_t userId, uint32_t lid)
{
- DocumentId docId(vespalib::make_string("id:id:testdoc:n=%u:%u", userId, lid));
+ DocumentId docId(vespalib::make_string("id:ns:testdoc:n=%u:%u", userId, lid));
return docId.getGlobalId();
}
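The two createGid overloads above cover both id flavours: a plain id, where the gid (and thereby the bucket) is derived from the id as a whole, and an n=<userId> id, where the location part of the gid comes from the user number so that documents sharing it land in the same bucket. A sketch using the same helpers as the test (header paths assumed):

    #include <vespa/document/base/documentid.h>
    #include <vespa/vespalib/util/stringfmt.h>

    document::GlobalId gidFromLid(uint32_t lid) {
        // plain id: no key/value-pairs, bucket placement follows from the full id
        return document::DocumentId(vespalib::make_string("id:ns:testdoc::%u", lid)).getGlobalId();
    }

    document::GlobalId gidFromUserAndLid(uint32_t userId, uint32_t lid) {
        // n=<userId>: location taken from the user id, co-locating that user's documents
        return document::DocumentId(vespalib::make_string("id:ns:testdoc:n=%u:%u", userId, lid)).getGlobalId();
    }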
diff --git a/searchcore/src/tests/proton/feed_and_search/feed_and_search.cpp b/searchcore/src/tests/proton/feed_and_search/feed_and_search.cpp
index 23a87415f7f..4580865b3a4 100644
--- a/searchcore/src/tests/proton/feed_and_search/feed_and_search.cpp
+++ b/searchcore/src/tests/proton/feed_and_search/feed_and_search.cpp
@@ -105,7 +105,7 @@ Schema getSchema() {
Document::UP buildDocument(DocBuilder & doc_builder, int id,
const string &word) {
ostringstream ost;
- ost << "doc::" << id;
+ ost << "id:ns:searchdocument::" << id;
doc_builder.startDocument(ost.str());
doc_builder.startIndexField(field_name)
.addStr(noise).addStr(word).endField();
diff --git a/searchcore/src/tests/proton/feedoperation/feedoperation_test.cpp b/searchcore/src/tests/proton/feedoperation/feedoperation_test.cpp
index 6a9dc42b56d..5a3ed4b7274 100644
--- a/searchcore/src/tests/proton/feedoperation/feedoperation_test.cpp
+++ b/searchcore/src/tests/proton/feedoperation/feedoperation_test.cpp
@@ -145,7 +145,7 @@ TEST("require that toString() on derived classes are meaningful")
uint32_t sub_db_id = 1;
MyStreamHandler stream_handler;
DocumentIdT doc_id_limit = 15;
- DocumentId doc_id("doc:foo:bar");
+ DocumentId doc_id("id:ns:foo:::bar");
DocumentUpdate::SP update(new DocumentUpdate(repo, *DataType::DOCUMENT, doc_id));
EXPECT_EQUAL("DeleteBucket(BucketId(0x0000000000000000), serialNum=0)",
@@ -196,7 +196,7 @@ TEST("require that toString() on derived classes are meaningful")
EXPECT_EQUAL("Remove(null::, BucketId(0x0000000000000000), timestamp=0, dbdId=(subDbId=0, lid=0), "
"prevDbdId=(subDbId=0, lid=0), prevMarkedAsRemoved=false, prevTimestamp=0, serialNum=0)",
RemoveOperation().toString());
- EXPECT_EQUAL("Remove(doc:foo:bar, BucketId(0x000000000000002a), timestamp=10, dbdId=(subDbId=0, lid=0), "
+ EXPECT_EQUAL("Remove(id:ns:foo:::bar, BucketId(0x000000000000002a), timestamp=10, dbdId=(subDbId=0, lid=0), "
"prevDbdId=(subDbId=0, lid=0), prevMarkedAsRemoved=false, prevTimestamp=0, serialNum=0)",
RemoveOperation(bucket_id1, timestamp, doc_id).toString());
@@ -214,7 +214,7 @@ TEST("require that toString() on derived classes are meaningful")
EXPECT_EQUAL("Update(NULL, BucketId(0x0000000000000000), timestamp=0, dbdId=(subDbId=0, lid=0), "
"prevDbdId=(subDbId=0, lid=0), prevMarkedAsRemoved=false, prevTimestamp=0, serialNum=0)",
UpdateOperation().toString());
- EXPECT_EQUAL("Update(doc:foo:bar, BucketId(0x000000000000002a), timestamp=10, dbdId=(subDbId=0, lid=0), "
+ EXPECT_EQUAL("Update(id:ns:foo:::bar, BucketId(0x000000000000002a), timestamp=10, dbdId=(subDbId=0, lid=0), "
"prevDbdId=(subDbId=0, lid=0), prevMarkedAsRemoved=false, prevTimestamp=0, serialNum=0)",
UpdateOperation(bucket_id1, timestamp, update).toString());
diff --git a/searchcore/src/tests/proton/index/fusionrunner_test.cpp b/searchcore/src/tests/proton/index/fusionrunner_test.cpp
index e6cdbf8d6cb..49b452aec2e 100644
--- a/searchcore/src/tests/proton/index/fusionrunner_test.cpp
+++ b/searchcore/src/tests/proton/index/fusionrunner_test.cpp
@@ -143,7 +143,7 @@ void Test::tearDown() {
Document::UP buildDocument(DocBuilder & doc_builder, int id, const string &word) {
vespalib::asciistream ost;
- ost << "doc::" << id;
+ ost << "id:ns:searchdocument::" << id;
doc_builder.startDocument(ost.str());
doc_builder.startIndexField(field_name).addStr(word).endField();
return doc_builder.endDocument();
diff --git a/searchcore/src/tests/proton/index/index_writer/index_writer_test.cpp b/searchcore/src/tests/proton/index/index_writer/index_writer_test.cpp
index d92ac0dcdc2..73919a7c628 100644
--- a/searchcore/src/tests/proton/index/index_writer/index_writer_test.cpp
+++ b/searchcore/src/tests/proton/index/index_writer/index_writer_test.cpp
@@ -89,7 +89,7 @@ struct Fixture
{
}
Document::UP createDoc(uint32_t lid) {
- builder.startDocument(vespalib::make_string("doc:test:%u", lid));
+ builder.startDocument(vespalib::make_string("id:ns:searchdocument::%u", lid));
return builder.endDocument();
}
void put(SerialNum serialNum, const search::DocumentIdT lid) {
diff --git a/searchcore/src/tests/proton/index/indexmanager_test.cpp b/searchcore/src/tests/proton/index/indexmanager_test.cpp
index d92cc62c5a1..80b1f9f0560 100644
--- a/searchcore/src/tests/proton/index/indexmanager_test.cpp
+++ b/searchcore/src/tests/proton/index/indexmanager_test.cpp
@@ -89,7 +89,7 @@ void removeTestData() {
Document::UP buildDocument(DocBuilder &doc_builder, int id,
const string &word) {
vespalib::asciistream ost;
- ost << "doc::" << id;
+ ost << "id:ns:searchdocument::" << id;
doc_builder.startDocument(ost.str());
doc_builder.startIndexField(field_name).addStr(word).endField();
return doc_builder.endDocument();
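The same buildDocument helper recurs in the feed_and_search, fusionrunner and indexmanager tests above; condensed, the DocBuilder call chain with the new id form looks as follows (header paths assumed, field name and word supplied by the caller):

    #include <vespa/searchlib/index/docbuilder.h>
    #include <vespa/vespalib/stllike/asciistream.h>

    document::Document::UP buildOne(search::index::DocBuilder &builder, int id,
                                    const vespalib::string &fieldName, const vespalib::string &word) {
        vespalib::asciistream ost;
        ost << "id:ns:searchdocument::" << id;   // document type must match the builder's schema
        builder.startDocument(ost.str());
        builder.startIndexField(fieldName).addStr(word).endField();
        return builder.endDocument();
    }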
diff --git a/searchcore/src/tests/proton/matching/matching_test.cpp b/searchcore/src/tests/proton/matching/matching_test.cpp
index e46ed997d0f..3f68b54aca2 100644
--- a/searchcore/src/tests/proton/matching/matching_test.cpp
+++ b/searchcore/src/tests/proton/matching/matching_test.cpp
@@ -192,7 +192,7 @@ struct MyWorld {
// metaStore
for (uint32_t i = 0; i < NUM_DOCS; ++i) {
- document::DocumentId docId(vespalib::make_string("doc::%u", i));
+ document::DocumentId docId(vespalib::make_string("id:ns:searchdocument::%u", i));
const document::GlobalId &gid = docId.getGlobalId();
document::BucketId bucketId(BucketFactory::getBucketId(docId));
uint32_t docSize = 1;
@@ -455,11 +455,11 @@ TEST("require that ranking is performed (multi-threaded)") {
EXPECT_EQUAL(9u, world.matchingStats.docsRanked());
EXPECT_EQUAL(0u, world.matchingStats.docsReRanked());
ASSERT_TRUE(reply->hits.size() == 9u);
- EXPECT_EQUAL(document::DocumentId("doc::900").getGlobalId(), reply->hits[0].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::900").getGlobalId(), reply->hits[0].gid);
EXPECT_EQUAL(900.0, reply->hits[0].metric);
- EXPECT_EQUAL(document::DocumentId("doc::800").getGlobalId(), reply->hits[1].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::800").getGlobalId(), reply->hits[1].gid);
EXPECT_EQUAL(800.0, reply->hits[1].metric);
- EXPECT_EQUAL(document::DocumentId("doc::700").getGlobalId(), reply->hits[2].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::700").getGlobalId(), reply->hits[2].gid);
EXPECT_EQUAL(700.0, reply->hits[2].metric);
EXPECT_GREATER(world.matchingStats.matchTimeAvg(), 0.0000001);
EXPECT_EQUAL(0.0, world.matchingStats.rerankTimeAvg());
@@ -478,15 +478,15 @@ TEST("require that re-ranking is performed (multi-threaded)") {
EXPECT_EQUAL(9u, world.matchingStats.docsRanked());
EXPECT_EQUAL(3u, world.matchingStats.docsReRanked());
ASSERT_TRUE(reply->hits.size() == 9u);
- EXPECT_EQUAL(document::DocumentId("doc::900").getGlobalId(), reply->hits[0].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::900").getGlobalId(), reply->hits[0].gid);
EXPECT_EQUAL(1800.0, reply->hits[0].metric);
- EXPECT_EQUAL(document::DocumentId("doc::800").getGlobalId(), reply->hits[1].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::800").getGlobalId(), reply->hits[1].gid);
EXPECT_EQUAL(1600.0, reply->hits[1].metric);
- EXPECT_EQUAL(document::DocumentId("doc::700").getGlobalId(), reply->hits[2].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::700").getGlobalId(), reply->hits[2].gid);
EXPECT_EQUAL(1400.0, reply->hits[2].metric);
- EXPECT_EQUAL(document::DocumentId("doc::600").getGlobalId(), reply->hits[3].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::600").getGlobalId(), reply->hits[3].gid);
EXPECT_EQUAL(600.0, reply->hits[3].metric);
- EXPECT_EQUAL(document::DocumentId("doc::500").getGlobalId(), reply->hits[4].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::500").getGlobalId(), reply->hits[4].gid);
EXPECT_EQUAL(500.0, reply->hits[4].metric);
EXPECT_GREATER(world.matchingStats.matchTimeAvg(), 0.0000001);
EXPECT_GREATER(world.matchingStats.rerankTimeAvg(), 0.0000001);
@@ -532,15 +532,15 @@ TEST("require that re-ranking is diverse with diversity = 1/1") {
EXPECT_EQUAL(9u, world.matchingStats.docsRanked());
EXPECT_EQUAL(3u, world.matchingStats.docsReRanked());
ASSERT_TRUE(reply->hits.size() == 9u);
- EXPECT_EQUAL(document::DocumentId("doc::900").getGlobalId(), reply->hits[0].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::900").getGlobalId(), reply->hits[0].gid);
EXPECT_EQUAL(1800.0, reply->hits[0].metric);
- EXPECT_EQUAL(document::DocumentId("doc::800").getGlobalId(), reply->hits[1].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::800").getGlobalId(), reply->hits[1].gid);
EXPECT_EQUAL(1600.0, reply->hits[1].metric);
- EXPECT_EQUAL(document::DocumentId("doc::700").getGlobalId(), reply->hits[2].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::700").getGlobalId(), reply->hits[2].gid);
EXPECT_EQUAL(1400.0, reply->hits[2].metric);
- EXPECT_EQUAL(document::DocumentId("doc::600").getGlobalId(), reply->hits[3].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::600").getGlobalId(), reply->hits[3].gid);
EXPECT_EQUAL(600.0, reply->hits[3].metric);
- EXPECT_EQUAL(document::DocumentId("doc::500").getGlobalId(), reply->hits[4].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::500").getGlobalId(), reply->hits[4].gid);
EXPECT_EQUAL(500.0, reply->hits[4].metric);
}
@@ -559,16 +559,16 @@ TEST("require that re-ranking is diverse with diversity = 1/10") {
EXPECT_EQUAL(9u, world.matchingStats.docsRanked());
EXPECT_EQUAL(1u, world.matchingStats.docsReRanked());
ASSERT_TRUE(reply->hits.size() == 9u);
- EXPECT_EQUAL(document::DocumentId("doc::900").getGlobalId(), reply->hits[0].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::900").getGlobalId(), reply->hits[0].gid);
EXPECT_EQUAL(1800.0, reply->hits[0].metric);
//TODO This is of course incorrect until the selectBest method sees everything.
- EXPECT_EQUAL(document::DocumentId("doc::800").getGlobalId(), reply->hits[1].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::800").getGlobalId(), reply->hits[1].gid);
EXPECT_EQUAL(800.0, reply->hits[1].metric);
- EXPECT_EQUAL(document::DocumentId("doc::700").getGlobalId(), reply->hits[2].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::700").getGlobalId(), reply->hits[2].gid);
EXPECT_EQUAL(700.0, reply->hits[2].metric);
- EXPECT_EQUAL(document::DocumentId("doc::600").getGlobalId(), reply->hits[3].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::600").getGlobalId(), reply->hits[3].gid);
EXPECT_EQUAL(600.0, reply->hits[3].metric);
- EXPECT_EQUAL(document::DocumentId("doc::500").getGlobalId(), reply->hits[4].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::500").getGlobalId(), reply->hits[4].gid);
EXPECT_EQUAL(500.0, reply->hits[4].metric);
}
@@ -585,11 +585,11 @@ TEST("require that sortspec can be used (multi-threaded)") {
}
SearchReply::UP reply = world.performSearch(request, threads);
ASSERT_EQUAL(9u, reply->hits.size());
- EXPECT_EQUAL(document::DocumentId("doc::100").getGlobalId(), reply->hits[0].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::100").getGlobalId(), reply->hits[0].gid);
EXPECT_EQUAL(zero_rank_value, reply->hits[0].metric);
- EXPECT_EQUAL(document::DocumentId("doc::200").getGlobalId(), reply->hits[1].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::200").getGlobalId(), reply->hits[1].gid);
EXPECT_EQUAL(zero_rank_value, reply->hits[1].metric);
- EXPECT_EQUAL(document::DocumentId("doc::300").getGlobalId(), reply->hits[2].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::300").getGlobalId(), reply->hits[2].gid);
EXPECT_EQUAL(zero_rank_value, reply->hits[2].metric);
EXPECT_EQUAL(drop_sort_data, reply->sortIndex.empty());
EXPECT_EQUAL(drop_sort_data, reply->sortData.empty());
@@ -911,7 +911,7 @@ TEST("require that same element search works (note that this does not test/use t
SearchRequest::SP request = world.createSameElementRequest("foo", "bar");
SearchReply::UP reply = world.performSearch(request, 1);
ASSERT_EQUAL(1u, reply->hits.size());
- EXPECT_EQUAL(document::DocumentId("doc::20").getGlobalId(), reply->hits[0].gid);
+ EXPECT_EQUAL(document::DocumentId("id:ns:searchdocument::20").getGlobalId(), reply->hits[0].gid);
}
TEST_MAIN() { TEST_RUN_ALL(); }
diff --git a/searchcore/src/tests/proton/server/documentretriever_test.cpp b/searchcore/src/tests/proton/server/documentretriever_test.cpp
index d3fbaebcffb..d5e40592b12 100644
--- a/searchcore/src/tests/proton/server/documentretriever_test.cpp
+++ b/searchcore/src/tests/proton/server/documentretriever_test.cpp
@@ -22,7 +22,6 @@
#include <vespa/eval/tensor/tensor.h>
#include <vespa/eval/tensor/test/test_utils.h>
#include <vespa/persistence/spi/bucket.h>
-#include <vespa/persistence/spi/result.h>
#include <vespa/persistence/spi/test.h>
#include <vespa/searchcommon/common/schema.h>
#include <vespa/searchcore/proton/documentmetastore/documentmetastorecontext.h>
@@ -121,7 +120,7 @@ const char dyn_wset_field_i[] = "dynamic int wset field";
const char dyn_wset_field_d[] = "dynamic double wset field";
const char dyn_wset_field_s[] = "dynamic string wset field";
const char dyn_wset_field_n[] = "dynamic null wset field";
-const DocumentId doc_id("doc:test:1");
+const DocumentId doc_id("id:ns:type_name::1");
const int32_t static_value = 4;
const int32_t dyn_value_i = 17;
const double dyn_value_d = 42.42;
@@ -144,8 +143,7 @@ struct MyDocumentStore : proton::test::DummyDocumentStore {
~MyDocumentStore() override;
- virtual Document::UP read(DocumentIdT lid,
- const DocumentTypeRepo &r) const override {
+ Document::UP read(DocumentIdT lid, const DocumentTypeRepo &r) const override {
if (lid == 0) {
return Document::UP();
}
@@ -489,8 +487,7 @@ TEST_F("require that position fields are regenerated from zcurves", Fixture) {
EXPECT_EQUAL(-123096000, static_cast<IntFieldValue&>(*x).getValue());
EXPECT_EQUAL(49401000, static_cast<IntFieldValue&>(*y).getValue());
- checkFieldValue<LongFieldValue>(doc->getValue(zcurve_field),
- dynamic_zcurve_value);
+ checkFieldValue<LongFieldValue>(doc->getValue(zcurve_field), dynamic_zcurve_value);
}
TEST_F("require that non-existing lid returns null pointer", Fixture) {
diff --git a/searchcore/src/tests/proton/server/feeddebugger_test.cpp b/searchcore/src/tests/proton/server/feeddebugger_test.cpp
index c54e13f4840..b5bd1cfafa8 100644
--- a/searchcore/src/tests/proton/server/feeddebugger_test.cpp
+++ b/searchcore/src/tests/proton/server/feeddebugger_test.cpp
@@ -65,18 +65,18 @@ TEST("require that setting an environment variable turns on docid-specific"
" debugging.") {
EnvSaver save_lid_env(lid_env_name);
EnvSaver save_docid_env(docid_env_name);
- setenv(docid_env_name, "doc:test:foo,doc:test:bar,doc:test:baz", true);
+ setenv(docid_env_name, "id:ns:type::test:foo,id:ns:type::test:bar,id:ns:type::test:baz", true);
FeedDebugger debugger;
EXPECT_TRUE(debugger.isDebugging());
EXPECT_EQUAL(ns_log::Logger::info,
- debugger.getDebugLevel(1, DocumentId("doc:test:foo")));
+ debugger.getDebugLevel(1, DocumentId("id:ns:type::test:foo")));
EXPECT_EQUAL(ns_log::Logger::info,
- debugger.getDebugLevel(1, DocumentId("doc:test:bar")));
+ debugger.getDebugLevel(1, DocumentId("id:ns:type::test:bar")));
EXPECT_EQUAL(ns_log::Logger::info,
- debugger.getDebugLevel(1, DocumentId("doc:test:baz")));
+ debugger.getDebugLevel(1, DocumentId("id:ns:type::test:baz")));
EXPECT_EQUAL(ns_log::Logger::spam,
- debugger.getDebugLevel(1, DocumentId("doc:test:qux")));
+ debugger.getDebugLevel(1, DocumentId("id:ns:type::test:qux")));
}
} // namespace
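The test above toggles docid-specific feed debugging through an environment variable whose name is held in docid_env_name, defined earlier in the file and not visible in this hunk. A sketch of the same knob; the variable name below is a placeholder, not the real one:

    #include <cstdlib>

    void enableDocidFeedDebugging() {
        // "SOME_DOCID_DEBUG_VARIABLE" is hypothetical -- substitute the value of docid_env_name.
        setenv("SOME_DOCID_DEBUG_VARIABLE",
               "id:ns:type::test:foo,id:ns:type::test:bar,id:ns:type::test:baz", 1);
        // A FeedDebugger constructed after this point reports isDebugging() == true and
        // returns log level info for the listed ids, spam for everything else.
    }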
diff --git a/searchcore/src/tests/proton/server/feedstates_test.cpp b/searchcore/src/tests/proton/server/feedstates_test.cpp
index f206ffc9b17..96096c0401f 100644
--- a/searchcore/src/tests/proton/server/feedstates_test.cpp
+++ b/searchcore/src/tests/proton/server/feedstates_test.cpp
@@ -100,7 +100,7 @@ struct RemoveOperationContext
};
RemoveOperationContext::RemoveOperationContext(search::SerialNum serial)
- : doc_id("doc:foo:bar"),
+ : doc_id("id:ns:doctypename::bar"),
op(BucketFactory::getBucketId(doc_id), Timestamp(10), doc_id),
str(), packet()
{
diff --git a/searchcore/src/vespa/searchcore/proton/common/feeddebugger.h b/searchcore/src/vespa/searchcore/proton/common/feeddebugger.h
index 3b02c0f2b76..5c582157174 100644
--- a/searchcore/src/vespa/searchcore/proton/common/feeddebugger.h
+++ b/searchcore/src/vespa/searchcore/proton/common/feeddebugger.h
@@ -27,7 +27,7 @@ private:
ns_log::Logger::LogLevel getDebugDebuggerInternal(uint32_t lid, const document::DocumentId * docid) const;
bool _enableDebugging;
std::vector<uint32_t> _debugLidList; // List of lids to dump when feeding/replaying log.
- std::vector<document::DocumentId> _debugDocIdList; // List of docids("doc:bla:blu" to dump when feeding/replaying log.
+ std::vector<document::DocumentId> _debugDocIdList; // List of docids("id:ns:doctype::xyz" to dump when feeding/replaying log.
};
} // namespace proton
diff --git a/searchcore/src/vespa/searchcore/proton/server/documentretrieverbase.cpp b/searchcore/src/vespa/searchcore/proton/server/documentretrieverbase.cpp
index d06319ae7f9..65a4f7e7c4a 100644
--- a/searchcore/src/vespa/searchcore/proton/server/documentretrieverbase.cpp
+++ b/searchcore/src/vespa/searchcore/proton/server/documentretrieverbase.cpp
@@ -4,16 +4,11 @@
#include <vespa/document/repo/documenttyperepo.h>
#include <vespa/document/datatype/documenttype.h>
#include <vespa/vespalib/stllike/lrucache_map.hpp>
+#include <vespa/vespalib/util/stringfmt.h>
using document::DocumentId;
using document::GlobalId;
-namespace {
-
-const DocumentId docId("doc:test:1");
-
-}
-
namespace proton {
DocumentRetrieverBase::DocumentRetrieverBase(
@@ -30,13 +25,12 @@ DocumentRetrieverBase::DocumentRetrieverBase(
_emptyDoc(),
_hasFields(hasFields)
{
- const document::DocumentType *
- docType(_repo.getDocumentType(_docTypeName.getName()));
- _emptyDoc.reset(new document::Document(*docType, docId));
+ const document::DocumentType * docType(_repo.getDocumentType(_docTypeName.getName()));
+ _emptyDoc.reset(new document::Document(*docType, DocumentId("id:empty:" + _docTypeName.getName() + "::empty")));
_emptyDoc->setRepo(_repo);
}
-DocumentRetrieverBase::~DocumentRetrieverBase() { }
+DocumentRetrieverBase::~DocumentRetrieverBase() = default;
const document::DocumentTypeRepo &
DocumentRetrieverBase::getDocumentTypeRepo() const {