-rw-r--r--  bundle-plugin-test/integration-test/src/test/java/com/yahoo/container/plugin/BundleTest.java | 1
-rw-r--r--  bundle-plugin-test/test-bundles/main/pom.xml | 5
-rw-r--r--  bundle-plugin-test/test-bundles/main/src/main/java/com/yahoo/test/SimpleSearcher.java | 8
-rw-r--r--  cloud-tenant-base-dependencies-enforcer/pom.xml | 2
-rw-r--r--  clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/ClusterStateBundle.java | 36
-rw-r--r--  clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/FleetController.java | 7
-rw-r--r--  clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/ResourceExhaustionCalculator.java | 23
-rw-r--r--  clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/hostinfo/ResourceUsage.java | 9
-rw-r--r--  clustercontroller-core/src/test/java/com/yahoo/vespa/clustercontroller/core/ClusterFeedBlockTest.java | 36
-rw-r--r--  clustercontroller-core/src/test/java/com/yahoo/vespa/clustercontroller/core/ClusterStateBundleTest.java | 34
-rw-r--r--  config-model-api/src/main/java/com/yahoo/config/model/api/ModelContext.java | 6
-rw-r--r--  config-model/src/main/java/com/yahoo/config/model/deploy/TestProperties.java | 7
-rw-r--r--  config-model/src/main/java/com/yahoo/vespa/model/admin/clustercontroller/ClusterControllerContainerCluster.java | 4
-rw-r--r--  config-model/src/main/java/com/yahoo/vespa/model/application/validation/change/NodeResourceChangeValidator.java | 2
-rw-r--r--  config-model/src/main/java/com/yahoo/vespa/model/container/xml/AccessLogBuilder.java | 24
-rw-r--r--  config-model/src/main/java/com/yahoo/vespa/model/container/xml/ConfigServerContainerModelBuilder.java | 2
-rw-r--r--  config-model/src/main/java/com/yahoo/vespa/model/container/xml/ContainerModelBuilder.java | 4
-rw-r--r--  config-model/src/main/java/com/yahoo/vespa/model/content/ClusterControllerConfig.java | 51
-rw-r--r--  config-model/src/main/java/com/yahoo/vespa/model/content/ClusterResourceLimits.java | 103
-rw-r--r--  config-model/src/main/java/com/yahoo/vespa/model/content/ContentSearchCluster.java | 9
-rw-r--r--  config-model/src/main/java/com/yahoo/vespa/model/content/ResourceLimits.java | 33
-rw-r--r--  config-model/src/main/java/com/yahoo/vespa/model/content/cluster/ContentCluster.java | 12
-rw-r--r--  config-model/src/main/java/com/yahoo/vespa/model/content/cluster/DomResourceLimitsBuilder.java | 8
-rw-r--r--  config-model/src/main/java/com/yahoo/vespa/model/content/storagecluster/FileStorProducer.java | 8
-rw-r--r--  config-model/src/main/python/ES_Vespa_parser.py | 2
-rw-r--r--  config-model/src/main/resources/schema/content.rnc | 3
-rw-r--r--  config-model/src/test/java/com/yahoo/vespa/model/application/validation/change/NodeResourceChangeValidatorTest.java | 6
-rw-r--r--  config-model/src/test/java/com/yahoo/vespa/model/content/ClusterResourceLimitsTest.java | 101
-rw-r--r--  config-model/src/test/java/com/yahoo/vespa/model/content/ContentSearchClusterTest.java | 43
-rw-r--r--  config-model/src/test/java/com/yahoo/vespa/model/content/FleetControllerClusterTest.java | 46
-rw-r--r--  config-model/src/test/java/com/yahoo/vespa/model/content/StorageClusterTest.java | 13
-rw-r--r--  config-model/src/test/java/com/yahoo/vespa/model/content/utils/ContentClusterBuilder.java | 45
-rw-r--r--  configserver/src/main/java/com/yahoo/vespa/config/server/deploy/ModelContextImpl.java | 6
-rw-r--r--  configserver/src/test/java/com/yahoo/vespa/config/server/ModelContextImplTest.java | 1
-rw-r--r--  container-core/pom.xml | 1
-rw-r--r--  container-core/src/main/java/com/yahoo/container/handler/metrics/HttpHandlerBase.java | 23
-rw-r--r--  container-core/src/main/java/com/yahoo/container/jdisc/state/CoredumpGatherer.java | 25
-rw-r--r--  container-core/src/main/java/com/yahoo/container/jdisc/state/HostLifeGatherer.java | 31
-rw-r--r--  container-core/src/main/java/com/yahoo/container/jdisc/state/JSONObjectWithLegibleException.java | 87
-rw-r--r--  container-core/src/main/java/com/yahoo/container/jdisc/state/JsonUtil.java | 15
-rw-r--r--  container-core/src/main/java/com/yahoo/container/jdisc/state/MetricGatherer.java | 6
-rw-r--r--  container-core/src/main/java/com/yahoo/container/jdisc/state/MetricsPacketsHandler.java | 68
-rw-r--r--  container-core/src/main/java/com/yahoo/container/jdisc/state/StateHandler.java | 120
-rw-r--r--  container-core/src/main/java/org/json/package-info.java | 2
-rw-r--r--  container-core/src/test/java/com/yahoo/container/handler/metrics/MetricsV2HandlerTest.java | 49
-rw-r--r--  container-core/src/test/java/com/yahoo/container/handler/metrics/PrometheusV1HandlerTest.java | 27
-rw-r--r--  container-core/src/test/java/com/yahoo/container/jdisc/state/CoredumpGathererTest.java | 13
-rw-r--r--  container-core/src/test/java/com/yahoo/container/jdisc/state/HostLifeGathererTest.java | 21
-rw-r--r--  container-core/src/test/java/com/yahoo/container/jdisc/state/StateHandlerTest.java | 63
-rw-r--r--  container-dependency-versions/pom.xml | 2
-rw-r--r--  container-di/src/main/java/com/yahoo/container/di/CloudSubscriberFactory.java | 3
-rw-r--r--  container-disc/src/main/java/com/yahoo/container/usability/BindingsOverviewHandler.java | 81
-rw-r--r--  container-search-and-docproc/src/main/java/com/yahoo/container/handler/observability/ApplicationStatusHandler.java | 198
-rw-r--r--  container-search-gui/src/main/java/com/yahoo/search/query/gui/GUIHandler.java | 82
-rw-r--r--  container-search/pom.xml | 5
-rw-r--r--  container-search/src/main/java/com/yahoo/prelude/hitfield/JSONString.java | 4
-rw-r--r--  container-search/src/main/java/com/yahoo/search/rendering/JsonRenderer.java | 10
-rw-r--r--  container-search/src/test/java/com/yahoo/prelude/fastsearch/SlimeSummaryTestCase.java | 23
-rw-r--r--  container-search/src/test/java/com/yahoo/search/handler/test/JSONSearchHandlerTestCase.java | 189
-rw-r--r--  container-search/src/test/java/com/yahoo/search/rendering/JsonRendererTestCase.java | 42
-rw-r--r--  container-search/src/test/java/com/yahoo/select/SelectTestCase.java | 196
-rw-r--r--  controller-api/src/main/java/com/yahoo/vespa/hosted/controller/api/application/v4/model/ProtonMetrics.java | 23
-rw-r--r--  controller-server/src/main/java/com/yahoo/vespa/hosted/controller/dns/NameServiceQueue.java | 3
-rw-r--r--  controller-server/src/main/java/com/yahoo/vespa/hosted/controller/maintenance/ControllerMaintenance.java | 6
-rw-r--r--  controller-server/src/main/java/com/yahoo/vespa/hosted/controller/maintenance/JobRunner.java | 10
-rw-r--r--  controller-server/src/main/java/com/yahoo/vespa/hosted/controller/restapi/application/ApplicationApiHandler.java | 30
-rw-r--r--  controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/ContainerTester.java | 15
-rw-r--r--  controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/ApplicationApiTest.java | 20
-rw-r--r--  controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/JobControllerApiHandlerHelperTest.java | 9
-rw-r--r--  controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/responses/deployment-job-accepted-2.json | 11
-rw-r--r--  controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/responses/proton-metrics.json | 45
-rw-r--r--  dist/vespa.spec | 1
-rw-r--r--  docker-api/src/main/java/com/yahoo/vespa/hosted/dockerapi/ContainerStats.java | 197
-rw-r--r--  docker-api/src/main/java/com/yahoo/vespa/hosted/dockerapi/DockerEngine.java | 42
-rw-r--r--  document/src/main/java/com/yahoo/document/json/SingleDocumentParser.java | 53
-rw-r--r--  document/src/vespa/document/bucket/bucket.h | 2
-rw-r--r--  document/src/vespa/document/bucket/bucketid.cpp | 8
-rw-r--r--  document/src/vespa/document/bucket/bucketid.h | 42
-rw-r--r--  eval/CMakeLists.txt | 1
-rw-r--r--  eval/src/tests/eval/fast_value/fast_value_test.cpp | 38
-rw-r--r--  eval/src/tests/eval/gen_spec/gen_spec_test.cpp | 2
-rw-r--r--  eval/src/tests/eval/reference_operations/reference_operations_test.cpp | 2
-rw-r--r--  eval/src/tests/eval/simple_value/simple_value_test.cpp | 107
-rw-r--r--  eval/src/tests/eval/tensor_lambda/tensor_lambda_test.cpp | 18
-rw-r--r--  eval/src/tests/eval/value_codec/value_codec_test.cpp | 52
-rw-r--r--  eval/src/tests/instruction/add_trivial_dimension_optimizer/add_trivial_dimension_optimizer_test.cpp | 12
-rw-r--r--  eval/src/tests/instruction/dense_replace_type_function/dense_replace_type_function_test.cpp | 4
-rw-r--r--  eval/src/tests/instruction/fast_rename_optimizer/fast_rename_optimizer_test.cpp | 16
-rw-r--r--  eval/src/tests/instruction/generic_concat/generic_concat_test.cpp | 107
-rw-r--r--  eval/src/tests/instruction/generic_create/generic_create_test.cpp | 44
-rw-r--r--  eval/src/tests/instruction/generic_join/generic_join_test.cpp | 70
-rw-r--r--  eval/src/tests/instruction/generic_map/generic_map_test.cpp | 44
-rw-r--r--  eval/src/tests/instruction/generic_merge/generic_merge_test.cpp | 50
-rw-r--r--  eval/src/tests/instruction/generic_peek/generic_peek_test.cpp | 36
-rw-r--r--  eval/src/tests/instruction/generic_reduce/generic_reduce_test.cpp | 59
-rw-r--r--  eval/src/tests/instruction/generic_rename/generic_rename_test.cpp | 49
-rw-r--r--  eval/src/tests/instruction/join_with_number/join_with_number_function_test.cpp | 26
-rw-r--r--  eval/src/tests/instruction/mixed_inner_product_function/mixed_inner_product_function_test.cpp | 71
-rw-r--r--  eval/src/tests/instruction/mixed_map_function/mixed_map_function_test.cpp | 14
-rw-r--r--  eval/src/tests/instruction/mixed_simple_join_function/mixed_simple_join_function_test.cpp | 42
-rw-r--r--  eval/src/tests/instruction/pow_as_map_optimizer/pow_as_map_optimizer_test.cpp | 18
-rw-r--r--  eval/src/tests/instruction/remove_trivial_dimension_optimizer/remove_trivial_dimension_optimizer_test.cpp | 10
-rw-r--r--  eval/src/tests/instruction/sparse_dot_product_function/CMakeLists.txt | 9
-rw-r--r--  eval/src/tests/instruction/sparse_dot_product_function/sparse_dot_product_function_test.cpp | 85
-rw-r--r--  eval/src/tests/instruction/sum_max_dot_product_function/sum_max_dot_product_function_test.cpp | 51
-rw-r--r--  eval/src/tests/streamed/value/streamed_value_test.cpp | 107
-rw-r--r--  eval/src/tests/tensor/instruction_benchmark/instruction_benchmark.cpp | 229
-rw-r--r--  eval/src/vespa/eval/eval/optimize_tensor_function.cpp | 8
-rw-r--r--  eval/src/vespa/eval/eval/test/eval_fixture.cpp | 22
-rw-r--r--  eval/src/vespa/eval/eval/test/eval_fixture.h | 5
-rw-r--r--  eval/src/vespa/eval/eval/test/gen_spec.cpp | 5
-rw-r--r--  eval/src/vespa/eval/eval/test/gen_spec.h | 7
-rw-r--r--  eval/src/vespa/eval/eval/test/param_variants.h | 23
-rw-r--r--  eval/src/vespa/eval/instruction/CMakeLists.txt | 1
-rw-r--r--  eval/src/vespa/eval/instruction/generic_join.cpp | 11
-rw-r--r--  eval/src/vespa/eval/instruction/generic_join.h | 10
-rw-r--r--  eval/src/vespa/eval/instruction/sparse_dot_product_function.cpp | 111
-rw-r--r--  eval/src/vespa/eval/instruction/sparse_dot_product_function.h | 23
-rw-r--r--  fastos/src/vespa/fastos/ringbuffer.h | 17
-rw-r--r--  flags/src/main/java/com/yahoo/vespa/flags/Flags.java | 44
-rw-r--r--  metrics-proxy/pom.xml | 5
-rw-r--r--  metrics-proxy/src/main/java/ai/vespa/metricsproxy/node/NodeMetricGatherer.java | 36
-rw-r--r--  metrics-proxy/src/main/java/ai/vespa/metricsproxy/service/RemoteHealthMetricFetcher.java | 16
-rw-r--r--  metrics-proxy/src/main/java/ai/vespa/metricsproxy/service/RemoteMetricsFetcher.java | 55
-rw-r--r--  metrics-proxy/src/test/java/ai/vespa/metricsproxy/http/application/ApplicationMetricsHandlerTest.java | 25
-rw-r--r--  metrics-proxy/src/test/java/ai/vespa/metricsproxy/http/metrics/MetricsHandlerTestBase.java | 18
-rw-r--r--  metrics-proxy/src/test/java/ai/vespa/metricsproxy/http/prometheus/PrometheusHandlerTest.java | 17
-rw-r--r--  metrics-proxy/src/test/java/ai/vespa/metricsproxy/node/NodeMetricGathererTest.java | 19
-rw-r--r--  metrics-proxy/src/test/java/ai/vespa/metricsproxy/rpc/RpcMetricsTest.java | 79
-rw-r--r--  metrics-proxy/src/test/java/ai/vespa/metricsproxy/service/ContainerServiceTest.java | 3
-rw-r--r--  node-repository/src/main/java/com/yahoo/vespa/hosted/provision/maintenance/CapacityChecker.java | 2
-rw-r--r--  node-repository/src/main/java/com/yahoo/vespa/hosted/provision/provisioning/GroupPreparer.java | 22
-rw-r--r--  persistence/src/vespa/persistence/spi/bucket.h | 8
-rw-r--r--  searchcommon/src/vespa/searchcommon/common/CMakeLists.txt | 2
-rw-r--r--  searchcommon/src/vespa/searchcommon/common/compaction_strategy.cpp | 15
-rw-r--r--  searchcommon/src/vespa/searchcommon/common/compaction_strategy.h | 3
-rw-r--r--  searchcommon/src/vespa/searchcommon/common/growstrategy.cpp | 18
-rw-r--r--  searchcommon/src/vespa/searchcommon/common/growstrategy.h | 7
-rw-r--r--  searchcore/CMakeLists.txt | 1
-rw-r--r--  searchcore/src/apps/tests/persistenceconformance_test.cpp | 2
-rw-r--r--  searchcore/src/apps/vespa-feed-bm/vespa_feed_bm.cpp | 2
-rw-r--r--  searchcore/src/tests/proton/attribute/attribute_manager/attribute_manager_test.cpp | 8
-rw-r--r--  searchcore/src/tests/proton/attribute/attribute_test.cpp | 2
-rw-r--r--  searchcore/src/tests/proton/common/alloc_config/CMakeLists.txt | 9
-rw-r--r--  searchcore/src/tests/proton/common/alloc_config/alloc_config_test.cpp | 35
-rw-r--r--  searchcore/src/tests/proton/documentdb/configurer/configurer_test.cpp | 7
-rw-r--r--  searchcore/src/tests/proton/documentdb/document_subdbs/document_subdbs_test.cpp | 3
-rw-r--r--  searchcore/src/tests/proton/documentdb/documentbucketmover/documentbucketmover_test.cpp | 23
-rw-r--r--  searchcore/src/tests/proton/flushengine/flushengine_test.cpp | 106
-rw-r--r--  searchcore/src/tests/proton/proton_config_fetcher/proton_config_fetcher_test.cpp | 30
-rw-r--r--  searchcore/src/tests/proton/proton_configurer/proton_configurer_test.cpp | 2
-rw-r--r--  searchcore/src/vespa/searchcore/config/proton.def | 6
-rw-r--r--  searchcore/src/vespa/searchcore/proton/attribute/attribute_collection_spec_factory.cpp | 11
-rw-r--r--  searchcore/src/vespa/searchcore/proton/attribute/attribute_collection_spec_factory.h | 8
-rw-r--r--  searchcore/src/vespa/searchcore/proton/attribute/attribute_manager_initializer.cpp | 9
-rw-r--r--  searchcore/src/vespa/searchcore/proton/attribute/attribute_manager_initializer.h | 8
-rw-r--r--  searchcore/src/vespa/searchcore/proton/attribute/attributemanager.cpp | 4
-rw-r--r--  searchcore/src/vespa/searchcore/proton/bucketdb/CMakeLists.txt | 1
-rw-r--r--  searchcore/src/vespa/searchcore/proton/bucketdb/bucketdb.h | 2
-rw-r--r--  searchcore/src/vespa/searchcore/proton/bucketdb/bucketscaniterator.cpp | 29
-rw-r--r--  searchcore/src/vespa/searchcore/proton/bucketdb/bucketscaniterator.h | 49
-rw-r--r--  searchcore/src/vespa/searchcore/proton/common/CMakeLists.txt | 2
-rw-r--r--  searchcore/src/vespa/searchcore/proton/common/alloc_config.cpp | 56
-rw-r--r--  searchcore/src/vespa/searchcore/proton/common/alloc_config.h | 34
-rw-r--r--  searchcore/src/vespa/searchcore/proton/common/alloc_strategy.cpp | 41
-rw-r--r--  searchcore/src/vespa/searchcore/proton/common/alloc_strategy.h | 41
-rw-r--r--  searchcore/src/vespa/searchcore/proton/flushengine/flushengine.h | 2
-rw-r--r--  searchcore/src/vespa/searchcore/proton/server/bucketmovejob.cpp | 40
-rw-r--r--  searchcore/src/vespa/searchcore/proton/server/bucketmovejob.h | 48
-rw-r--r--  searchcore/src/vespa/searchcore/proton/server/documentdb.cpp | 32
-rw-r--r--  searchcore/src/vespa/searchcore/proton/server/documentdbconfig.cpp | 17
-rw-r--r--  searchcore/src/vespa/searchcore/proton/server/documentdbconfig.h | 7
-rw-r--r--  searchcore/src/vespa/searchcore/proton/server/documentdbconfigmanager.cpp | 21
-rw-r--r--  searchcore/src/vespa/searchcore/proton/server/documentsubdbcollection.cpp | 15
-rw-r--r--  searchcore/src/vespa/searchcore/proton/server/documentsubdbcollection.h | 15
-rw-r--r--  searchcore/src/vespa/searchcore/proton/server/fast_access_doc_subdb.cpp | 15
-rw-r--r--  searchcore/src/vespa/searchcore/proton/server/fast_access_doc_subdb.h | 2
-rw-r--r--  searchcore/src/vespa/searchcore/proton/server/reconfig_params.cpp | 5
-rw-r--r--  searchcore/src/vespa/searchcore/proton/server/searchabledocsubdb.cpp | 6
-rw-r--r--  searchcore/src/vespa/searchcore/proton/server/storeonlydocsubdb.cpp | 34
-rw-r--r--  searchcore/src/vespa/searchcore/proton/server/storeonlydocsubdb.h | 16
-rw-r--r--  searchcore/src/vespa/searchcore/proton/test/documentdb_config_builder.cpp | 4
-rw-r--r--  searchcore/src/vespa/searchcore/proton/test/documentdb_config_builder.h | 1
-rw-r--r--  searchlib/src/tests/attribute/compaction/attribute_compaction_test.cpp | 20
-rw-r--r--  searchlib/src/vespa/searchlib/attribute/attributevector.cpp | 26
-rw-r--r--  searchlib/src/vespa/searchlib/attribute/attributevector.h | 3
-rw-r--r--  storage/src/vespa/storage/bucketdb/bucketcopy.h | 44
-rw-r--r--  storage/src/vespa/storage/bucketdb/bucketmanager.cpp | 8
-rw-r--r--  storage/src/vespa/storage/distributor/pending_bucket_space_db_transition_entry.h | 2
-rw-r--r--  storage/src/vespa/storage/distributor/pendingmessagetracker.h | 12
-rw-r--r--  storage/src/vespa/storage/persistence/asynchandler.cpp | 2
-rw-r--r--  storage/src/vespa/storage/persistence/filestorage/filestormanager.cpp | 11
-rw-r--r--  storage/src/vespa/storage/persistence/filestorage/service_layer_host_info_reporter.h | 2
-rw-r--r--  storage/src/vespa/storage/persistence/messages.cpp | 12
-rw-r--r--  storage/src/vespa/storage/persistence/messages.h | 5
-rw-r--r--  storageapi/src/vespa/storageapi/buckets/bucketinfo.cpp | 5
-rw-r--r--  storageapi/src/vespa/storageapi/buckets/bucketinfo.h | 40
-rw-r--r--  vespalib/src/vespa/vespalib/gtest/gtest.h | 13
-rw-r--r--  vespalib/src/vespa/vespalib/util/executor.h | 4
-rw-r--r--  vespalib/src/vespa/vespalib/util/growstrategy.h | 2
-rw-r--r--  yolean/src/main/java/com/yahoo/yolean/chain/Chain.java | 2
201 files changed, 3326 insertions, 2311 deletions
diff --git a/bundle-plugin-test/integration-test/src/test/java/com/yahoo/container/plugin/BundleTest.java b/bundle-plugin-test/integration-test/src/test/java/com/yahoo/container/plugin/BundleTest.java
index a46abce1dff..35a95ed3d89 100644
--- a/bundle-plugin-test/integration-test/src/test/java/com/yahoo/container/plugin/BundleTest.java
+++ b/bundle-plugin-test/integration-test/src/test/java/com/yahoo/container/plugin/BundleTest.java
@@ -91,7 +91,6 @@ public class BundleTest {
// From SimpleSearcher
assertThat(importPackage, containsString("com.yahoo.prelude.hitfield"));
- assertThat(importPackage, containsString("org.json"));
// From SimpleSearcher2
assertThat(importPackage, containsString("com.yahoo.processing"));
diff --git a/bundle-plugin-test/test-bundles/main/pom.xml b/bundle-plugin-test/test-bundles/main/pom.xml
index c9c9ea270eb..190e1c9d90f 100644
--- a/bundle-plugin-test/test-bundles/main/pom.xml
+++ b/bundle-plugin-test/test-bundles/main/pom.xml
@@ -16,11 +16,6 @@
<packaging>container-plugin</packaging>
<dependencies>
<dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- <scope>provided</scope>
- </dependency>
- <dependency>
<groupId>com.yahoo.vespa</groupId>
<artifactId>jrt</artifactId>
<version>${project.version}</version>
diff --git a/bundle-plugin-test/test-bundles/main/src/main/java/com/yahoo/test/SimpleSearcher.java b/bundle-plugin-test/test-bundles/main/src/main/java/com/yahoo/test/SimpleSearcher.java
index dddca3f4d59..ae9644aa010 100644
--- a/bundle-plugin-test/test-bundles/main/src/main/java/com/yahoo/test/SimpleSearcher.java
+++ b/bundle-plugin-test/test-bundles/main/src/main/java/com/yahoo/test/SimpleSearcher.java
@@ -8,8 +8,6 @@ import com.yahoo.search.Searcher;
import com.yahoo.search.result.Hit;
import com.yahoo.search.searchchain.Execution;
import com.yahoo.text.BooleanParser;
-import org.json.JSONException;
-import org.json.JSONObject;
/**
* A searcher adding a new hit.
@@ -19,19 +17,13 @@ import org.json.JSONObject;
public class SimpleSearcher extends Searcher {
public Result search(Query query,Execution execution) {
- try {
BooleanParser.parseBoolean("true");
XMLString xmlString = new XMLString("<sampleXmlString/>");
Hit hit = new Hit("Hello world!");
- hit.setField("json", new JSONObject().put("price", 42).toString());
Result result = execution.search(query);
result.hits().add(hit);
return result;
-
- } catch (JSONException e) {
- throw new RuntimeException(e);
- }
}
}
diff --git a/cloud-tenant-base-dependencies-enforcer/pom.xml b/cloud-tenant-base-dependencies-enforcer/pom.xml
index a8655a82860..8bb7b75be89 100644
--- a/cloud-tenant-base-dependencies-enforcer/pom.xml
+++ b/cloud-tenant-base-dependencies-enforcer/pom.xml
@@ -34,7 +34,7 @@
<junit5.version>5.7.0</junit5.version>
<junit5.platform.version>1.7.0</junit5.platform.version>
<org.lz4.version>1.7.1</org.lz4.version>
- <org.json.version>20090211</org.json.version>
+ <org.json.version>20090211</org.json.version><!-- TODO Vespa 8: remove as provided dependency -->
<slf4j.version>1.7.5</slf4j.version>
<tensorflow.version>1.12.0</tensorflow.version>
<xml-apis.version>1.4.01</xml-apis.version>
diff --git a/clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/ClusterStateBundle.java b/clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/ClusterStateBundle.java
index 0ca4f5632a8..c4e61b1d3d0 100644
--- a/clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/ClusterStateBundle.java
+++ b/clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/ClusterStateBundle.java
@@ -44,16 +44,30 @@ public class ClusterStateBundle {
public static class FeedBlock {
private final boolean blockFeedInCluster;
private final String description;
+ private final Set<NodeResourceExhaustion> concreteExhaustions;
public FeedBlock(boolean blockFeedInCluster, String description) {
this.blockFeedInCluster = blockFeedInCluster;
this.description = description;
+ this.concreteExhaustions = Collections.emptySet();
+ }
+
+ public FeedBlock(boolean blockFeedInCluster, String description,
+ Set<NodeResourceExhaustion> concreteExhaustions)
+ {
+ this.blockFeedInCluster = blockFeedInCluster;
+ this.description = description;
+ this.concreteExhaustions = concreteExhaustions;
}
public static FeedBlock blockedWithDescription(String desc) {
return new FeedBlock(true, desc);
}
+ public static FeedBlock blockedWith(String description, Set<NodeResourceExhaustion> concreteExhaustions) {
+ return new FeedBlock(true, description, concreteExhaustions);
+ }
+
public boolean blockFeedInCluster() {
return blockFeedInCluster;
}
@@ -62,18 +76,31 @@ public class ClusterStateBundle {
return description;
}
+ public Set<NodeResourceExhaustion> getConcreteExhaustions() {
+ return concreteExhaustions;
+ }
+
+ public boolean similarTo(FeedBlock other) {
+ // We check everything _but_ the description, as that includes current usage
+ // as floating point and we don't care about reporting changes in that. We do
+ // however care about reporting changes to the actual set of exhaustions.
+ return (blockFeedInCluster == other.blockFeedInCluster &&
+ Objects.equals(concreteExhaustions, other.concreteExhaustions));
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
FeedBlock feedBlock = (FeedBlock) o;
- return (blockFeedInCluster == feedBlock.blockFeedInCluster &&
- Objects.equals(description, feedBlock.description));
+ return blockFeedInCluster == feedBlock.blockFeedInCluster &&
+ Objects.equals(description, feedBlock.description) &&
+ Objects.equals(concreteExhaustions, feedBlock.concreteExhaustions);
}
@Override
public int hashCode() {
- return Objects.hash(blockFeedInCluster, description);
+ return Objects.hash(blockFeedInCluster, description, concreteExhaustions);
}
}
@@ -229,6 +256,9 @@ public class ClusterStateBundle {
if (clusterFeedIsBlocked() != other.clusterFeedIsBlocked()) {
return false;
}
+ if (clusterFeedIsBlocked() && !feedBlock.similarTo(other.feedBlock)) {
+ return false;
+ }
// FIXME we currently treat mismatching bucket space sets as unchanged to avoid breaking some tests
return derivedBucketSpaceStates.entrySet().stream()
.allMatch(entry -> other.derivedBucketSpaceStates.getOrDefault(entry.getKey(), entry.getValue())
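Note on the FeedBlock changes above: equals() still compares the human-readable description (which embeds the current usage as a floating point number), while the new similarTo() deliberately ignores the description and compares only the blocked flag and the concrete exhaustion set. A minimal illustrative sketch (not part of the patch), reusing the NodeResourceExhaustion constructor that appears in the test changes further down; the RPC address string is made up:

    var exhaustions = Set.of(new NodeResourceExhaustion(
            new Node(NodeType.STORAGE, 1), "memory", new ResourceUsage(0.81, null), 0.80, "rpc/node1"));
    var a = ClusterStateBundle.FeedBlock.blockedWith("memory on node 1 (0.810 > 0.800)", exhaustions);
    var b = ClusterStateBundle.FeedBlock.blockedWith("memory on node 1 (0.950 > 0.800)", exhaustions);
    a.equals(b);    // false: descriptions differ (they embed the usage numbers)
    a.similarTo(b); // true:  same exhaustion set, so no new cluster state is needed
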
diff --git a/clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/FleetController.java b/clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/FleetController.java
index b3151916a90..e238303b58b 100644
--- a/clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/FleetController.java
+++ b/clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/FleetController.java
@@ -346,12 +346,11 @@ public class FleetController implements NodeStateOrHostInfoChangeHandler, NodeAd
return;
}
// TODO hysteresis to prevent oscillations!
- // TODO also ensure we trigger if CC options have changed
var calc = createResourceExhaustionCalculator();
// Important: nodeInfo contains the _current_ host info _prior_ to newHostInfo being applied.
- boolean previouslyExhausted = !calc.enumerateNodeResourceExhaustions(nodeInfo).isEmpty();
- boolean nowExhausted = !calc.resourceExhaustionsFromHostInfo(nodeInfo, newHostInfo).isEmpty();
- if (previouslyExhausted != nowExhausted) {
+ var previouslyExhausted = calc.enumerateNodeResourceExhaustions(nodeInfo);
+ var nowExhausted = calc.resourceExhaustionsFromHostInfo(nodeInfo, newHostInfo);
+ if (!previouslyExhausted.equals(nowExhausted)) {
log.fine(() -> String.format("Triggering state recomputation due to change in cluster feed block: %s -> %s",
previouslyExhausted, nowExhausted));
stateChangeHandler.setStateChangedFlag();
diff --git a/clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/ResourceExhaustionCalculator.java b/clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/ResourceExhaustionCalculator.java
index 21f8d6a1f2d..e2e61eb8ed0 100644
--- a/clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/ResourceExhaustionCalculator.java
+++ b/clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/ResourceExhaustionCalculator.java
@@ -8,8 +8,10 @@ import com.yahoo.vespa.clustercontroller.core.hostinfo.HostInfo;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
+import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.stream.Collectors;
/**
@@ -43,7 +45,10 @@ public class ResourceExhaustionCalculator {
if (exhaustions.size() > maxDescriptions) {
description += String.format(" (... and %d more)", exhaustions.size() - maxDescriptions);
}
- return ClusterStateBundle.FeedBlock.blockedWithDescription(description);
+ // FIXME we currently will trigger a cluster state recomputation even if the number of
+ // exhaustions is greater than what is returned as part of the description. Though at
+ // that point, cluster state recomputations will be the least of your worries...!
+ return ClusterStateBundle.FeedBlock.blockedWith(description, exhaustions);
}
private static String formatNodeResourceExhaustion(NodeResourceExhaustion n) {
@@ -66,34 +71,34 @@ public class ResourceExhaustionCalculator {
return spec.host();
}
- public List<NodeResourceExhaustion> resourceExhaustionsFromHostInfo(NodeInfo nodeInfo, HostInfo hostInfo) {
- List<NodeResourceExhaustion> exceedingLimit = null;
+ public Set<NodeResourceExhaustion> resourceExhaustionsFromHostInfo(NodeInfo nodeInfo, HostInfo hostInfo) {
+ Set<NodeResourceExhaustion> exceedingLimit = null;
for (var usage : hostInfo.getContentNode().getResourceUsage().entrySet()) {
double limit = feedBlockLimits.getOrDefault(usage.getKey(), 1.0);
if (usage.getValue().getUsage() > limit) {
if (exceedingLimit == null) {
- exceedingLimit = new ArrayList<>();
+ exceedingLimit = new LinkedHashSet<>();
}
exceedingLimit.add(new NodeResourceExhaustion(nodeInfo.getNode(), usage.getKey(), usage.getValue(),
limit, nodeInfo.getRpcAddress()));
}
}
- return (exceedingLimit != null) ? exceedingLimit : Collections.emptyList();
+ return (exceedingLimit != null) ? exceedingLimit : Collections.emptySet();
}
- public List<NodeResourceExhaustion> enumerateNodeResourceExhaustions(NodeInfo nodeInfo) {
+ public Set<NodeResourceExhaustion> enumerateNodeResourceExhaustions(NodeInfo nodeInfo) {
if (!nodeInfo.isStorage()) {
- return Collections.emptyList();
+ return Collections.emptySet();
}
return resourceExhaustionsFromHostInfo(nodeInfo, nodeInfo.getHostInfo());
}
// Returns 0-n entries per content node in the cluster, where n is the number of exhausted
// resource types on any given node.
- public List<NodeResourceExhaustion> enumerateNodeResourceExhaustionsAcrossAllNodes(Collection<NodeInfo> nodeInfos) {
+ public Set<NodeResourceExhaustion> enumerateNodeResourceExhaustionsAcrossAllNodes(Collection<NodeInfo> nodeInfos) {
return nodeInfos.stream()
.flatMap(info -> enumerateNodeResourceExhaustions(info).stream())
- .collect(Collectors.toList());
+ .collect(Collectors.toCollection(LinkedHashSet::new));
}
}
diff --git a/clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/hostinfo/ResourceUsage.java b/clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/hostinfo/ResourceUsage.java
index 876bf9480a6..da0862d7de9 100644
--- a/clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/hostinfo/ResourceUsage.java
+++ b/clustercontroller-core/src/main/java/com/yahoo/vespa/clustercontroller/core/hostinfo/ResourceUsage.java
@@ -8,6 +8,11 @@ import java.util.Objects;
/**
* Encapsulation of the usage levels for a particular resource type. The resource type
* itself is not tracked in this class; this must be done on a higher level.
+ *
+ * Note: equality checks and hash code computations do NOT include the actual floating
+ * point usage! This is so sets of ResourceUsages are de-duplicated at the resource level
+ * regardless of the relative usage (all cases where these are compared is assumed to
+ * be when feed is blocked anyway, so just varying levels over the feed block limit).
*/
public class ResourceUsage {
private final Double usage;
@@ -33,11 +38,11 @@ public class ResourceUsage {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ResourceUsage that = (ResourceUsage) o;
- return Objects.equals(usage, that.usage) && Objects.equals(name, that.name);
+ return Objects.equals(name, that.name);
}
@Override
public int hashCode() {
- return Objects.hash(usage, name);
+ return Objects.hash(name);
}
}
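The point of dropping usage from equals()/hashCode() above is that collections of exhaustions compare equal as long as the same resources are exhausted, regardless of the reported level. A small sketch (not part of the patch), mirroring the constructor arguments used in ClusterStateBundleTest below:

    var a = new ResourceUsage(0.80, null);
    var b = new ResourceUsage(0.95, null);
    a.equals(b);                  // true: the usage level is ignored, only 'name' is compared
    a.hashCode() == b.hashCode(); // true: sets and maps de-duplicate per resource, not per level
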
diff --git a/clustercontroller-core/src/test/java/com/yahoo/vespa/clustercontroller/core/ClusterFeedBlockTest.java b/clustercontroller-core/src/test/java/com/yahoo/vespa/clustercontroller/core/ClusterFeedBlockTest.java
index 5c6fcd21701..8dd2a9ca55c 100644
--- a/clustercontroller-core/src/test/java/com/yahoo/vespa/clustercontroller/core/ClusterFeedBlockTest.java
+++ b/clustercontroller-core/src/test/java/com/yahoo/vespa/clustercontroller/core/ClusterFeedBlockTest.java
@@ -23,6 +23,7 @@ import static com.yahoo.vespa.clustercontroller.core.FeedBlockUtil.mapOf;
import static com.yahoo.vespa.clustercontroller.core.FeedBlockUtil.setOf;
import static com.yahoo.vespa.clustercontroller.core.FeedBlockUtil.usage;
import static com.yahoo.vespa.clustercontroller.core.FeedBlockUtil.createResourceUsageJson;
+import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@@ -133,4 +134,39 @@ public class ClusterFeedBlockTest extends FleetControllerTest {
assertFalse(ctrl.getClusterStateBundle().clusterFeedIsBlocked());
}
+ @Test
+ public void cluster_feed_block_state_is_recomputed_when_resource_block_set_differs() throws Exception {
+ initialize(createOptions(mapOf(usage("cheese", 0.7), usage("wine", 0.4))));
+ assertFalse(ctrl.getClusterStateBundle().clusterFeedIsBlocked());
+
+ reportResourceUsageFromNode(1, setOf(usage("cheese", 0.8), usage("wine", 0.3)));
+ var bundle = ctrl.getClusterStateBundle();
+ assertTrue(bundle.clusterFeedIsBlocked());
+ assertEquals("cheese on node 1 [unknown hostname] (0.800 > 0.700)", bundle.getFeedBlock().get().getDescription());
+
+ reportResourceUsageFromNode(1, setOf(usage("cheese", 0.8), usage("wine", 0.5)));
+ bundle = ctrl.getClusterStateBundle();
+ assertTrue(bundle.clusterFeedIsBlocked());
+ assertEquals("cheese on node 1 [unknown hostname] (0.800 > 0.700), " +
+ "wine on node 1 [unknown hostname] (0.500 > 0.400)",
+ bundle.getFeedBlock().get().getDescription());
+ }
+
+ @Test
+ public void cluster_feed_block_state_is_not_recomputed_when_only_resource_usage_levels_differ() throws Exception {
+ initialize(createOptions(mapOf(usage("cheese", 0.7), usage("wine", 0.4))));
+ assertFalse(ctrl.getClusterStateBundle().clusterFeedIsBlocked());
+
+ reportResourceUsageFromNode(1, setOf(usage("cheese", 0.8), usage("wine", 0.3)));
+ var bundle = ctrl.getClusterStateBundle();
+ assertTrue(bundle.clusterFeedIsBlocked());
+ assertEquals("cheese on node 1 [unknown hostname] (0.800 > 0.700)", bundle.getFeedBlock().get().getDescription());
+
+ // 80% -> 90%, should not trigger new state.
+ reportResourceUsageFromNode(1, setOf(usage("cheese", 0.9), usage("wine", 0.4)));
+ bundle = ctrl.getClusterStateBundle();
+ assertTrue(bundle.clusterFeedIsBlocked());
+ assertEquals("cheese on node 1 [unknown hostname] (0.800 > 0.700)", bundle.getFeedBlock().get().getDescription());
+ }
+
}
diff --git a/clustercontroller-core/src/test/java/com/yahoo/vespa/clustercontroller/core/ClusterStateBundleTest.java b/clustercontroller-core/src/test/java/com/yahoo/vespa/clustercontroller/core/ClusterStateBundleTest.java
index d2db47131bd..ceddf7cdcf3 100644
--- a/clustercontroller-core/src/test/java/com/yahoo/vespa/clustercontroller/core/ClusterStateBundleTest.java
+++ b/clustercontroller-core/src/test/java/com/yahoo/vespa/clustercontroller/core/ClusterStateBundleTest.java
@@ -6,9 +6,14 @@ import com.yahoo.vdslib.state.Node;
import com.yahoo.vdslib.state.NodeState;
import com.yahoo.vdslib.state.NodeType;
import com.yahoo.vdslib.state.State;
+import com.yahoo.vespa.clustercontroller.core.hostinfo.ResourceUsage;
import org.junit.Test;
+import java.util.Arrays;
+import java.util.LinkedHashSet;
+import java.util.Set;
import java.util.function.Function;
+import java.util.stream.Collectors;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
@@ -53,6 +58,12 @@ public class ClusterStateBundleTest {
.deriveAndBuild();
}
+ private static ClusterStateBundle createTestBundleWithFeedBlock(String description, Set<NodeResourceExhaustion> concreteExhaustions) {
+ return createTestBundleBuilder(false)
+ .feedBlock(ClusterStateBundle.FeedBlock.blockedWith(description, concreteExhaustions))
+ .deriveAndBuild();
+ }
+
private static ClusterStateBundle createTestBundle() {
return createTestBundle(true);
}
@@ -109,6 +120,29 @@ public class ClusterStateBundleTest {
assertTrue(blockingBundle.similarTo(blockingBundleWithOtherDesc));
}
+ static NodeResourceExhaustion createDummyExhaustion(String type) {
+ return new NodeResourceExhaustion(new Node(NodeType.STORAGE, 1), type, new ResourceUsage(0.8, null), 0.7, "foo");
+ }
+
+ static Set<NodeResourceExhaustion> exhaustionsOf(String... types) {
+ return Arrays.stream(types)
+ .map(t -> createDummyExhaustion(t))
+ .collect(Collectors.toCollection(LinkedHashSet::new));
+ }
+
+ @Test
+ public void similarity_test_considers_cluster_feed_block_concrete_exhaustion_set() {
+ var blockingBundleNoSet = createTestBundleWithFeedBlock("foo");
+ var blockingBundleWithSet = createTestBundleWithFeedBlock("bar", exhaustionsOf("beer", "wine"));
+ var blockingBundleWithOtherSet = createTestBundleWithFeedBlock("bar", exhaustionsOf("beer", "soda"));
+
+ assertTrue(blockingBundleNoSet.similarTo(blockingBundleNoSet));
+ assertTrue(blockingBundleWithSet.similarTo(blockingBundleWithSet));
+ assertFalse(blockingBundleWithSet.similarTo(blockingBundleWithOtherSet));
+ assertFalse(blockingBundleNoSet.similarTo(blockingBundleWithSet));
+ assertFalse(blockingBundleNoSet.similarTo(blockingBundleWithOtherSet));
+ }
+
@Test
public void feed_block_state_is_available() {
var nonBlockingBundle = createTestBundle(false);
diff --git a/config-model-api/src/main/java/com/yahoo/config/model/api/ModelContext.java b/config-model-api/src/main/java/com/yahoo/config/model/api/ModelContext.java
index a9a83386573..6517c3e57fc 100644
--- a/config-model-api/src/main/java/com/yahoo/config/model/api/ModelContext.java
+++ b/config-model-api/src/main/java/com/yahoo/config/model/api/ModelContext.java
@@ -64,8 +64,8 @@ public interface ModelContext {
* - Remove below method once all config-model versions in hosted production include changes from 1)
*/
interface FeatureFlags {
- @ModelFeatureFlag(owners = {"bjorncs", "jonmv"}) default boolean enableAutomaticReindexing() { return false; }
- @ModelFeatureFlag(owners = {"bjorncs", "jonmv"}) default double reindexerWindowSizeIncrement() { return 0.2; }
+ @ModelFeatureFlag(owners = {"bjorncs", "jonmv"}, removeAfter = "7.352") default boolean enableAutomaticReindexing() { return true; }
+ @ModelFeatureFlag(owners = {"jonmv"}) default double reindexerWindowSizeIncrement() { return 0.2; }
@ModelFeatureFlag(owners = {"baldersheim"}, comment = "Revisit in May or June 2021") default double defaultTermwiseLimit() { throw new UnsupportedOperationException("TODO specify default value"); }
@ModelFeatureFlag(owners = {"vekterli"}) default boolean useThreePhaseUpdates() { throw new UnsupportedOperationException("TODO specify default value"); }
@ModelFeatureFlag(owners = {"baldersheim"}, comment = "Select sequencer type use while feeding") default String feedSequencerType() { throw new UnsupportedOperationException("TODO specify default value"); }
@@ -77,7 +77,7 @@ public interface ModelContext {
@ModelFeatureFlag(owners = {"tokle"}) default boolean useAccessControlTlsHandshakeClientAuth() { return false; }
@ModelFeatureFlag(owners = {"baldersheim"}) default boolean useAsyncMessageHandlingOnSchedule() { throw new UnsupportedOperationException("TODO specify default value"); }
@ModelFeatureFlag(owners = {"baldersheim"}, removeAfter = "7.348") default int contentNodeBucketDBStripeBits() { return 4; }
- @ModelFeatureFlag(owners = {"baldersheim"}) default int mergeChunkSize() { throw new UnsupportedOperationException("TODO specify default value"); }
+ @ModelFeatureFlag(owners = {"baldersheim"}, removeAfter = "7.350") default int mergeChunkSize() { return 0x2000000; }
@ModelFeatureFlag(owners = {"baldersheim"}) default double feedConcurrency() { throw new UnsupportedOperationException("TODO specify default value"); }
@ModelFeatureFlag(owners = {"baldersheim"}) default boolean useBucketExecutorForLidSpaceCompact() { throw new UnsupportedOperationException("TODO specify default value"); }
@ModelFeatureFlag(owners = {"musum", "mpolden"}, comment = "Revisit in February 2021") default boolean reconfigurableZookeeperServer() { return false; }
diff --git a/config-model/src/main/java/com/yahoo/config/model/deploy/TestProperties.java b/config-model/src/main/java/com/yahoo/config/model/deploy/TestProperties.java
index 686cb7ce7c6..6ca1c8f2b79 100644
--- a/config-model/src/main/java/com/yahoo/config/model/deploy/TestProperties.java
+++ b/config-model/src/main/java/com/yahoo/config/model/deploy/TestProperties.java
@@ -47,7 +47,6 @@ public class TestProperties implements ModelContext.Properties, ModelContext.Fea
private Quota quota = Quota.unlimited();
private boolean useAccessControlTlsHandshakeClientAuth;
private boolean useAsyncMessageHandlingOnSchedule = false;
- private int mergeChunkSize = 0x400000 - 0x1000; // 4M -4k
private double feedConcurrency = 0.5;
private boolean enableAutomaticReindexing = false;
private boolean reconfigurableZookeeperServer = false;
@@ -82,7 +81,6 @@ public class TestProperties implements ModelContext.Properties, ModelContext.Fea
@Override public Quota quota() { return quota; }
@Override public boolean useAccessControlTlsHandshakeClientAuth() { return useAccessControlTlsHandshakeClientAuth; }
@Override public boolean useAsyncMessageHandlingOnSchedule() { return useAsyncMessageHandlingOnSchedule; }
- @Override public int mergeChunkSize() { return mergeChunkSize; }
@Override public double feedConcurrency() { return feedConcurrency; }
@Override public boolean enableAutomaticReindexing() { return enableAutomaticReindexing; }
@Override public boolean reconfigurableZookeeperServer() { return reconfigurableZookeeperServer; }
@@ -94,11 +92,6 @@ public class TestProperties implements ModelContext.Properties, ModelContext.Fea
return this;
}
- public TestProperties setMergeChunkSize(int size) {
- mergeChunkSize = size;
- return this;
- }
-
public TestProperties setAsyncMessageHandlingOnSchedule(boolean value) {
useAsyncMessageHandlingOnSchedule = value;
return this;
diff --git a/config-model/src/main/java/com/yahoo/vespa/model/admin/clustercontroller/ClusterControllerContainerCluster.java b/config-model/src/main/java/com/yahoo/vespa/model/admin/clustercontroller/ClusterControllerContainerCluster.java
index 3fe6ce3ff27..8423f7d723a 100644
--- a/config-model/src/main/java/com/yahoo/vespa/model/admin/clustercontroller/ClusterControllerContainerCluster.java
+++ b/config-model/src/main/java/com/yahoo/vespa/model/admin/clustercontroller/ClusterControllerContainerCluster.java
@@ -32,9 +32,7 @@ public class ClusterControllerContainerCluster extends ContainerCluster<ClusterC
public ReindexingContext reindexingContext() { return reindexingContext; }
private static ReindexingContext createReindexingContext(DeployState deployState) {
- Reindexing reindexing = deployState.featureFlags().enableAutomaticReindexing()
- ? deployState.reindexing().orElse(Reindexing.DISABLED_INSTANCE)
- : Reindexing.DISABLED_INSTANCE;
+ Reindexing reindexing = deployState.reindexing().orElse(Reindexing.DISABLED_INSTANCE);
return new ReindexingContext(reindexing, deployState.featureFlags().reindexerWindowSizeIncrement());
}
diff --git a/config-model/src/main/java/com/yahoo/vespa/model/application/validation/change/NodeResourceChangeValidator.java b/config-model/src/main/java/com/yahoo/vespa/model/application/validation/change/NodeResourceChangeValidator.java
index 0add9f243fe..a462cb4fdb3 100644
--- a/config-model/src/main/java/com/yahoo/vespa/model/application/validation/change/NodeResourceChangeValidator.java
+++ b/config-model/src/main/java/com/yahoo/vespa/model/application/validation/change/NodeResourceChangeValidator.java
@@ -39,7 +39,7 @@ public class NodeResourceChangeValidator implements ChangeValidator {
}
private boolean changeRequiresRestart(NodeResources currentResources, NodeResources nextResources) {
- return currentResources.memoryGb() != nextResources.memoryGb();
+ return !currentResources.equals(nextResources);
}
private Optional<NodeResources> resourcesOf(ClusterSpec.Id clusterId, VespaModel model) {
diff --git a/config-model/src/main/java/com/yahoo/vespa/model/container/xml/AccessLogBuilder.java b/config-model/src/main/java/com/yahoo/vespa/model/container/xml/AccessLogBuilder.java
index 6a858bd2e02..936b9f1c851 100644
--- a/config-model/src/main/java/com/yahoo/vespa/model/container/xml/AccessLogBuilder.java
+++ b/config-model/src/main/java/com/yahoo/vespa/model/container/xml/AccessLogBuilder.java
@@ -46,17 +46,19 @@ public class AccessLogBuilder {
private static class DomBuilder extends VespaDomBuilder.DomConfigProducerBuilder<AccessLogComponent> {
private final AccessLogType accessLogType;
private final boolean isHostedVespa;
+ private final boolean isConfigserver;
- public DomBuilder(AccessLogType accessLogType, boolean isHostedVespa) {
+ public DomBuilder(AccessLogType accessLogType, boolean isHostedVespa, boolean isConfigserver) {
this.accessLogType = accessLogType;
this.isHostedVespa = isHostedVespa;
+ this.isConfigserver = isConfigserver;
}
@Override
protected AccessLogComponent doBuild(DeployState deployState, AbstractConfigProducer<?> ancestor, Element spec) {
return new AccessLogComponent(
accessLogType,
- compressionType(spec, deployState, isHostedVespa),
+ compressionType(spec, deployState, isHostedVespa, isConfigserver),
fileNamePattern(spec),
rotationInterval(spec),
compressOnRotation(spec),
@@ -81,7 +83,14 @@ public class AccessLogBuilder {
return nullIfEmpty(spec.getAttribute("fileNamePattern"));
}
- private static CompressionType compressionType(Element spec, DeployState deployState, boolean isHostedVespa) {
+ private static CompressionType compressionType(Element spec, DeployState deployState, boolean isHostedVespa, boolean isConfigserver) {
+ CompressionType fallback;
+ if (isHostedVespa && (isConfigserver || deployState.featureFlags().enableZstdCompressionAccessLog())) {
+ fallback = CompressionType.ZSTD;
+ } else {
+ fallback = CompressionType.GZIP;
+ }
+ if (isConfigserver && isHostedVespa) return CompressionType.ZSTD;
return Optional.ofNullable(spec.getAttribute("compressionType"))
.filter(value -> !value.isBlank())
.map(value -> {
@@ -94,7 +103,7 @@ public class AccessLogBuilder {
throw new IllegalArgumentException("Unknown compression type: " + value);
}
})
- .orElse(isHostedVespa && deployState.featureFlags().enableZstdCompressionAccessLog() ? CompressionType.ZSTD : CompressionType.GZIP);
+ .orElse(fallback);
}
}
@@ -111,7 +120,7 @@ public class AccessLogBuilder {
}
}
- public static Optional<AccessLogComponent> buildIfNotDisabled(DeployState deployState, ContainerCluster<?> cluster, Element accessLogSpec) {
+ public static Optional<AccessLogComponent> buildIfNotDisabled(DeployState deployState, ContainerCluster<?> cluster, Element accessLogSpec, boolean isConfigserver) {
AccessLogTypeLiteral typeLiteral =
getOptionalAttribute(accessLogSpec, "type").
map(AccessLogTypeLiteral::fromAttributeValue).
@@ -121,6 +130,9 @@ public class AccessLogBuilder {
return Optional.empty();
}
boolean hosted = cluster.isHostedVespa();
- return Optional.of(new DomBuilder(logType, hosted).build(deployState, cluster, accessLogSpec));
+ if (hosted && isConfigserver && logType != AccessLogType.jsonAccessLog) {
+ return Optional.empty(); // Only enable JSON access logging for hosted configserver/controller
+ }
+ return Optional.of(new DomBuilder(logType, hosted, isConfigserver).build(deployState, cluster, accessLogSpec));
}
}
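In summary, after this change a hosted configserver/controller cluster only gets an access log built at all when the type is jsonAccessLog, and its compression is always ZSTD; for other clusters an explicit compressionType attribute wins, with ZSTD as the fallback when hosted and the enableZstdCompressionAccessLog flag is set, else GZIP. A hedged paraphrase of that decision order (not the patch itself; the method name and parameters here are made up for illustration):

    static CompressionType effectiveCompression(boolean hosted, boolean configserver,
                                                Optional<CompressionType> explicitType, boolean zstdFlag) {
        if (hosted && configserver) return CompressionType.ZSTD;  // attribute is ignored in this case
        if (explicitType.isPresent()) return explicitType.get();  // explicit compressionType="..." wins
        return (hosted && zstdFlag) ? CompressionType.ZSTD : CompressionType.GZIP;
    }
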
diff --git a/config-model/src/main/java/com/yahoo/vespa/model/container/xml/ConfigServerContainerModelBuilder.java b/config-model/src/main/java/com/yahoo/vespa/model/container/xml/ConfigServerContainerModelBuilder.java
index ab734c506c1..e7db4ab0564 100644
--- a/config-model/src/main/java/com/yahoo/vespa/model/container/xml/ConfigServerContainerModelBuilder.java
+++ b/config-model/src/main/java/com/yahoo/vespa/model/container/xml/ConfigServerContainerModelBuilder.java
@@ -22,6 +22,8 @@ public class ConfigServerContainerModelBuilder extends ContainerModelBuilder {
this.options = options;
}
+ @Override protected boolean isConfigserver() { return true; }
+
@Override
public void doBuild(ContainerModel model, Element spec, ConfigModelContext modelContext) {
ConfigserverCluster cluster = new ConfigserverCluster(modelContext.getParentProducer(), "configserver",
diff --git a/config-model/src/main/java/com/yahoo/vespa/model/container/xml/ContainerModelBuilder.java b/config-model/src/main/java/com/yahoo/vespa/model/container/xml/ContainerModelBuilder.java
index d650b10a910..07ccf3808fd 100644
--- a/config-model/src/main/java/com/yahoo/vespa/model/container/xml/ContainerModelBuilder.java
+++ b/config-model/src/main/java/com/yahoo/vespa/model/container/xml/ContainerModelBuilder.java
@@ -147,6 +147,8 @@ public class ContainerModelBuilder extends ConfigModelBuilder<ContainerModel> {
this.httpServerEnabled = networking == Networking.enable;
}
+ protected boolean isConfigserver() { return false; }
+
@Override
public List<ConfigModelId> handlesElements() {
return configModelIds;
@@ -342,7 +344,7 @@ public class ContainerModelBuilder extends ConfigModelBuilder<ContainerModel> {
List<Element> accessLogElements = getAccessLogElements(spec);
for (Element accessLog : accessLogElements) {
- AccessLogBuilder.buildIfNotDisabled(deployState, cluster, accessLog).ifPresent(cluster::addComponent);
+ AccessLogBuilder.buildIfNotDisabled(deployState, cluster, accessLog, isConfigserver()).ifPresent(cluster::addComponent);
}
if (accessLogElements.isEmpty() && deployState.getAccessLoggingEnabledByDefault())
diff --git a/config-model/src/main/java/com/yahoo/vespa/model/content/ClusterControllerConfig.java b/config-model/src/main/java/com/yahoo/vespa/model/content/ClusterControllerConfig.java
index 8d6127970c8..66ec0d81947 100644
--- a/config-model/src/main/java/com/yahoo/vespa/model/content/ClusterControllerConfig.java
+++ b/config-model/src/main/java/com/yahoo/vespa/model/content/ClusterControllerConfig.java
@@ -19,12 +19,14 @@ import org.w3c.dom.Element;
public class ClusterControllerConfig extends AbstractConfigProducer<ClusterControllerConfig> implements FleetcontrollerConfig.Producer {
public static class Builder extends VespaDomBuilder.DomConfigProducerBuilder<ClusterControllerConfig> {
- String clusterName;
- ModelElement clusterElement;
+ private final String clusterName;
+ private final ModelElement clusterElement;
+ private final ResourceLimits resourceLimits;
- public Builder(String clusterName, ModelElement clusterElement) {
+ public Builder(String clusterName, ModelElement clusterElement, ResourceLimits resourceLimits) {
this.clusterName = clusterName;
this.clusterElement = clusterElement;
+ this.resourceLimits = resourceLimits;
}
@Override
@@ -51,27 +53,29 @@ public class ClusterControllerConfig extends AbstractConfigProducer<ClusterContr
tuning.childAsDouble("min-storage-up-ratio"),
bucketSplittingMinimumBits,
minNodeRatioPerGroup,
- enableClusterFeedBlock);
+ enableClusterFeedBlock,
+ resourceLimits);
} else {
return new ClusterControllerConfig(ancestor, clusterName,
null, null, null, null, null, null,
bucketSplittingMinimumBits,
minNodeRatioPerGroup,
- enableClusterFeedBlock);
+ enableClusterFeedBlock, resourceLimits);
}
}
}
- String clusterName;
- Duration initProgressTime;
- Duration transitionTime;
- Long maxPrematureCrashes;
- Duration stableStateTimePeriod;
- Double minDistributorUpRatio;
- Double minStorageUpRatio;
- Integer minSplitBits;
- private Double minNodeRatioPerGroup;
- private boolean enableClusterFeedBlock = false;
+ private final String clusterName;
+ private final Duration initProgressTime;
+ private final Duration transitionTime;
+ private final Long maxPrematureCrashes;
+ private final Duration stableStateTimePeriod;
+ private final Double minDistributorUpRatio;
+ private final Double minStorageUpRatio;
+ private final Integer minSplitBits;
+ private final Double minNodeRatioPerGroup;
+ private final boolean enableClusterFeedBlock;
+ private final ResourceLimits resourceLimits;
// TODO refactor; too many args
private ClusterControllerConfig(AbstractConfigProducer parent,
@@ -84,7 +88,8 @@ public class ClusterControllerConfig extends AbstractConfigProducer<ClusterContr
Double minStorageUpRatio,
Integer minSplitBits,
Double minNodeRatioPerGroup,
- boolean enableClusterFeedBlock) {
+ boolean enableClusterFeedBlock,
+ ResourceLimits resourceLimits) {
super(parent, "fleetcontroller");
this.clusterName = clusterName;
@@ -97,6 +102,7 @@ public class ClusterControllerConfig extends AbstractConfigProducer<ClusterContr
this.minSplitBits = minSplitBits;
this.minNodeRatioPerGroup = minNodeRatioPerGroup;
this.enableClusterFeedBlock = enableClusterFeedBlock;
+ this.resourceLimits = resourceLimits;
}
@Override
@@ -139,18 +145,7 @@ public class ClusterControllerConfig extends AbstractConfigProducer<ClusterContr
builder.min_node_ratio_per_group(minNodeRatioPerGroup);
}
builder.enable_cluster_feed_block(enableClusterFeedBlock);
- setDefaultClusterFeedBlockLimits(builder);
+ resourceLimits.getConfig(builder);
}
- private static void setDefaultClusterFeedBlockLimits(FleetcontrollerConfig.Builder builder) {
- // TODO: Override these based on resource-limits in services.xml (if they are specified).
- // TODO: Choose other defaults when this is default enabled.
- // Note: The resource categories must match the ones used in host info reporting
- // between content nodes and cluster controller:
- // storage/src/vespa/storage/persistence/filestorage/service_layer_host_info_reporter.cpp
- builder.cluster_feed_block_limit.put("memory", 0.79);
- builder.cluster_feed_block_limit.put("disk", 0.79);
- builder.cluster_feed_block_limit.put("attribute-enum-store", 0.89);
- builder.cluster_feed_block_limit.put("attribute-multi-value", 0.89);
- }
}
diff --git a/config-model/src/main/java/com/yahoo/vespa/model/content/ClusterResourceLimits.java b/config-model/src/main/java/com/yahoo/vespa/model/content/ClusterResourceLimits.java
new file mode 100644
index 00000000000..5324ee171ec
--- /dev/null
+++ b/config-model/src/main/java/com/yahoo/vespa/model/content/ClusterResourceLimits.java
@@ -0,0 +1,103 @@
+// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+package com.yahoo.vespa.model.content;
+
+import com.yahoo.vespa.model.builder.xml.dom.ModelElement;
+import com.yahoo.vespa.model.content.cluster.DomResourceLimitsBuilder;
+
+import java.util.Optional;
+import java.util.function.Consumer;
+
+/**
+ * Class tracking the feed block resource limits for a content cluster.
+ *
+ * This includes the limits used by the cluster controller and the content nodes (proton).
+ *
+ * @author geirst
+ */
+public class ClusterResourceLimits {
+
+ private final ResourceLimits clusterControllerLimits;
+ private final ResourceLimits contentNodeLimits;
+
+ private ClusterResourceLimits(Builder builder) {
+ clusterControllerLimits = builder.ctrlBuilder.build();
+ contentNodeLimits = builder.nodeBuilder.build();
+ }
+
+ public ResourceLimits getClusterControllerLimits() {
+ return clusterControllerLimits;
+ }
+
+ public ResourceLimits getContentNodeLimits() {
+ return contentNodeLimits;
+ }
+
+ public static class Builder {
+
+ private ResourceLimits.Builder ctrlBuilder = new ResourceLimits.Builder();
+ private ResourceLimits.Builder nodeBuilder = new ResourceLimits.Builder();
+
+ public ClusterResourceLimits build(ModelElement clusterElem) {
+
+ ModelElement tuningElem = clusterElem.childByPath("tuning");
+ if (tuningElem != null) {
+ ctrlBuilder = DomResourceLimitsBuilder.createBuilder(tuningElem);
+ }
+
+ ModelElement protonElem = clusterElem.childByPath("engine.proton");
+ if (protonElem != null) {
+ nodeBuilder = DomResourceLimitsBuilder.createBuilder(protonElem);
+ }
+
+ deriveLimits();
+ return new ClusterResourceLimits(this);
+ }
+
+ public void setClusterControllerBuilder(ResourceLimits.Builder builder) {
+ ctrlBuilder = builder;
+ }
+
+ public void setContentNodeBuilder(ResourceLimits.Builder builder) {
+ nodeBuilder = builder;
+ }
+
+ public ClusterResourceLimits build() {
+ deriveLimits();
+ return new ClusterResourceLimits(this);
+ }
+
+ private void deriveLimits() {
+ deriveClusterControllerLimit(ctrlBuilder.getDiskLimit(), nodeBuilder.getDiskLimit(), ctrlBuilder::setDiskLimit);
+ deriveClusterControllerLimit(ctrlBuilder.getMemoryLimit(), nodeBuilder.getMemoryLimit(), ctrlBuilder::setMemoryLimit);
+
+ deriveContentNodeLimit(nodeBuilder.getDiskLimit(), ctrlBuilder.getDiskLimit(), nodeBuilder::setDiskLimit);
+ deriveContentNodeLimit(nodeBuilder.getMemoryLimit(), ctrlBuilder.getMemoryLimit(), nodeBuilder::setMemoryLimit);
+ }
+
+ private void deriveClusterControllerLimit(Optional<Double> clusterControllerLimit,
+ Optional<Double> contentNodeLimit,
+ Consumer<Double> setter) {
+ if (!clusterControllerLimit.isPresent()) {
+ contentNodeLimit.ifPresent(limit ->
+ // TODO: emit warning when using cluster controller resource limits are default enabled.
+ setter.accept(limit));
+ }
+ }
+
+ private void deriveContentNodeLimit(Optional<Double> contentNodeLimit,
+ Optional<Double> clusterControllerLimit,
+ Consumer<Double> setter) {
+ if (!contentNodeLimit.isPresent()) {
+ clusterControllerLimit.ifPresent(limit ->
+ setter.accept(calcContentNodeLimit(limit)));
+ }
+ }
+
+ private double calcContentNodeLimit(double clusterControllerLimit) {
+ // Note that validation in the range [0.0-1.0] is handled by the rnc schema.
+ return clusterControllerLimit + ((1.0 - clusterControllerLimit) / 2);
+ }
+
+ }
+
+}
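The derivation in deriveLimits() fills in whichever side is not explicitly configured: a missing cluster controller limit inherits the content node (proton) limit unchanged, while a missing content node limit is placed halfway between the cluster controller limit and 1.0. An illustrative sketch (not part of the patch) using only the builder methods defined above, with a disk limit of 0.8 configured on the cluster controller side:

    var builder = new ClusterResourceLimits.Builder();
    builder.setClusterControllerBuilder(new ResourceLimits.Builder().setDiskLimit(0.8));
    builder.setContentNodeBuilder(new ResourceLimits.Builder());  // no explicit proton limits
    ClusterResourceLimits limits = builder.build();
    limits.getClusterControllerLimits().getDiskLimit();  // Optional[0.8]
    limits.getContentNodeLimits().getDiskLimit();        // Optional[0.9] = 0.8 + (1.0 - 0.8) / 2
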
diff --git a/config-model/src/main/java/com/yahoo/vespa/model/content/ContentSearchCluster.java b/config-model/src/main/java/com/yahoo/vespa/model/content/ContentSearchCluster.java
index dd29df61f35..d7df62d56cf 100644
--- a/config-model/src/main/java/com/yahoo/vespa/model/content/ContentSearchCluster.java
+++ b/config-model/src/main/java/com/yahoo/vespa/model/content/ContentSearchCluster.java
@@ -79,13 +79,15 @@ public class ContentSearchCluster extends AbstractConfigProducer<SearchCluster>
private final Map<String, NewDocumentType> documentDefinitions;
private final Set<NewDocumentType> globallyDistributedDocuments;
private final boolean combined;
+ private final ResourceLimits resourceLimits;
public Builder(Map<String, NewDocumentType> documentDefinitions,
Set<NewDocumentType> globallyDistributedDocuments,
- boolean combined) {
+ boolean combined, ResourceLimits resourceLimits) {
this.documentDefinitions = documentDefinitions;
this.globallyDistributedDocuments = globallyDistributedDocuments;
this.combined = combined;
+ this.resourceLimits = resourceLimits;
}
@Override
@@ -106,10 +108,7 @@ public class ContentSearchCluster extends AbstractConfigProducer<SearchCluster>
if (tuning != null) {
search.setTuning(new DomSearchTuningBuilder().build(deployState, search, tuning.getXml()));
}
- ModelElement protonElem = clusterElem.childByPath("engine.proton");
- if (protonElem != null) {
- search.setResourceLimits(DomResourceLimitsBuilder.build(protonElem));
- }
+ search.setResourceLimits(resourceLimits);
buildAllStreamingSearchClusters(deployState, clusterElem, clusterName, search);
buildIndexedSearchCluster(deployState, clusterElem, clusterName, search);
diff --git a/config-model/src/main/java/com/yahoo/vespa/model/content/ResourceLimits.java b/config-model/src/main/java/com/yahoo/vespa/model/content/ResourceLimits.java
index 28e8c36d202..e96ba47c6b3 100644
--- a/config-model/src/main/java/com/yahoo/vespa/model/content/ResourceLimits.java
+++ b/config-model/src/main/java/com/yahoo/vespa/model/content/ResourceLimits.java
@@ -1,16 +1,17 @@
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.model.content;
+import com.yahoo.vespa.config.content.FleetcontrollerConfig;
import com.yahoo.vespa.config.search.core.ProtonConfig;
import java.util.Optional;
/**
- * Class tracking resource limits for a content cluster with engine proton.
+ * Class tracking feed block resource limits used by a component in a content cluster (e.g. cluster controller or content node).
*
* @author geirst
*/
-public class ResourceLimits implements ProtonConfig.Producer {
+public class ResourceLimits implements FleetcontrollerConfig.Producer, ProtonConfig.Producer {
private final Optional<Double> diskLimit;
private final Optional<Double> memoryLimit;
@@ -20,6 +21,26 @@ public class ResourceLimits implements ProtonConfig.Producer {
this.memoryLimit = builder.memoryLimit;
}
+ public Optional<Double> getDiskLimit() {
+ return diskLimit;
+ }
+
+ public Optional<Double> getMemoryLimit() {
+ return memoryLimit;
+ }
+
+ @Override
+ public void getConfig(FleetcontrollerConfig.Builder builder) {
+ // TODO: Choose other defaults when this is enabled by default.
+ // Note: The resource categories must match the ones used in host info reporting
+ // between content nodes and cluster controller:
+ // storage/src/vespa/storage/persistence/filestorage/service_layer_host_info_reporter.cpp
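+ // Explicitly configured disk and memory limits override the 0.79 defaults; the attribute limits are fixed for now.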
+ builder.cluster_feed_block_limit.put("memory", memoryLimit.orElse(0.79));
+ builder.cluster_feed_block_limit.put("disk", diskLimit.orElse(0.79));
+ builder.cluster_feed_block_limit.put("attribute-enum-store", 0.89);
+ builder.cluster_feed_block_limit.put("attribute-multi-value", 0.89);
+ }
+
@Override
public void getConfig(ProtonConfig.Builder builder) {
if (diskLimit.isPresent()) {
@@ -39,11 +60,19 @@ public class ResourceLimits implements ProtonConfig.Producer {
return new ResourceLimits(this);
}
+ public Optional<Double> getDiskLimit() {
+ return diskLimit;
+ }
+
public Builder setDiskLimit(double diskLimit) {
this.diskLimit = Optional.of(diskLimit);
return this;
}
+ public Optional<Double> getMemoryLimit() {
+ return memoryLimit;
+ }
+
public Builder setMemoryLimit(double memoryLimit) {
this.memoryLimit = Optional.of(memoryLimit);
return this;
diff --git a/config-model/src/main/java/com/yahoo/vespa/model/content/cluster/ContentCluster.java b/config-model/src/main/java/com/yahoo/vespa/model/content/cluster/ContentCluster.java
index a627e030156..44de4a1abec 100644
--- a/config-model/src/main/java/com/yahoo/vespa/model/content/cluster/ContentCluster.java
+++ b/config-model/src/main/java/com/yahoo/vespa/model/content/cluster/ContentCluster.java
@@ -38,6 +38,7 @@ import com.yahoo.vespa.model.container.Container;
import com.yahoo.vespa.model.container.ContainerCluster;
import com.yahoo.vespa.model.container.ContainerModel;
import com.yahoo.vespa.model.content.ClusterControllerConfig;
+import com.yahoo.vespa.model.content.ClusterResourceLimits;
import com.yahoo.vespa.model.content.ContentSearch;
import com.yahoo.vespa.model.content.ContentSearchCluster;
import com.yahoo.vespa.model.content.DistributionBitCalculator;
@@ -134,11 +135,14 @@ public class ContentCluster extends AbstractConfigProducer implements
ContentCluster c = new ContentCluster(context.getParentProducer(), getClusterId(contentElement), documentDefinitions,
globallyDistributedDocuments, routingSelection,
deployState.zone(), deployState.isHosted());
- c.clusterControllerConfig = new ClusterControllerConfig.Builder(getClusterId(contentElement), contentElement).build(deployState, c, contentElement.getXml());
+ var resourceLimits = new ClusterResourceLimits.Builder().build(contentElement);
+ c.clusterControllerConfig = new ClusterControllerConfig.Builder(getClusterId(contentElement),
+ contentElement,
+ resourceLimits.getClusterControllerLimits()).build(deployState, c, contentElement.getXml());
c.search = new ContentSearchCluster.Builder(documentDefinitions,
- globallyDistributedDocuments,
- isCombined(getClusterId(contentElement), containers))
- .build(deployState, c, contentElement.getXml());
+ globallyDistributedDocuments,
+ isCombined(getClusterId(contentElement), containers),
+ resourceLimits.getContentNodeLimits()).build(deployState, c, contentElement.getXml());
c.persistenceFactory = new EngineFactoryBuilder().build(contentElement, c);
c.storageNodes = new StorageCluster.Builder().build(deployState, c, w3cContentElement);
c.distributorNodes = new DistributorCluster.Builder(c).build(deployState, c, w3cContentElement);
diff --git a/config-model/src/main/java/com/yahoo/vespa/model/content/cluster/DomResourceLimitsBuilder.java b/config-model/src/main/java/com/yahoo/vespa/model/content/cluster/DomResourceLimitsBuilder.java
index 8e91f14238e..210f062f9b2 100644
--- a/config-model/src/main/java/com/yahoo/vespa/model/content/cluster/DomResourceLimitsBuilder.java
+++ b/config-model/src/main/java/com/yahoo/vespa/model/content/cluster/DomResourceLimitsBuilder.java
@@ -5,17 +5,17 @@ import com.yahoo.vespa.model.builder.xml.dom.ModelElement;
import com.yahoo.vespa.model.content.ResourceLimits;
/**
- * Builder for resource limits for a content cluster with engine proton.
+ * Builder for feed block resource limits.
*
* @author geirst
*/
public class DomResourceLimitsBuilder {
- public static ResourceLimits build(ModelElement contentXml) {
+ public static ResourceLimits.Builder createBuilder(ModelElement contentXml) {
ResourceLimits.Builder builder = new ResourceLimits.Builder();
ModelElement resourceLimits = contentXml.child("resource-limits");
if (resourceLimits == null) {
- return builder.build();
+ return builder;
}
if (resourceLimits.child("disk") != null) {
builder.setDiskLimit(resourceLimits.childAsDouble("disk"));
@@ -23,7 +23,7 @@ public class DomResourceLimitsBuilder {
if (resourceLimits.child("memory") != null) {
builder.setMemoryLimit(resourceLimits.childAsDouble("memory"));
}
- return builder.build();
+ return builder;
}
}
diff --git a/config-model/src/main/java/com/yahoo/vespa/model/content/storagecluster/FileStorProducer.java b/config-model/src/main/java/com/yahoo/vespa/model/content/storagecluster/FileStorProducer.java
index ab5dab4fbb9..57292b32a35 100644
--- a/config-model/src/main/java/com/yahoo/vespa/model/content/storagecluster/FileStorProducer.java
+++ b/config-model/src/main/java/com/yahoo/vespa/model/content/storagecluster/FileStorProducer.java
@@ -47,7 +47,6 @@ public class FileStorProducer implements StorFilestorConfig.Producer {
private final int reponseNumThreads;
private final StorFilestorConfig.Response_sequencer_type.Enum responseSequencerType;
private final boolean useAsyncMessageHandlingOnSchedule;
- private final int mergeChunkSize;
private static StorFilestorConfig.Response_sequencer_type.Enum convertResponseSequencerType(String sequencerType) {
try {
@@ -56,17 +55,13 @@ public class FileStorProducer implements StorFilestorConfig.Producer {
return StorFilestorConfig.Response_sequencer_type.Enum.ADAPTIVE;
}
}
- private static int alignUp2MiB(int value) {
- final int twoMB = 0x200000;
- return ((value + twoMB - 1)/twoMB) * twoMB;
- }
+
public FileStorProducer(ModelContext.FeatureFlags featureFlags, ContentCluster parent, Integer numThreads) {
this.numThreads = numThreads;
this.cluster = parent;
this.reponseNumThreads = featureFlags.defaultNumResponseThreads();
this.responseSequencerType = convertResponseSequencerType(featureFlags.responseSequencerType());
useAsyncMessageHandlingOnSchedule = featureFlags.useAsyncMessageHandlingOnSchedule();
- mergeChunkSize = alignUp2MiB(featureFlags.mergeChunkSize()); // Align up to default huge page size.
}
@Override
@@ -78,7 +73,6 @@ public class FileStorProducer implements StorFilestorConfig.Producer {
builder.num_response_threads(reponseNumThreads);
builder.response_sequencer_type(responseSequencerType);
builder.use_async_message_handling_on_schedule(useAsyncMessageHandlingOnSchedule);
- builder.bucket_merge_chunk_size(mergeChunkSize);
}
}
diff --git a/config-model/src/main/python/ES_Vespa_parser.py b/config-model/src/main/python/ES_Vespa_parser.py
index b3398fd0403..86df16981f0 100644
--- a/config-model/src/main/python/ES_Vespa_parser.py
+++ b/config-model/src/main/python/ES_Vespa_parser.py
@@ -32,7 +32,7 @@ class ElasticSearchParser:
self.application_name = args.application_name
def main(self):
- self.path = os.getcwd() + "/application/"
+ self.path = os.getcwd() + "/" + self.application_name + "/"
try:
os.mkdir(self.path, 0o777)
print(" > Created folder '" + self.path + "'")
diff --git a/config-model/src/main/resources/schema/content.rnc b/config-model/src/main/resources/schema/content.rnc
index 5646bc72056..a48d38b9f2c 100644
--- a/config-model/src/main/resources/schema/content.rnc
+++ b/config-model/src/main/resources/schema/content.rnc
@@ -98,7 +98,8 @@ ClusterTuning = element tuning {
ClusterControllerTuning? &
Maintenance? &
PersistenceThreads? &
- MinNodeRatioPerGroup?
+ MinNodeRatioPerGroup? &
+ ResourceLimits?
}
Content = element content {
diff --git a/config-model/src/test/java/com/yahoo/vespa/model/application/validation/change/NodeResourceChangeValidatorTest.java b/config-model/src/test/java/com/yahoo/vespa/model/application/validation/change/NodeResourceChangeValidatorTest.java
index ecf026e7d88..180e4913d5c 100644
--- a/config-model/src/test/java/com/yahoo/vespa/model/application/validation/change/NodeResourceChangeValidatorTest.java
+++ b/config-model/src/test/java/com/yahoo/vespa/model/application/validation/change/NodeResourceChangeValidatorTest.java
@@ -61,7 +61,7 @@ public class NodeResourceChangeValidatorTest {
Clock.systemUTC().instant());
}
- private static VespaModel model(int mem1, int mem2, int mem3, int mem4) {
+ private static VespaModel model(int mem1, int mem2, int cpu1, int cpu2) {
var properties = new TestProperties();
properties.setHostedVespa(true);
var deployState = new DeployState.Builder().properties(properties)
@@ -82,7 +82,7 @@ public class NodeResourceChangeValidatorTest {
" </container>\n" +
" <content id='content1' version='1.0'>\n" +
" <nodes count='3'>\n" +
- " <resources vcpu='1' memory='" + mem3 + "Gb' disk='100Gb'/>" +
+ " <resources vcpu='" + cpu1 + "' memory='8Gb' disk='100Gb'/>" +
" </nodes>\n" +
" <documents>\n" +
" <document mode='index' type='test'/>\n" +
@@ -91,7 +91,7 @@ public class NodeResourceChangeValidatorTest {
" </content>\n" +
" <content id='content2' version='1.0'>\n" +
" <nodes count='4'>\n" +
- " <resources vcpu='1' memory='" + mem4 + "Gb' disk='100Gb'/>" +
+ " <resources vcpu='" + cpu2 + "' memory='8Gb' disk='100Gb'/>" +
" </nodes>\n" +
" <documents>\n" +
" <document mode='streaming' type='test'/>\n" +
diff --git a/config-model/src/test/java/com/yahoo/vespa/model/content/ClusterResourceLimitsTest.java b/config-model/src/test/java/com/yahoo/vespa/model/content/ClusterResourceLimitsTest.java
new file mode 100644
index 00000000000..bc830c079d0
--- /dev/null
+++ b/config-model/src/test/java/com/yahoo/vespa/model/content/ClusterResourceLimitsTest.java
@@ -0,0 +1,101 @@
+// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+package com.yahoo.vespa.model.content;
+
+import org.junit.Test;
+
+import java.util.Optional;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
+/**
+ * @author geirst
+ */
+public class ClusterResourceLimitsTest {
+
+ private static class Fixture {
+ ResourceLimits.Builder ctrlBuilder = new ResourceLimits.Builder();
+ ResourceLimits.Builder nodeBuilder = new ResourceLimits.Builder();
+
+ public Fixture ctrlDisk(double limit) {
+ ctrlBuilder.setDiskLimit(limit);
+ return this;
+ }
+ public Fixture ctrlMemory(double limit) {
+ ctrlBuilder.setMemoryLimit(limit);
+ return this;
+ }
+ public Fixture nodeDisk(double limit) {
+ nodeBuilder.setDiskLimit(limit);
+ return this;
+ }
+ public Fixture nodeMemory(double limit) {
+ nodeBuilder.setMemoryLimit(limit);
+ return this;
+ }
+ public ClusterResourceLimits build() {
+ var builder = new ClusterResourceLimits.Builder();
+ builder.setClusterControllerBuilder(ctrlBuilder);
+ builder.setContentNodeBuilder(nodeBuilder);
+ return builder.build();
+ }
+ }
+
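+ // Expected limits below are given in the order: cluster controller disk, cluster controller memory, content node disk, content node memory.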
+ @Test
+ public void content_node_limits_are_derived_from_cluster_controller_limits_if_not_set() {
+ assertLimits(0.6, 0.7, 0.8, 0.85,
+ new Fixture().ctrlDisk(0.6).ctrlMemory(0.7));
+ assertLimits(0.6, null, 0.8, null,
+ new Fixture().ctrlDisk(0.6));
+ assertLimits(null, 0.7, null, 0.85,
+ new Fixture().ctrlMemory(0.7));
+ }
+
+ @Test
+ public void content_node_limits_can_be_set_explicit() {
+ assertLimits(0.6, 0.7, 0.9, 0.95,
+ new Fixture().ctrlDisk(0.6).ctrlMemory(0.7).nodeDisk(0.9).nodeMemory(0.95));
+ assertLimits(0.6, null, 0.9, null,
+ new Fixture().ctrlDisk(0.6).nodeDisk(0.9));
+ assertLimits(null, 0.7, null, 0.95,
+ new Fixture().ctrlMemory(0.7).nodeMemory(0.95));
+ }
+
+ @Test
+ public void cluster_controller_limits_are_equal_to_content_node_limits_if_not_set() {
+ assertLimits(0.9, 0.95, 0.9, 0.95,
+ new Fixture().nodeDisk(0.9).nodeMemory(0.95));
+ assertLimits(0.9, null, 0.9, null,
+ new Fixture().nodeDisk(0.9));
+ assertLimits(null, 0.95, null, 0.95,
+ new Fixture().nodeMemory(0.95));
+ }
+
+ @Test
+ public void limits_are_derived_from_the_other_if_not_set() {
+ assertLimits(0.6, 0.95, 0.8, 0.95,
+ new Fixture().ctrlDisk(0.6).nodeMemory(0.95));
+ assertLimits(0.9, 0.7, 0.9, 0.85,
+ new Fixture().ctrlMemory(0.7).nodeDisk(0.9));
+ }
+
+ private void assertLimits(Double expCtrlDisk, Double expCtrlMemory, Double expNodeDisk, Double expNodeMemory, Fixture f) {
+ var limits = f.build();
+ assertLimits(expCtrlDisk, expCtrlMemory, limits.getClusterControllerLimits());
+ assertLimits(expNodeDisk, expNodeMemory, limits.getContentNodeLimits());
+ }
+
+ private void assertLimits(Double expDisk, Double expMemory, ResourceLimits limits) {
+ assertLimit(expDisk, limits.getDiskLimit());
+ assertLimit(expMemory, limits.getMemoryLimit());
+ }
+
+ private void assertLimit(Double expLimit, Optional<Double> actLimit) {
+ if (expLimit == null) {
+ assertFalse(actLimit.isPresent());
+ } else {
+ assertEquals(expLimit, actLimit.get(), 0.00001);
+ }
+ }
+
+}
diff --git a/config-model/src/test/java/com/yahoo/vespa/model/content/ContentSearchClusterTest.java b/config-model/src/test/java/com/yahoo/vespa/model/content/ContentSearchClusterTest.java
index 3415044b088..bc60908e268 100644
--- a/config-model/src/test/java/com/yahoo/vespa/model/content/ContentSearchClusterTest.java
+++ b/config-model/src/test/java/com/yahoo/vespa/model/content/ContentSearchClusterTest.java
@@ -69,15 +69,30 @@ public class ContentSearchClusterTest {
}
private static ProtonConfig getProtonConfig(ContentCluster cluster) {
- ProtonConfig.Builder protonCfgBuilder = new ProtonConfig.Builder();
- cluster.getSearch().getConfig(protonCfgBuilder);
- return new ProtonConfig(protonCfgBuilder);
+ var builder = new ProtonConfig.Builder();
+ cluster.getSearch().getConfig(builder);
+ return new ProtonConfig(builder);
}
- private static void assertProtonResourceLimits(double expDiskLimit, double expMemoryLimits, String clusterXml) throws Exception {
- ProtonConfig cfg = getProtonConfig(createCluster(clusterXml));
+ private static void assertProtonResourceLimits(double expDiskLimit, double expMemoryLimit, String clusterXml) throws Exception {
+ assertProtonResourceLimits(expDiskLimit, expMemoryLimit, createCluster(clusterXml));
+ }
+
+ private static void assertProtonResourceLimits(double expDiskLimit, double expMemoryLimit, ContentCluster cluster) {
+ var cfg = getProtonConfig(cluster);
assertEquals(expDiskLimit, cfg.writefilter().disklimit(), EPSILON);
- assertEquals(expMemoryLimits, cfg.writefilter().memorylimit(), EPSILON);
+ assertEquals(expMemoryLimit, cfg.writefilter().memorylimit(), EPSILON);
+ }
+
+ private static void assertClusterControllerResourceLimits(double expDiskLimit, double expMemoryLimit, String clusterXml) throws Exception {
+ assertClusterControllerResourceLimits(expDiskLimit, expMemoryLimit, createCluster(clusterXml));
+ }
+
+ private static void assertClusterControllerResourceLimits(double expDiskLimit, double expMemoryLimit, ContentCluster cluster) {
+ var limits = getFleetcontrollerConfig(cluster).cluster_feed_block_limit();
+ assertEquals(4, limits.size());
+ assertEquals(expDiskLimit, limits.get("disk"), EPSILON);
+ assertEquals(expMemoryLimit, limits.get("memory"), EPSILON);
}
@Test
@@ -105,6 +120,19 @@ public class ContentSearchClusterTest {
}
@Test
+ public void cluster_controller_resource_limits_can_be_set() throws Exception {
+ assertClusterControllerResourceLimits(0.92, 0.93,
+ new ContentClusterBuilder().clusterControllerDiskLimit(0.92).clusterControllerMemoryLimit(0.93).getXml());
+ }
+
+ @Test
+ public void resource_limits_are_derived_from_the_other_if_not_specified() throws Exception {
+ var cluster = createCluster(new ContentClusterBuilder().clusterControllerDiskLimit(0.5).protonMemoryLimit(0.95).getXml());
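+ // Derived values: content node disk = 0.5 + (1.0 - 0.5) / 2 = 0.75, and cluster controller memory = content node memory = 0.95.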
+ assertProtonResourceLimits(0.75, 0.95, cluster);
+ assertClusterControllerResourceLimits(0.5, 0.95, cluster);
+ }
+
+ @Test
public void requireThatGloballyDistributedDocumentTypeIsTaggedAsSuch() throws Exception {
ProtonConfig cfg = getProtonConfig(createClusterWithGlobalType());
assertEquals(2, cfg.documentdb().size());
@@ -149,8 +177,9 @@ public class ContentSearchClusterTest {
}
private static FleetcontrollerConfig getFleetcontrollerConfig(ContentCluster cluster) {
- FleetcontrollerConfig.Builder builder = new FleetcontrollerConfig.Builder();
+ var builder = new FleetcontrollerConfig.Builder();
cluster.getConfig(builder);
+ cluster.getClusterControllerConfig().getConfig(builder);
builder.cluster_name("unknown");
builder.index(0);
builder.zookeeper_server("unknown");
diff --git a/config-model/src/test/java/com/yahoo/vespa/model/content/FleetControllerClusterTest.java b/config-model/src/test/java/com/yahoo/vespa/model/content/FleetControllerClusterTest.java
index 01bbffce360..3a59f35ce2e 100644
--- a/config-model/src/test/java/com/yahoo/vespa/model/content/FleetControllerClusterTest.java
+++ b/config-model/src/test/java/com/yahoo/vespa/model/content/FleetControllerClusterTest.java
@@ -10,6 +10,7 @@ import com.yahoo.vespa.model.builder.xml.dom.ModelElement;
import org.junit.Test;
import org.w3c.dom.Document;
+import static com.yahoo.config.model.test.TestUtil.joinLines;
import static org.junit.Assert.assertEquals;
public class FleetControllerClusterTest {
@@ -19,8 +20,11 @@ public class FleetControllerClusterTest {
var deployState = new DeployState.Builder().properties(
new TestProperties().enableFeedBlockInDistributor(enableFeedBlockInDistributor)).build();
MockRoot root = new MockRoot("", deployState);
- return new ClusterControllerConfig.Builder("storage", new ModelElement(doc.getDocumentElement())).build(root.getDeployState(), root,
- new ModelElement(doc.getDocumentElement()).getXml());
+ var clusterElement = new ModelElement(doc.getDocumentElement());
+ return new ClusterControllerConfig.Builder("storage",
+ clusterElement,
+ new ClusterResourceLimits.Builder().build(clusterElement).getClusterControllerLimits()).
+ build(root.getDeployState(), root, clusterElement.getXml());
}
private ClusterControllerConfig parse(String xml) {
@@ -94,15 +98,43 @@ public class FleetControllerClusterTest {
assertEquals(0.0, config.min_node_ratio_per_group(), 0.01);
}
+
@Test
public void default_cluster_feed_block_limits_are_set() {
- var config = getConfigForBasicCluster();
+ assertLimits(0.79, 0.79, getConfigForBasicCluster());
+ }
+
+ @Test
+ public void resource_limits_can_be_set_in_tuning() {
+ assertLimits(0.6, 0.7, getConfigForResourceLimitsTuning(0.6, 0.7));
+ assertLimits(0.6, 0.79, getConfigForResourceLimitsTuning(0.6, null));
+ assertLimits(0.79, 0.7, getConfigForResourceLimitsTuning(null, 0.7));
+ }
+
+ private static final double DELTA = 0.00001;
+
+ private void assertLimits(double expDisk, double expMemory, FleetcontrollerConfig config) {
var limits = config.cluster_feed_block_limit();
assertEquals(4, limits.size());
- assertEquals(0.79, limits.get("memory"), 0.0001);
- assertEquals(0.79, limits.get("disk"), 0.0001);
- assertEquals(0.89, limits.get("attribute-enum-store"), 0.0001);
- assertEquals(0.89, limits.get("attribute-multi-value"), 0.0001);
+ assertEquals(expDisk, limits.get("disk"), DELTA);
+ assertEquals(expMemory, limits.get("memory"), DELTA);
+ assertEquals(0.89, limits.get("attribute-enum-store"), DELTA);
+ assertEquals(0.89, limits.get("attribute-multi-value"), DELTA);
+ }
+
+ private FleetcontrollerConfig getConfigForResourceLimitsTuning(Double diskLimit, Double memoryLimit) {
+ FleetcontrollerConfig.Builder builder = new FleetcontrollerConfig.Builder();
+ parse(joinLines("<cluster id=\"test\">",
+ "<documents/>",
+ "<tuning>",
+ " <resource-limits>",
+ (diskLimit != null ? (" <disk>" + diskLimit + "</disk>") : ""),
+ (memoryLimit != null ? (" <memory>" + memoryLimit + "</memory>") : ""),
+ " </resource-limits>",
+ "</tuning>" +
+ "</cluster>")).
+ getConfig(builder);
+ return new FleetcontrollerConfig(builder);
}
@Test
diff --git a/config-model/src/test/java/com/yahoo/vespa/model/content/StorageClusterTest.java b/config-model/src/test/java/com/yahoo/vespa/model/content/StorageClusterTest.java
index 9c857414717..5cf57430f91 100644
--- a/config-model/src/test/java/com/yahoo/vespa/model/content/StorageClusterTest.java
+++ b/config-model/src/test/java/com/yahoo/vespa/model/content/StorageClusterTest.java
@@ -287,19 +287,6 @@ public class StorageClusterTest {
assertEquals(StorFilestorConfig.Response_sequencer_type.THROUGHPUT, config.response_sequencer_type());
}
- private void verifyMergeChunkSize(int expected, int value) {
- StorFilestorConfig.Builder builder = new StorFilestorConfig.Builder();
- simpleCluster(new TestProperties().setMergeChunkSize(value)).getConfig(builder);
- StorFilestorConfig config = new StorFilestorConfig(builder);
- assertEquals(expected, config.bucket_merge_chunk_size());
- }
-
- @Test
- public void testFeatureFlagControlOfMergeChunkSize() {
- verifyMergeChunkSize(0x200000, 13);
- verifyMergeChunkSize(0x1600000, 0x1500000);
- }
-
private void verifyAsyncMessageHandlingOnSchedule(boolean expected, boolean value) {
StorFilestorConfig.Builder builder = new StorFilestorConfig.Builder();
simpleCluster(new TestProperties().setAsyncMessageHandlingOnSchedule(value)).getConfig(builder);
diff --git a/config-model/src/test/java/com/yahoo/vespa/model/content/utils/ContentClusterBuilder.java b/config-model/src/test/java/com/yahoo/vespa/model/content/utils/ContentClusterBuilder.java
index 866c03d82f0..491326fdc9c 100644
--- a/config-model/src/test/java/com/yahoo/vespa/model/content/utils/ContentClusterBuilder.java
+++ b/config-model/src/test/java/com/yahoo/vespa/model/content/utils/ContentClusterBuilder.java
@@ -26,6 +26,8 @@ public class ContentClusterBuilder {
private Optional<String> dispatchXml = Optional.empty();
private Optional<Double> protonDiskLimit = Optional.empty();
private Optional<Double> protonMemoryLimit = Optional.empty();
+ private Optional<Double> clusterControllerDiskLimit = Optional.empty();
+ private Optional<Double> clusterControllerMemoryLimit = Optional.empty();
public ContentClusterBuilder() {
}
@@ -67,13 +69,23 @@ public class ContentClusterBuilder {
return this;
}
- public ContentClusterBuilder protonDiskLimit(double diskLimit) {
- protonDiskLimit = Optional.of(diskLimit);
+ public ContentClusterBuilder protonDiskLimit(double limit) {
+ protonDiskLimit = Optional.of(limit);
return this;
}
- public ContentClusterBuilder protonMemoryLimit(double memoryLimit) {
- protonMemoryLimit = Optional.of(memoryLimit);
+ public ContentClusterBuilder protonMemoryLimit(double limit) {
+ protonMemoryLimit = Optional.of(limit);
+ return this;
+ }
+
+ public ContentClusterBuilder clusterControllerDiskLimit(double limit) {
+ clusterControllerDiskLimit = Optional.of(limit);
+ return this;
+ }
+
+ public ContentClusterBuilder clusterControllerMemoryLimit(double limit) {
+ clusterControllerMemoryLimit = Optional.of(limit);
return this;
}
@@ -88,14 +100,17 @@ public class ContentClusterBuilder {
" <engine>",
" <proton>",
" <searchable-copies>" + searchableCopies + "</searchable-copies>",
- getResourceLimitsXml(" "),
+ getProtonResourceLimitsXml(" "),
" </proton>",
" </engine>");
if (dispatchXml.isPresent()) {
xml += dispatchXml.get();
}
- return xml + groupXml +
- "</content>";
+ xml += groupXml;
+ xml += joinLines(" <tuning>",
+ getTuningResourceLimitsXml(" "),
+ " </tuning>");
+ return xml + "</content>";
}
private static String getSimpleGroupXml() {
@@ -104,11 +119,19 @@ public class ContentClusterBuilder {
" </group>");
}
- private String getResourceLimitsXml(String indent) {
- if (protonDiskLimit.isPresent() || protonMemoryLimit.isPresent()) {
+ private String getProtonResourceLimitsXml(String indent) {
+ return getResourceLimitsXml(indent, protonDiskLimit, protonMemoryLimit);
+ }
+
+ private String getTuningResourceLimitsXml(String indent) {
+ return getResourceLimitsXml(indent, clusterControllerDiskLimit, clusterControllerMemoryLimit);
+ }
+
+ private String getResourceLimitsXml(String indent, Optional<Double> diskLimit, Optional<Double> memoryLimit) {
+ if (diskLimit.isPresent() || memoryLimit.isPresent()) {
String xml = joinLines(indent + "<resource-limits>",
- getXmlLine("disk", protonDiskLimit, indent + " "),
- getXmlLine("memory", protonMemoryLimit, indent + " "),
+ getXmlLine("disk", diskLimit, indent + " "),
+ getXmlLine("memory", memoryLimit, indent + " "),
indent + "</resource-limits>");
return xml;
}
diff --git a/configserver/src/main/java/com/yahoo/vespa/config/server/deploy/ModelContextImpl.java b/configserver/src/main/java/com/yahoo/vespa/config/server/deploy/ModelContextImpl.java
index fa30cb895c0..aa9ae65394f 100644
--- a/configserver/src/main/java/com/yahoo/vespa/config/server/deploy/ModelContextImpl.java
+++ b/configserver/src/main/java/com/yahoo/vespa/config/server/deploy/ModelContextImpl.java
@@ -147,7 +147,6 @@ public class ModelContextImpl implements ModelContext {
public static class FeatureFlags implements ModelContext.FeatureFlags {
- private final boolean enableAutomaticReindexing;
private final double reindexerWindowSizeIncrement;
private final double defaultTermwiseLimit;
private final boolean useThreePhaseUpdates;
@@ -159,7 +158,6 @@ public class ModelContextImpl implements ModelContext {
private final boolean skipMbusReplyThread;
private final boolean useAccessControlTlsHandshakeClientAuth;
private final boolean useAsyncMessageHandlingOnSchedule;
- private final int mergeChunkSize;
private final double feedConcurrency;
private final boolean reconfigurableZookeeperServer;
private final boolean enableJdiscConnectionLog;
@@ -168,7 +166,6 @@ public class ModelContextImpl implements ModelContext {
private final boolean enableFeedBlockInDistributor;
public FeatureFlags(FlagSource source, ApplicationId appId) {
- this.enableAutomaticReindexing = flagValue(source, appId, Flags.ENABLE_AUTOMATIC_REINDEXING);
this.reindexerWindowSizeIncrement = flagValue(source, appId, Flags.REINDEXER_WINDOW_SIZE_INCREMENT);
this.defaultTermwiseLimit = flagValue(source, appId, Flags.DEFAULT_TERM_WISE_LIMIT);
this.useThreePhaseUpdates = flagValue(source, appId, Flags.USE_THREE_PHASE_UPDATES);
@@ -180,7 +177,6 @@ public class ModelContextImpl implements ModelContext {
this.skipMbusReplyThread = flagValue(source, appId, Flags.SKIP_MBUS_REPLY_THREAD);
this.useAccessControlTlsHandshakeClientAuth = flagValue(source, appId, Flags.USE_ACCESS_CONTROL_CLIENT_AUTHENTICATION);
this.useAsyncMessageHandlingOnSchedule = flagValue(source, appId, Flags.USE_ASYNC_MESSAGE_HANDLING_ON_SCHEDULE);
- this.mergeChunkSize = flagValue(source, appId, Flags.MERGE_CHUNK_SIZE);
this.feedConcurrency = flagValue(source, appId, Flags.FEED_CONCURRENCY);
this.reconfigurableZookeeperServer = flagValue(source, appId, Flags.RECONFIGURABLE_ZOOKEEPER_SERVER_FOR_CLUSTER_CONTROLLER);
this.enableJdiscConnectionLog = flagValue(source, appId, Flags.ENABLE_JDISC_CONNECTION_LOG);
@@ -189,7 +185,6 @@ public class ModelContextImpl implements ModelContext {
this.enableFeedBlockInDistributor = flagValue(source, appId, Flags.ENABLE_FEED_BLOCK_IN_DISTRIBUTOR);
}
- @Override public boolean enableAutomaticReindexing() { return enableAutomaticReindexing; }
@Override public double reindexerWindowSizeIncrement() { return reindexerWindowSizeIncrement; }
@Override public double defaultTermwiseLimit() { return defaultTermwiseLimit; }
@Override public boolean useThreePhaseUpdates() { return useThreePhaseUpdates; }
@@ -201,7 +196,6 @@ public class ModelContextImpl implements ModelContext {
@Override public boolean skipMbusReplyThread() { return skipMbusReplyThread; }
@Override public boolean useAccessControlTlsHandshakeClientAuth() { return useAccessControlTlsHandshakeClientAuth; }
@Override public boolean useAsyncMessageHandlingOnSchedule() { return useAsyncMessageHandlingOnSchedule; }
- @Override public int mergeChunkSize() { return mergeChunkSize; }
@Override public double feedConcurrency() { return feedConcurrency; }
@Override public boolean reconfigurableZookeeperServer() { return reconfigurableZookeeperServer; }
@Override public boolean enableJdiscConnectionLog() { return enableJdiscConnectionLog; }
diff --git a/configserver/src/test/java/com/yahoo/vespa/config/server/ModelContextImplTest.java b/configserver/src/test/java/com/yahoo/vespa/config/server/ModelContextImplTest.java
index 582ecd13ce5..b1edc031e0b 100644
--- a/configserver/src/test/java/com/yahoo/vespa/config/server/ModelContextImplTest.java
+++ b/configserver/src/test/java/com/yahoo/vespa/config/server/ModelContextImplTest.java
@@ -95,7 +95,6 @@ public class ModelContextImplTest {
assertEquals(new Version(8), context.wantedNodeVespaVersion());
assertEquals(1.0, context.properties().featureFlags().defaultTermwiseLimit(), 0.0);
assertFalse(context.properties().featureFlags().useAsyncMessageHandlingOnSchedule());
- assertEquals(0x2000000, context.properties().featureFlags().mergeChunkSize());
assertEquals(0.5, context.properties().featureFlags().feedConcurrency(), 0.0);
}
diff --git a/container-core/pom.xml b/container-core/pom.xml
index 7c98b524c73..051b572b28f 100644
--- a/container-core/pom.xml
+++ b/container-core/pom.xml
@@ -42,6 +42,7 @@
<scope>test</scope>
</dependency>
<dependency>
+ <!-- TODO Vespa 8: stop providing org.json:json -->
<groupId>org.json</groupId>
<artifactId>json</artifactId>
</dependency>
diff --git a/container-core/src/main/java/com/yahoo/container/handler/metrics/HttpHandlerBase.java b/container-core/src/main/java/com/yahoo/container/handler/metrics/HttpHandlerBase.java
index 92840cee48f..8c902f88e38 100644
--- a/container-core/src/main/java/com/yahoo/container/handler/metrics/HttpHandlerBase.java
+++ b/container-core/src/main/java/com/yahoo/container/handler/metrics/HttpHandlerBase.java
@@ -1,13 +1,14 @@
// Copyright 2020 Oath Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.container.handler.metrics;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yahoo.container.jdisc.HttpRequest;
import com.yahoo.container.jdisc.HttpResponse;
import com.yahoo.container.jdisc.ThreadedHttpRequestHandler;
import com.yahoo.restapi.Path;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
import java.net.URI;
import java.util.List;
@@ -26,6 +27,8 @@ import static java.util.logging.Level.WARNING;
*/
public abstract class HttpHandlerBase extends ThreadedHttpRequestHandler {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
protected HttpHandlerBase(Executor executor) {
super(executor);
}
@@ -49,15 +52,14 @@ public abstract class HttpHandlerBase extends ThreadedHttpRequestHandler {
protected JsonResponse resourceListResponse(URI requestUri, List<String> resources) {
try {
return new JsonResponse(OK, resourceList(requestUri, resources));
- } catch (JSONException e) {
+ } catch (JsonProcessingException e) {
log.log(WARNING, "Bad JSON construction in generated resource list for " + requestUri.getPath(), e);
return new ErrorResponse(INTERNAL_SERVER_ERROR,
"An error occurred when generating the list of api resources.");
}
}
- // TODO: Use jackson with a "Resources" class instead of JSONObject
- private static String resourceList(URI requestUri, List<String> resources) throws JSONException {
+ private static String resourceList(URI requestUri, List<String> resources) throws JsonProcessingException {
int port = requestUri.getPort();
String host = requestUri.getHost();
StringBuilder base = new StringBuilder("http://");
@@ -66,13 +68,14 @@ public abstract class HttpHandlerBase extends ThreadedHttpRequestHandler {
base.append(":").append(port);
}
String uriBase = base.toString();
- JSONArray linkList = new JSONArray();
+ ArrayNode linkList = jsonMapper.createArrayNode();
for (String api : resources) {
- JSONObject resource = new JSONObject();
+ ObjectNode resource = jsonMapper.createObjectNode();
resource.put("url", uriBase + api);
- linkList.put(resource);
+ linkList.add(resource);
}
- return new JSONObject().put("resources", linkList).toString(4);
+ return jsonMapper.writerWithDefaultPrettyPrinter()
+ .writeValueAsString(jsonMapper.createObjectNode().set("resources", linkList));
}
}
diff --git a/container-core/src/main/java/com/yahoo/container/jdisc/state/CoredumpGatherer.java b/container-core/src/main/java/com/yahoo/container/jdisc/state/CoredumpGatherer.java
index d105eaa9d98..f1ef7894511 100644
--- a/container-core/src/main/java/com/yahoo/container/jdisc/state/CoredumpGatherer.java
+++ b/container-core/src/main/java/com/yahoo/container/jdisc/state/CoredumpGatherer.java
@@ -1,17 +1,16 @@
// Copyright 2019 Oath Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.container.jdisc.state;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yahoo.vespa.defaults.Defaults;
-import org.json.JSONException;
-import org.json.JSONObject;
import java.io.IOException;
import java.io.UncheckedIOException;
-import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.time.Instant;
-import java.util.Set;
import java.util.stream.Stream;
/**
@@ -19,19 +18,17 @@ import java.util.stream.Stream;
*/
public class CoredumpGatherer {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
private static final Path COREDUMP_PATH = Path.of(Defaults.getDefaults().underVespaHome("var/crash/processing"));
- public static JSONObject gatherCoredumpMetrics(FileWrapper fileWrapper) {
+ public static JsonNode gatherCoredumpMetrics(FileWrapper fileWrapper) {
int coredumps = getNumberOfCoredumps(fileWrapper);
- JSONObject packet = new JSONObject();
-
- try {
- packet.put("status_code", coredumps == 0 ? 0 : 1);
- packet.put("status_msg", coredumps == 0 ? "OK" : String.format("Found %d coredump(s)", coredumps));
- packet.put("timestamp", Instant.now().getEpochSecond());
- packet.put("application", "system-coredumps-processing");
-
- } catch (JSONException e) {}
+ ObjectNode packet = jsonMapper.createObjectNode();
+ packet.put("status_code", coredumps == 0 ? 0 : 1);
+ packet.put("status_msg", coredumps == 0 ? "OK" : String.format("Found %d coredump(s)", coredumps));
+ packet.put("timestamp", Instant.now().getEpochSecond());
+ packet.put("application", "system-coredumps-processing");
return packet;
}
diff --git a/container-core/src/main/java/com/yahoo/container/jdisc/state/HostLifeGatherer.java b/container-core/src/main/java/com/yahoo/container/jdisc/state/HostLifeGatherer.java
index 730f7bc13cd..28f99096d84 100644
--- a/container-core/src/main/java/com/yahoo/container/jdisc/state/HostLifeGatherer.java
+++ b/container-core/src/main/java/com/yahoo/container/jdisc/state/HostLifeGatherer.java
@@ -1,8 +1,9 @@
// Copyright 2019 Oath Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.container.jdisc.state;
-import org.json.JSONException;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import java.io.IOException;
import java.nio.file.Path;
@@ -13,9 +14,11 @@ import java.time.Instant;
*/
public class HostLifeGatherer {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
private static final Path UPTIME_PATH = Path.of("/proc");
- public static JSONObject getHostLifePacket(FileWrapper fileWrapper) {
+ public static JsonNode getHostLifePacket(FileWrapper fileWrapper) {
long upTime;
int statusCode = 0;
String statusMessage = "OK";
@@ -29,19 +32,15 @@ public class HostLifeGatherer {
}
- JSONObject jsonObject = new JSONObject();
- try {
- jsonObject.put("status_code", statusCode);
- jsonObject.put("status_msg", statusMessage);
- jsonObject.put("timestamp", Instant.now().getEpochSecond());
- jsonObject.put("application", "host_life");
- JSONObject metrics = new JSONObject();
- metrics.put("uptime", upTime);
- metrics.put("alive", 1);
- jsonObject.put("metrics", metrics);
-
- } catch (JSONException e) {}
-
+ ObjectNode jsonObject = jsonMapper.createObjectNode();
+ jsonObject.put("status_code", statusCode);
+ jsonObject.put("status_msg", statusMessage);
+ jsonObject.put("timestamp", Instant.now().getEpochSecond());
+ jsonObject.put("application", "host_life");
+ ObjectNode metrics = jsonMapper.createObjectNode();
+ metrics.put("uptime", upTime);
+ metrics.put("alive", 1);
+ jsonObject.set("metrics", metrics);
return jsonObject;
}
diff --git a/container-core/src/main/java/com/yahoo/container/jdisc/state/JSONObjectWithLegibleException.java b/container-core/src/main/java/com/yahoo/container/jdisc/state/JSONObjectWithLegibleException.java
deleted file mode 100644
index d22dd9d6f4b..00000000000
--- a/container-core/src/main/java/com/yahoo/container/jdisc/state/JSONObjectWithLegibleException.java
+++ /dev/null
@@ -1,87 +0,0 @@
-// Copyright 2018 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
-package com.yahoo.container.jdisc.state;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import java.util.Collection;
-import java.util.Map;
-
-/**
- * A JSONObject that wraps the checked JSONException in a RuntimeException with a legible error message.
- *
- * @author gjoranv
- */
-class JSONObjectWithLegibleException extends JSONObject {
-
- @Override
- public JSONObject put(String s, boolean b) {
- try {
- return super.put(s, b);
- } catch (JSONException e) {
- throw new RuntimeException(getErrorMessage(s, b, e), e);
- }
- }
-
- @Override
- public JSONObject put(String s, double v) {
- try {
- Double guardedVal = (((Double) v).isNaN() || ((Double) v).isInfinite()) ?
- 0.0 : v;
- return super.put(s, guardedVal);
- } catch (JSONException e) {
- throw new RuntimeException(getErrorMessage(s, v, e), e);
- }
- }
-
- @Override
- public JSONObject put(String s, int i) {
- try {
- return super.put(s, i);
- } catch (JSONException e) {
- throw new RuntimeException(getErrorMessage(s, i, e), e);
- }
- }
-
- @Override
- public JSONObject put(String s, long l) {
- try {
- return super.put(s, l);
- } catch (JSONException e) {
- throw new RuntimeException(getErrorMessage(s, l, e), e);
- }
- }
-
- @Override
- public JSONObject put(String s, Collection collection) {
- try {
- return super.put(s, collection);
- } catch (JSONException e) {
- throw new RuntimeException(getErrorMessage(s, collection, e), e);
- }
- }
-
- @Override
- public JSONObject put(String s, Map map) {
- try {
- return super.put(s, map);
- } catch (JSONException e) {
- throw new RuntimeException(getErrorMessage(s, map, e), e);
- }
- }
-
- @Override
- public JSONObject put(String s, Object o) {
- try {
- return super.put(s, o);
- } catch (JSONException e) {
- throw new RuntimeException(getErrorMessage(s, o, e), e);
- }
- }
-
- private String getErrorMessage(String key, Object value, JSONException e) {
- return "Trying to add invalid JSON object with key '" + key +
- "' and value '" + value + "' - " + e.getMessage();
- }
-
-}
diff --git a/container-core/src/main/java/com/yahoo/container/jdisc/state/JsonUtil.java b/container-core/src/main/java/com/yahoo/container/jdisc/state/JsonUtil.java
new file mode 100644
index 00000000000..4c697fb5ada
--- /dev/null
+++ b/container-core/src/main/java/com/yahoo/container/jdisc/state/JsonUtil.java
@@ -0,0 +1,15 @@
+// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+package com.yahoo.container.jdisc.state;
+
+/**
+ * @author bjorncs
+ */
+class JsonUtil {
+
+ private JsonUtil() {}
+
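+ /** Returns 0.0 for NaN and infinite values, so the result always serializes to a plain JSON number; otherwise returns the value unchanged. */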
+ static double sanitizeDouble(double value) {
+ return (((Double) value).isNaN() || ((Double) value).isInfinite()) ? 0.0 : value;
+ }
+
+}
diff --git a/container-core/src/main/java/com/yahoo/container/jdisc/state/MetricGatherer.java b/container-core/src/main/java/com/yahoo/container/jdisc/state/MetricGatherer.java
index 6a06a6362f5..add69403455 100644
--- a/container-core/src/main/java/com/yahoo/container/jdisc/state/MetricGatherer.java
+++ b/container-core/src/main/java/com/yahoo/container/jdisc/state/MetricGatherer.java
@@ -1,7 +1,7 @@
// Copyright 2019 Oath Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.container.jdisc.state;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.JsonNode;
import java.util.ArrayList;
import java.util.List;
@@ -13,9 +13,9 @@ import java.util.List;
*/
public class MetricGatherer {
- static List<JSONObject> getAdditionalMetrics() {
+ static List<JsonNode> getAdditionalMetrics() {
FileWrapper fileWrapper = new FileWrapper();
- List<JSONObject> packetList = new ArrayList<>();
+ List<JsonNode> packetList = new ArrayList<>();
packetList.add(CoredumpGatherer.gatherCoredumpMetrics(fileWrapper));
if (System.getProperty("os.name").contains("nux"))
packetList.add(HostLifeGatherer.getHostLifePacket(fileWrapper));
diff --git a/container-core/src/main/java/com/yahoo/container/jdisc/state/MetricsPacketsHandler.java b/container-core/src/main/java/com/yahoo/container/jdisc/state/MetricsPacketsHandler.java
index 3d3f0e4b677..824323af3d6 100644
--- a/container-core/src/main/java/com/yahoo/container/jdisc/state/MetricsPacketsHandler.java
+++ b/container-core/src/main/java/com/yahoo/container/jdisc/state/MetricsPacketsHandler.java
@@ -1,6 +1,11 @@
// Copyright 2018 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.container.jdisc.state;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.inject.Inject;
import com.yahoo.collections.Tuple2;
import com.yahoo.component.provider.ComponentRegistry;
@@ -13,9 +18,6 @@ import com.yahoo.jdisc.handler.ResponseDispatch;
import com.yahoo.jdisc.handler.ResponseHandler;
import com.yahoo.jdisc.http.HttpHeaders;
import com.yahoo.metrics.MetricsPresentationConfig;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
@@ -26,6 +28,7 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
+import static com.yahoo.container.jdisc.state.JsonUtil.sanitizeDouble;
import static com.yahoo.container.jdisc.state.StateHandler.getSnapshotPreprocessor;
/**
@@ -44,6 +47,8 @@ import static com.yahoo.container.jdisc.state.StateHandler.getSnapshotPreprocess
*/
public class MetricsPacketsHandler extends AbstractRequestHandler {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
static final String APPLICATION_KEY = "application";
static final String TIMESTAMP_KEY = "timestamp";
static final String STATUS_CODE_KEY = "status_code";
@@ -97,19 +102,19 @@ public class MetricsPacketsHandler extends AbstractRequestHandler {
}
String output = jsonToString(getStatusPacket()) + getAllMetricsPackets() + "\n";
return output.getBytes(StandardCharsets.UTF_8);
- } catch (JSONException e) {
+ } catch (JsonProcessingException e) {
throw new RuntimeException("Bad JSON construction.", e);
}
}
- private byte[] getMetricsArray() throws JSONException {
- JSONObject root = new JSONObject();
- JSONArray jsonArray = new JSONArray();
- jsonArray.put(getStatusPacket());
+ private byte[] getMetricsArray() throws JsonProcessingException {
+ ObjectNode root = jsonMapper.createObjectNode();
+ ArrayNode jsonArray = jsonMapper.createArrayNode();
+ jsonArray.add(getStatusPacket());
getPacketsForSnapshot(getSnapshot(), applicationName, timer.currentTimeMillis())
- .forEach(jsonArray::put);
- MetricGatherer.getAdditionalMetrics().forEach(jsonArray::put);
- root.put("metrics", jsonArray);
+ .forEach(jsonArray::add);
+ MetricGatherer.getAdditionalMetrics().forEach(jsonArray::add);
+ root.set("metrics", jsonArray);
return jsonToString(root)
.getBytes(StandardCharsets.UTF_8);
}
@@ -117,8 +122,8 @@ public class MetricsPacketsHandler extends AbstractRequestHandler {
/**
* Exactly one status packet is added to the response.
*/
- private JSONObject getStatusPacket() throws JSONException {
- JSONObject packet = new JSONObjectWithLegibleException();
+ private JsonNode getStatusPacket() {
+ ObjectNode packet = jsonMapper.createObjectNode();
packet.put(APPLICATION_KEY, applicationName);
StateMonitor.Status status = monitor.status();
@@ -127,14 +132,15 @@ public class MetricsPacketsHandler extends AbstractRequestHandler {
return packet;
}
- private String jsonToString(JSONObject jsonObject) throws JSONException {
- return jsonObject.toString(4);
+ private static String jsonToString(JsonNode jsonObject) throws JsonProcessingException {
+ return jsonMapper.writerWithDefaultPrettyPrinter()
+ .writeValueAsString(jsonObject);
}
- private String getAllMetricsPackets() throws JSONException {
+ private String getAllMetricsPackets() throws JsonProcessingException {
StringBuilder ret = new StringBuilder();
- List<JSONObject> metricsPackets = getPacketsForSnapshot(getSnapshot(), applicationName, timer.currentTimeMillis());
- for (JSONObject packet : metricsPackets) {
+ List<JsonNode> metricsPackets = getPacketsForSnapshot(getSnapshot(), applicationName, timer.currentTimeMillis());
+ for (JsonNode packet : metricsPackets) {
ret.append(PACKET_SEPARATOR); // For legibility and parsing in unit tests
ret.append(jsonToString(packet));
}
@@ -150,16 +156,16 @@ public class MetricsPacketsHandler extends AbstractRequestHandler {
}
}
- private List<JSONObject> getPacketsForSnapshot(MetricSnapshot metricSnapshot, String application, long timestamp) throws JSONException {
+ private List<JsonNode> getPacketsForSnapshot(MetricSnapshot metricSnapshot, String application, long timestamp) {
if (metricSnapshot == null) return Collections.emptyList();
- List<JSONObject> packets = new ArrayList<>();
+ List<JsonNode> packets = new ArrayList<>();
for (Map.Entry<MetricDimensions, MetricSet> snapshotEntry : metricSnapshot) {
MetricDimensions metricDimensions = snapshotEntry.getKey();
MetricSet metricSet = snapshotEntry.getValue();
- JSONObjectWithLegibleException packet = new JSONObjectWithLegibleException();
+ ObjectNode packet = jsonMapper.createObjectNode();
addMetaData(timestamp, application, packet);
addDimensions(metricDimensions, packet);
addMetrics(metricSet, packet);
@@ -168,27 +174,27 @@ public class MetricsPacketsHandler extends AbstractRequestHandler {
return packets;
}
- private void addMetaData(long timestamp, String application, JSONObjectWithLegibleException packet) {
+ private void addMetaData(long timestamp, String application, ObjectNode packet) {
packet.put(APPLICATION_KEY, application);
packet.put(TIMESTAMP_KEY, TimeUnit.MILLISECONDS.toSeconds(timestamp));
}
- private void addDimensions(MetricDimensions metricDimensions, JSONObjectWithLegibleException packet) throws JSONException {
+ private void addDimensions(MetricDimensions metricDimensions, ObjectNode packet) {
if (metricDimensions == null) return;
Iterator<Map.Entry<String, String>> dimensionsIterator = metricDimensions.iterator();
if (dimensionsIterator.hasNext()) {
- JSONObject jsonDim = new JSONObjectWithLegibleException();
- packet.put(DIMENSIONS_KEY, jsonDim);
+ ObjectNode jsonDim = jsonMapper.createObjectNode();
+ packet.set(DIMENSIONS_KEY, jsonDim);
for (Map.Entry<String, String> dimensionEntry : metricDimensions) {
jsonDim.put(dimensionEntry.getKey(), dimensionEntry.getValue());
}
}
}
- private void addMetrics(MetricSet metricSet, JSONObjectWithLegibleException packet) throws JSONException {
- JSONObjectWithLegibleException metrics = new JSONObjectWithLegibleException();
- packet.put(METRICS_KEY, metrics);
+ private void addMetrics(MetricSet metricSet, ObjectNode packet) {
+ ObjectNode metrics = jsonMapper.createObjectNode();
+ packet.set(METRICS_KEY, metrics);
for (Map.Entry<String, MetricValue> metric : metricSet) {
String name = metric.getKey();
MetricValue value = metric.getValue();
@@ -196,9 +202,9 @@ public class MetricsPacketsHandler extends AbstractRequestHandler {
metrics.put(name + ".count", ((CountMetric) value).getCount());
} else if (value instanceof GaugeMetric) {
GaugeMetric gauge = (GaugeMetric) value;
- metrics.put(name + ".average", gauge.getAverage())
- .put(name + ".last", gauge.getLast())
- .put(name + ".max", gauge.getMax());
+ metrics.put(name + ".average", sanitizeDouble(gauge.getAverage()))
+ .put(name + ".last", sanitizeDouble(gauge.getLast()))
+ .put(name + ".max", sanitizeDouble(gauge.getMax()));
if (gauge.getPercentiles().isPresent()) {
for (Tuple2<String, Double> prefixAndValue : gauge.getPercentiles().get()) {
metrics.put(name + "." + prefixAndValue.first + "percentile", prefixAndValue.second.doubleValue());
diff --git a/container-core/src/main/java/com/yahoo/container/jdisc/state/StateHandler.java b/container-core/src/main/java/com/yahoo/container/jdisc/state/StateHandler.java
index b14dc50edcb..5ac2871d9dd 100644
--- a/container-core/src/main/java/com/yahoo/container/jdisc/state/StateHandler.java
+++ b/container-core/src/main/java/com/yahoo/container/jdisc/state/StateHandler.java
@@ -1,6 +1,11 @@
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.container.jdisc.state;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.inject.Inject;
import com.yahoo.collections.Tuple2;
import com.yahoo.component.Vtag;
@@ -16,21 +21,18 @@ import com.yahoo.jdisc.handler.ResponseHandler;
import com.yahoo.jdisc.http.HttpHeaders;
import com.yahoo.metrics.MetricsPresentationConfig;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
import java.net.URI;
import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
-import java.io.PrintStream;
-import java.io.ByteArrayOutputStream;
+
+import static com.yahoo.container.jdisc.state.JsonUtil.sanitizeDouble;
/**
* A handler which returns state (health) information from this container instance: Status, metrics and vespa version.
@@ -39,6 +41,8 @@ import java.io.ByteArrayOutputStream;
*/
public class StateHandler extends AbstractRequestHandler {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
public static final String STATE_API_ROOT = "/state/v1";
private static final String METRICS_PATH = "metrics";
private static final String HISTOGRAMS_PATH = "metrics/histograms";
@@ -124,17 +128,16 @@ public class StateHandler extends AbstractRequestHandler {
}
base.append(STATE_API_ROOT);
String uriBase = base.toString();
- JSONArray linkList = new JSONArray();
+ ArrayNode linkList = jsonMapper.createArrayNode();
for (String api : new String[] {METRICS_PATH, CONFIG_GENERATION_PATH, HEALTH_PATH, VERSION_PATH}) {
- JSONObject resource = new JSONObject();
+ ObjectNode resource = jsonMapper.createObjectNode();
resource.put("url", uriBase + "/" + api);
- linkList.put(resource);
+ linkList.add(resource);
}
- return new JSONObjectWithLegibleException()
- .put("resources", linkList)
- .toString(4).getBytes(StandardCharsets.UTF_8);
- } catch (JSONException e) {
- throw new RuntimeException("Bad JSON construction.", e);
+ JsonNode resources = jsonMapper.createObjectNode().set("resources", linkList);
+ return toPrettyString(resources);
+ } catch (JsonProcessingException e) {
+ throw new RuntimeException("Bad JSON construction", e);
}
}
@@ -154,31 +157,31 @@ public class StateHandler extends AbstractRequestHandler {
private static byte[] buildConfigOutput(ApplicationMetadataConfig config) {
try {
- return new JSONObjectWithLegibleException()
- .put(CONFIG_GENERATION_PATH, new JSONObjectWithLegibleException()
- .put("generation", config.generation())
- .put("container", new JSONObjectWithLegibleException()
- .put("generation", config.generation())))
- .toString(4).getBytes(StandardCharsets.UTF_8);
- } catch (JSONException e) {
+ return toPrettyString(
+ jsonMapper.createObjectNode()
+ .set(CONFIG_GENERATION_PATH, jsonMapper.createObjectNode()
+ .put("generation", config.generation())
+ .set("container", jsonMapper.createObjectNode()
+ .put("generation", config.generation()))));
+ } catch (JsonProcessingException e) {
throw new RuntimeException("Bad JSON construction.", e);
}
}
private static byte[] buildVersionOutput() {
try {
- return new JSONObjectWithLegibleException()
- .put("version", Vtag.currentVersion)
- .toString(4).getBytes(StandardCharsets.UTF_8);
- } catch (JSONException e) {
+ return toPrettyString(
+ jsonMapper.createObjectNode()
+ .put("version", Vtag.currentVersion.toString()));
+ } catch (JsonProcessingException e) {
throw new RuntimeException("Bad JSON construction.", e);
}
}
private byte[] buildMetricOutput(String consumer) {
try {
- return buildJsonForConsumer(consumer).toString(4).getBytes(StandardCharsets.UTF_8);
- } catch (JSONException e) {
+ return toPrettyString(buildJsonForConsumer(consumer));
+ } catch (JsonProcessingException e) {
throw new RuntimeException("Bad JSON construction.", e);
}
}
@@ -191,11 +194,11 @@ public class StateHandler extends AbstractRequestHandler {
return baos.toByteArray();
}
- private JSONObjectWithLegibleException buildJsonForConsumer(String consumer) throws JSONException {
- JSONObjectWithLegibleException ret = new JSONObjectWithLegibleException();
+ private ObjectNode buildJsonForConsumer(String consumer) {
+ ObjectNode ret = jsonMapper.createObjectNode();
ret.put("time", timer.currentTimeMillis());
- ret.put("status", new JSONObjectWithLegibleException().put("code", getStatus().name()));
- ret.put(METRICS_PATH, buildJsonForSnapshot(consumer, getSnapshot()));
+ ret.set("status", jsonMapper.createObjectNode().put("code", getStatus().name()));
+ ret.set(METRICS_PATH, buildJsonForSnapshot(consumer, getSnapshot()));
return ret;
}
@@ -212,57 +215,62 @@ public class StateHandler extends AbstractRequestHandler {
return monitor.status();
}
- private JSONObjectWithLegibleException buildJsonForSnapshot(String consumer, MetricSnapshot metricSnapshot) throws JSONException {
+ private ObjectNode buildJsonForSnapshot(String consumer, MetricSnapshot metricSnapshot) {
if (metricSnapshot == null) {
- return new JSONObjectWithLegibleException();
+ return jsonMapper.createObjectNode();
}
- JSONObjectWithLegibleException jsonMetric = new JSONObjectWithLegibleException();
- jsonMetric.put("snapshot", new JSONObjectWithLegibleException()
- .put("from", metricSnapshot.getFromTime(TimeUnit.MILLISECONDS) / 1000.0)
- .put("to", metricSnapshot.getToTime(TimeUnit.MILLISECONDS) / 1000.0));
+ ObjectNode jsonMetric = jsonMapper.createObjectNode();
+ jsonMetric.set("snapshot", jsonMapper.createObjectNode()
+ .put("from", sanitizeDouble(metricSnapshot.getFromTime(TimeUnit.MILLISECONDS) / 1000.0))
+ .put("to", sanitizeDouble(metricSnapshot.getToTime(TimeUnit.MILLISECONDS) / 1000.0)));
boolean includeDimensions = !consumer.equals(HEALTH_PATH);
long periodInMillis = metricSnapshot.getToTime(TimeUnit.MILLISECONDS) -
metricSnapshot.getFromTime(TimeUnit.MILLISECONDS);
for (Tuple tuple : collapseMetrics(metricSnapshot, consumer)) {
- JSONObjectWithLegibleException jsonTuple = new JSONObjectWithLegibleException();
+ ObjectNode jsonTuple = jsonMapper.createObjectNode();
jsonTuple.put("name", tuple.key);
if (tuple.val instanceof CountMetric) {
CountMetric count = (CountMetric)tuple.val;
- jsonTuple.put("values", new JSONObjectWithLegibleException()
+ jsonTuple.set("values", jsonMapper.createObjectNode()
.put("count", count.getCount())
- .put("rate", (count.getCount() * 1000.0) / periodInMillis));
+ .put("rate", sanitizeDouble(count.getCount() * 1000.0) / periodInMillis));
} else if (tuple.val instanceof GaugeMetric) {
GaugeMetric gauge = (GaugeMetric) tuple.val;
- JSONObjectWithLegibleException valueFields = new JSONObjectWithLegibleException();
- valueFields.put("average", gauge.getAverage())
- .put("sum", gauge.getSum())
+ ObjectNode valueFields = jsonMapper.createObjectNode();
+ valueFields.put("average", sanitizeDouble(gauge.getAverage()))
+ .put("sum", sanitizeDouble(gauge.getSum()))
.put("count", gauge.getCount())
- .put("last", gauge.getLast())
- .put("max", gauge.getMax())
- .put("min", gauge.getMin())
- .put("rate", (gauge.getCount() * 1000.0) / periodInMillis);
+ .put("last", sanitizeDouble(gauge.getLast()))
+ .put("max", sanitizeDouble(gauge.getMax()))
+ .put("min", sanitizeDouble(gauge.getMin()))
+ .put("rate", sanitizeDouble((gauge.getCount() * 1000.0) / periodInMillis));
if (gauge.getPercentiles().isPresent()) {
for (Tuple2<String, Double> prefixAndValue : gauge.getPercentiles().get()) {
- valueFields.put(prefixAndValue.first + "percentile", prefixAndValue.second.doubleValue());
+ valueFields.put(prefixAndValue.first + "percentile", sanitizeDouble(prefixAndValue.second));
}
}
- jsonTuple.put("values", valueFields);
+ jsonTuple.set("values", valueFields);
} else {
throw new UnsupportedOperationException(tuple.val.getClass().getName());
}
if (tuple.dim != null) {
Iterator<Map.Entry<String, String>> it = tuple.dim.iterator();
if (it.hasNext() && includeDimensions) {
- JSONObjectWithLegibleException jsonDim = new JSONObjectWithLegibleException();
+ ObjectNode jsonDim = jsonMapper.createObjectNode();
while (it.hasNext()) {
Map.Entry<String, String> entry = it.next();
jsonDim.put(entry.getKey(), entry.getValue());
}
- jsonTuple.put("dimensions", jsonDim);
+ jsonTuple.set("dimensions", jsonDim);
}
}
- jsonMetric.append("values", jsonTuple);
+ ArrayNode values = (ArrayNode) jsonMetric.get("values");
+ if (values == null) {
+ values = jsonMapper.createArrayNode();
+ jsonMetric.set("values", values);
+ }
+ values.add(jsonTuple);
}
return jsonMetric;
}
@@ -316,6 +324,12 @@ public class StateHandler extends AbstractRequestHandler {
return metrics;
}
+ private static byte[] toPrettyString(JsonNode resources) throws JsonProcessingException {
+ return jsonMapper.writerWithDefaultPrettyPrinter()
+ // writeValueAsBytes always encodes UTF-8, avoiding the platform-default charset of getBytes()
+ .writeValueAsBytes(resources);
+ }
+
static class Tuple {
final MetricDimensions dim;
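A note on the StateHandler rewrite above: Jackson's ObjectNode uses put(...) for scalar values and set(...) for child nodes, and the get-or-create dance around the "values" array in buildJsonForSnapshot could equally be written with withArray(...). A minimal standalone sketch of that pattern (illustrative only; the class name and sample values are not from this diff):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class ObjectNodeSketch {

    private static final ObjectMapper jsonMapper = new ObjectMapper();

    public static void main(String[] args) throws Exception {
        ObjectNode jsonTuple = jsonMapper.createObjectNode();
        jsonTuple.put("name", "foo");                          // scalar value -> put
        jsonTuple.set("values", jsonMapper.createObjectNode()  // child node -> set
                .put("count", 1)
                .put("rate", 0.0033333333333333335));

        ObjectNode jsonMetric = jsonMapper.createObjectNode();
        // withArray returns the existing "values" array or creates it on first use,
        // which removes the explicit null check shown in the hunk above
        ArrayNode values = jsonMetric.withArray("values");
        values.add(jsonTuple);

        System.out.println(jsonMapper.writerWithDefaultPrettyPrinter().writeValueAsString(jsonMetric));
    }
}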
diff --git a/container-core/src/main/java/org/json/package-info.java b/container-core/src/main/java/org/json/package-info.java
index 44630ad235a..7ca9fe91e31 100644
--- a/container-core/src/main/java/org/json/package-info.java
+++ b/container-core/src/main/java/org/json/package-info.java
@@ -1,5 +1,5 @@
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
@ExportPackage
package org.json;
-
+// TODO Vespa 8: stop providing org.json
import com.yahoo.osgi.annotation.ExportPackage;
diff --git a/container-core/src/test/java/com/yahoo/container/handler/metrics/MetricsV2HandlerTest.java b/container-core/src/test/java/com/yahoo/container/handler/metrics/MetricsV2HandlerTest.java
index 9020ed91026..ca4bec30322 100644
--- a/container-core/src/test/java/com/yahoo/container/handler/metrics/MetricsV2HandlerTest.java
+++ b/container-core/src/test/java/com/yahoo/container/handler/metrics/MetricsV2HandlerTest.java
@@ -1,17 +1,18 @@
// Copyright 2019 Oath Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.container.handler.metrics;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
import com.github.tomakehurst.wiremock.junit.WireMockRule;
import com.yahoo.container.jdisc.RequestHandlerTestDriver;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import java.io.BufferedReader;
+import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.concurrent.Executors;
@@ -34,6 +35,8 @@ import static org.junit.Assert.fail;
*/
public class MetricsV2HandlerTest {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
private static final String URI_BASE = "http://localhost";
private static final String V2_URI = URI_BASE + V2_PATH;
@@ -79,29 +82,29 @@ public class MetricsV2HandlerTest {
@Test
public void v2_response_contains_values_uri() throws Exception {
String response = testDriver.sendRequest(V2_URI).readAll();
- JSONObject root = new JSONObject(response);
+ JsonNode root = jsonMapper.readTree(response);
assertTrue(root.has("resources"));
- JSONArray resources = root.getJSONArray("resources");
- assertEquals(1, resources.length());
+ ArrayNode resources = (ArrayNode) root.get("resources");
+ assertEquals(1, resources.size());
- JSONObject valuesUri = resources.getJSONObject(0);
- assertEquals(VALUES_URI, valuesUri.getString("url"));
+ JsonNode valuesUri = resources.get(0);
+ assertEquals(VALUES_URI, valuesUri.get("url").textValue());
}
@Ignore
@Test
- public void visually_inspect_values_response() throws Exception {
- JSONObject responseJson = getResponseAsJson(null);
- System.out.println(responseJson.toString(4));
+ public void visually_inspect_values_response() {
+ JsonNode responseJson = getResponseAsJson(null);
+ System.out.println(responseJson);
}
@Test
public void invalid_path_yields_error_response() throws Exception {
String response = testDriver.sendRequest(V2_URI + "/invalid").readAll();
- JSONObject root = new JSONObject(response);
+ JsonNode root = jsonMapper.readTree(response);
assertTrue(root.has("error"));
- assertTrue(root.getString("error" ).startsWith("No content"));
+ assertTrue(root.get("error" ).textValue().startsWith("No content"));
}
@Test
@@ -111,23 +114,23 @@ public class MetricsV2HandlerTest {
}
@Test
- public void consumer_is_propagated_to_metrics_proxy_api() throws JSONException {
- JSONObject responseJson = getResponseAsJson(CUSTOM_CONSUMER);
+ public void consumer_is_propagated_to_metrics_proxy_api() {
+ JsonNode responseJson = getResponseAsJson(CUSTOM_CONSUMER);
- JSONObject firstNodeMetricsValues =
- responseJson.getJSONArray("nodes").getJSONObject(0)
- .getJSONObject("node")
- .getJSONArray("metrics").getJSONObject(0)
- .getJSONObject("values");
+ JsonNode firstNodeMetricsValues =
+ responseJson.get("nodes").get(0)
+ .get("node")
+ .get("metrics").get(0)
+ .get("values");
assertTrue(firstNodeMetricsValues.has(REPLACED_CPU_METRIC));
}
- private JSONObject getResponseAsJson(String consumer) {
+ private JsonNode getResponseAsJson(String consumer) {
String response = testDriver.sendRequest(VALUES_URI + consumerQuery(consumer)).readAll();
try {
- return new JSONObject(response);
- } catch (JSONException e) {
+ return jsonMapper.readTree(response);
+ } catch (IOException e) {
fail("Failed to create json object: " + e.getMessage());
throw new RuntimeException(e);
}
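A test-side observation on the hunks above (sketch only, not part of the diff): org.json's getJSONArray/getString threw JSONException when a member was missing, while Jackson's JsonNode.get returns null, which is why the assertions guard with has(); path() is the null-safe alternative for chained lookups:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonNodeLookupSketch {

    public static void main(String[] args) throws Exception {
        ObjectMapper jsonMapper = new ObjectMapper();
        JsonNode root = jsonMapper.readTree("{\"resources\":[{\"url\":\"http://localhost/values\"}]}");

        if (root.has("resources")) {
            JsonNode valuesUri = root.get("resources").get(0);
            System.out.println(valuesUri.get("url").textValue()); // http://localhost/values
        }

        // path() never returns null; missing members come back as a MissingNode
        System.out.println(root.path("missing").path("url").isMissingNode()); // true
    }
}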
diff --git a/container-core/src/test/java/com/yahoo/container/handler/metrics/PrometheusV1HandlerTest.java b/container-core/src/test/java/com/yahoo/container/handler/metrics/PrometheusV1HandlerTest.java
index a0e8c131c2b..9ffce6d1c28 100644
--- a/container-core/src/test/java/com/yahoo/container/handler/metrics/PrometheusV1HandlerTest.java
+++ b/container-core/src/test/java/com/yahoo/container/handler/metrics/PrometheusV1HandlerTest.java
@@ -1,15 +1,11 @@
// Copyright 2019 Oath Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.container.handler.metrics;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
import com.github.tomakehurst.wiremock.junit.WireMockRule;
import com.yahoo.container.jdisc.RequestHandlerTestDriver;
-import java.io.BufferedReader;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.stream.Collectors;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
@@ -26,13 +22,14 @@ import static com.yahoo.container.handler.metrics.MetricsV2Handler.consumerQuery
import static com.yahoo.container.handler.metrics.MetricsV2HandlerTest.getFileContents;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
/**
* @author gjoranv
*/
public class PrometheusV1HandlerTest {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
private static final String URI_BASE = "http://localhost";
private static final String V1_URI = URI_BASE + PrometheusV1Handler.V1_PATH;
@@ -79,14 +76,14 @@ public class PrometheusV1HandlerTest {
@Test
public void v1_response_contains_values_uri() throws Exception {
String response = testDriver.sendRequest(V1_URI).readAll();
- JSONObject root = new JSONObject(response);
+ JsonNode root = jsonMapper.readTree(response);
assertTrue(root.has("resources"));
- JSONArray resources = root.getJSONArray("resources");
- assertEquals(1, resources.length());
+ ArrayNode resources = (ArrayNode) root.get("resources");
+ assertEquals(1, resources.size());
- JSONObject valuesUri = resources.getJSONObject(0);
- assertEquals(VALUES_URI, valuesUri.getString("url"));
+ JsonNode valuesUri = resources.get(0);
+ assertEquals(VALUES_URI, valuesUri.get("url").asText());
}
@Ignore
@@ -99,9 +96,9 @@ public class PrometheusV1HandlerTest {
@Test
public void invalid_path_yields_error_response() throws Exception {
String response = testDriver.sendRequest(V1_URI + "/invalid").readAll();
- JSONObject root = new JSONObject(response);
+ JsonNode root = jsonMapper.readTree(response);
assertTrue(root.has("error"));
- assertTrue(root.getString("error" ).startsWith("No content"));
+ assertTrue(root.get("error" ).textValue().startsWith("No content"));
}
@Test
diff --git a/container-core/src/test/java/com/yahoo/container/jdisc/state/CoredumpGathererTest.java b/container-core/src/test/java/com/yahoo/container/jdisc/state/CoredumpGathererTest.java
index c1f7d790fa5..8a3d0e837c5 100644
--- a/container-core/src/test/java/com/yahoo/container/jdisc/state/CoredumpGathererTest.java
+++ b/container-core/src/test/java/com/yahoo/container/jdisc/state/CoredumpGathererTest.java
@@ -1,8 +1,7 @@
// Copyright 2019 Oath Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.container.jdisc.state;
-import org.json.JSONException;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.nio.file.Path;
@@ -17,12 +16,12 @@ import static org.junit.Assert.assertEquals;
public class CoredumpGathererTest {
@Test
- public void finds_one_coredump() throws JSONException {
- JSONObject packet = CoredumpGatherer.gatherCoredumpMetrics(new MockFileWrapper());
+ public void finds_one_coredump() {
+ JsonNode packet = CoredumpGatherer.gatherCoredumpMetrics(new MockFileWrapper());
- assertEquals("system-coredumps-processing", packet.getString("application"));
- assertEquals(1, packet.getInt("status_code"));
- assertEquals("Found 1 coredump(s)", packet.getString("status_msg"));
+ assertEquals("system-coredumps-processing", packet.get("application").textValue());
+ assertEquals(1, packet.get("status_code").intValue());
+ assertEquals("Found 1 coredump(s)", packet.get("status_msg").textValue());
}
diff --git a/container-core/src/test/java/com/yahoo/container/jdisc/state/HostLifeGathererTest.java b/container-core/src/test/java/com/yahoo/container/jdisc/state/HostLifeGathererTest.java
index d025b9662d2..12852c9d54c 100644
--- a/container-core/src/test/java/com/yahoo/container/jdisc/state/HostLifeGathererTest.java
+++ b/container-core/src/test/java/com/yahoo/container/jdisc/state/HostLifeGathererTest.java
@@ -1,8 +1,7 @@
// Copyright 2019 Oath Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.container.jdisc.state;
-import org.json.JSONException;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Test;
import java.nio.file.Path;
@@ -16,15 +15,15 @@ import static org.junit.Assert.assertEquals;
public class HostLifeGathererTest {
@Test
- public void host_is_alive() throws JSONException {
- JSONObject packet = HostLifeGatherer.getHostLifePacket(new MockFileWrapper());
- JSONObject metrics = packet.getJSONObject("metrics");
- assertEquals("host_life", packet.getString("application"));
- assertEquals(0, packet.getInt("status_code"));
- assertEquals("OK", packet.getString("status_msg"));
-
- assertEquals(123l, metrics.getLong("uptime"));
- assertEquals(1, metrics.getInt("alive"));
+ public void host_is_alive() {
+ JsonNode packet = HostLifeGatherer.getHostLifePacket(new MockFileWrapper());
+ JsonNode metrics = packet.get("metrics");
+ assertEquals("host_life", packet.get("application").textValue());
+ assertEquals(0, packet.get("status_code").intValue());
+ assertEquals("OK", packet.get("status_msg").textValue());
+
+ assertEquals(123L, metrics.get("uptime").longValue());
+ assertEquals(1, metrics.get("alive").intValue());
}
diff --git a/container-core/src/test/java/com/yahoo/container/jdisc/state/StateHandlerTest.java b/container-core/src/test/java/com/yahoo/container/jdisc/state/StateHandlerTest.java
index 1258ecdc46f..385eb627427 100644
--- a/container-core/src/test/java/com/yahoo/container/jdisc/state/StateHandlerTest.java
+++ b/container-core/src/test/java/com/yahoo/container/jdisc/state/StateHandlerTest.java
@@ -98,37 +98,38 @@ public class StateHandlerTest extends StateHandlerTestBase {
snapshotProvider.setSnapshot(snapshot);
advanceToNextSnapshot();
assertEquals("{\n" +
- " \"metrics\": {\n" +
- " \"snapshot\": {\n" +
- " \"from\": 0,\n" +
- " \"to\": 300\n" +
- " },\n" +
- " \"values\": [\n" +
- " {\n" +
- " \"name\": \"foo\",\n" +
- " \"values\": {\n" +
- " \"count\": 1,\n" +
- " \"rate\": 0.0033333333333333335\n" +
- " }\n" +
- " },\n" +
- " {\n" +
- " \"dimensions\": {\"component\": \"test\"},\n" +
- " \"name\": \"bar\",\n" +
- " \"values\": {\n" +
- " \"average\": 3.5,\n" +
- " \"count\": 4,\n" +
- " \"last\": 5,\n" +
- " \"max\": 5,\n" +
- " \"min\": 2,\n" +
- " \"rate\": 0.013333333333333334,\n" +
- " \"sum\": 14\n" +
- " }\n" +
- " }\n" +
- " ]\n" +
- " },\n" +
- " \"status\": {\"code\": \"up\"},\n" +
- " \"time\": 300000\n" +
- "}",
+ " \"time\" : 300000,\n" +
+ " \"status\" : {\n" +
+ " \"code\" : \"up\"\n" +
+ " },\n" +
+ " \"metrics\" : {\n" +
+ " \"snapshot\" : {\n" +
+ " \"from\" : 0.0,\n" +
+ " \"to\" : 300.0\n" +
+ " },\n" +
+ " \"values\" : [ {\n" +
+ " \"name\" : \"foo\",\n" +
+ " \"values\" : {\n" +
+ " \"count\" : 1,\n" +
+ " \"rate\" : 0.0033333333333333335\n" +
+ " }\n" +
+ " }, {\n" +
+ " \"name\" : \"bar\",\n" +
+ " \"values\" : {\n" +
+ " \"average\" : 3.5,\n" +
+ " \"sum\" : 14.0,\n" +
+ " \"count\" : 4,\n" +
+ " \"last\" : 5.0,\n" +
+ " \"max\" : 5.0,\n" +
+ " \"min\" : 2.0,\n" +
+ " \"rate\" : 0.013333333333333334\n" +
+ " },\n" +
+ " \"dimensions\" : {\n" +
+ " \"component\" : \"test\"\n" +
+ " }\n" +
+ " } ]\n" +
+ " }\n" +
+ "}",
requestAsString(V1_URI + "all"));
}
diff --git a/container-dependency-versions/pom.xml b/container-dependency-versions/pom.xml
index 8691d9a7ffb..08d6e0103bf 100644
--- a/container-dependency-versions/pom.xml
+++ b/container-dependency-versions/pom.xml
@@ -313,7 +313,7 @@
<artifactId>javassist</artifactId>
<version>${javassist.version}</version>
</dependency>
- <dependency> <!-- TODO Vespa 8: upgrade to newest version. Consider removing as provided dependency -->
+ <dependency> <!-- TODO Vespa 8: remove as provided dependency -->
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>${org.json.version}</version>
diff --git a/container-di/src/main/java/com/yahoo/container/di/CloudSubscriberFactory.java b/container-di/src/main/java/com/yahoo/container/di/CloudSubscriberFactory.java
index d43f96c9b4a..065733a719a 100644
--- a/container-di/src/main/java/com/yahoo/container/di/CloudSubscriberFactory.java
+++ b/container-di/src/main/java/com/yahoo/container/di/CloudSubscriberFactory.java
@@ -114,7 +114,8 @@ public class CloudSubscriberFactory implements SubscriberFactory {
catch (IllegalArgumentException e) {
numExceptions++;
log.log(Level.WARNING, "Got exception from the config system (ignore if you just removed a " +
- "component from your application that used the mentioned config): ", e);
+ "component from your application that used the mentioned config) Subscriber info: " +
+ subscriber.toString(), e);
if (numExceptions >= 5)
throw new IllegalArgumentException("Failed retrieving the next config generation", e);
}
diff --git a/container-disc/src/main/java/com/yahoo/container/usability/BindingsOverviewHandler.java b/container-disc/src/main/java/com/yahoo/container/usability/BindingsOverviewHandler.java
index 709441999d0..df7cacdc768 100644
--- a/container-disc/src/main/java/com/yahoo/container/usability/BindingsOverviewHandler.java
+++ b/container-disc/src/main/java/com/yahoo/container/usability/BindingsOverviewHandler.java
@@ -1,9 +1,13 @@
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.container.usability;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.inject.Inject;
import com.yahoo.component.ComponentId;
-import com.yahoo.component.provider.ComponentRegistry;
import com.yahoo.container.Container;
import com.yahoo.container.jdisc.JdiscBindingsConfig;
import com.yahoo.jdisc.handler.AbstractRequestHandler;
@@ -15,16 +19,11 @@ import com.yahoo.jdisc.handler.ResponseDispatch;
import com.yahoo.jdisc.handler.ResponseHandler;
import com.yahoo.jdisc.http.HttpRequest;
import com.yahoo.jdisc.http.HttpRequest.Method;
-
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
import org.osgi.framework.Bundle;
import org.osgi.framework.FrameworkUtil;
import java.nio.ByteBuffer;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.List;
import java.util.Map;
@@ -33,6 +32,8 @@ import java.util.Map;
*/
public class BindingsOverviewHandler extends AbstractRequestHandler {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
private final JdiscBindingsConfig bindingsConfig;
@Inject
@@ -42,7 +43,7 @@ public class BindingsOverviewHandler extends AbstractRequestHandler {
@Override
public ContentChannel handleRequest(com.yahoo.jdisc.Request request, ResponseHandler handler) {
- JSONObject json;
+ JsonNode json;
int statusToReturn;
if (request instanceof HttpRequest && ((HttpRequest) request).getMethod() != Method.GET) {
@@ -63,7 +64,9 @@ public class BindingsOverviewHandler extends AbstractRequestHandler {
}.connect(handler));
try {
- writer.write(json.toString());
+ writer.write(jsonMapper.writerWithDefaultPrettyPrinter().writeValueAsBytes(json));
+ } catch (JsonProcessingException e) {
+ throw new RuntimeException(e);
} finally {
writer.close();
}
@@ -71,63 +74,58 @@ public class BindingsOverviewHandler extends AbstractRequestHandler {
return new IgnoredContent();
}
- private JSONObject errorMessageInJson() {
- JSONObject error = new JSONObject();
- try {
- error.put("error", "This API, "
- + this.getClass().getSimpleName()
- + ", only supports HTTP GET."
- + " You are probably looking for another API/path.");
- } catch (org.json.JSONException e) {
- // just ignore it
- }
+ private static JsonNode errorMessageInJson() {
+ ObjectNode error = jsonMapper.createObjectNode();
+ error.put("error", "This API, "
+ + BindingsOverviewHandler.class.getSimpleName()
+ + ", only supports HTTP GET."
+ + " You are probably looking for another API/path.");
return error;
}
- static JSONArray renderRequestHandlers(JdiscBindingsConfig bindingsConfig,
+ static ArrayNode renderRequestHandlers(JdiscBindingsConfig bindingsConfig,
Map<ComponentId, ? extends RequestHandler> handlersById) {
- JSONArray ret = new JSONArray();
+ ArrayNode ret = jsonMapper.createArrayNode();
for (Map.Entry<ComponentId, ? extends RequestHandler> handlerEntry : handlersById.entrySet()) {
String id = handlerEntry.getKey().stringValue();
RequestHandler handler = handlerEntry.getValue();
- JSONObject handlerJson = renderComponent(handler, handlerEntry.getKey());
+ ObjectNode handlerJson = renderComponent(handler, handlerEntry.getKey());
addBindings(bindingsConfig, id, handlerJson);
- ret.put(handlerJson);
+ ret.add(handlerJson);
}
return ret;
}
- private static void addBindings(JdiscBindingsConfig bindingsConfig, String id, JSONObject handlerJson) {
+ private static void addBindings(JdiscBindingsConfig bindingsConfig, String id, ObjectNode handlerJson) {
List<String> serverBindings = new ArrayList<>();
JdiscBindingsConfig.Handlers handlerConfig = bindingsConfig.handlers(id);
if (handlerConfig != null) {
serverBindings = handlerConfig.serverBindings();
}
- putJson(handlerJson, "serverBindings", renderBindings(serverBindings));
+ handlerJson.set("serverBindings", renderBindings(serverBindings));
}
- private static JSONArray renderBindings(List<String> bindings) {
- JSONArray array = new JSONArray();
+ private static JsonNode renderBindings(List<String> bindings) {
+ ArrayNode array = jsonMapper.createArrayNode();
for (String binding : bindings)
- array.put(binding);
+ array.add(binding);
return array;
}
- private static JSONObject renderComponent(Object component, ComponentId id) {
- JSONObject jc = new JSONObject();
- putJson(jc, "id", id.stringValue());
+ private static ObjectNode renderComponent(Object component, ComponentId id) {
+ ObjectNode jc = jsonMapper.createObjectNode();
+ jc.put("id", id.stringValue());
addBundleInfo(jc, component);
return jc;
}
- private static void addBundleInfo(JSONObject jsonObject, Object component) {
+ private static void addBundleInfo(ObjectNode jsonObject, Object component) {
BundleInfo bundleInfo = bundleInfo(component);
- putJson(jsonObject, "class", bundleInfo.className);
- putJson(jsonObject, "bundle", bundleInfo.bundleName);
-
+ jsonObject.put("class", bundleInfo.className);
+ jsonObject.put("bundle", bundleInfo.bundleName);
}
private static BundleInfo bundleInfo(Object component) {
@@ -143,15 +141,6 @@ public class BindingsOverviewHandler extends AbstractRequestHandler {
}
}
- private static void putJson(JSONObject json, String key, Object value) {
- try {
- json.put(key, value);
- } catch (JSONException e) {
- // The original JSONException lacks key-value info.
- throw new RuntimeException("Trying to add invalid JSON object with key '" + key + "' and value '" + value + "' - " + e.getMessage(), e);
- }
- }
-
static final class BundleInfo {
public final String className;
@@ -172,10 +161,10 @@ public class BindingsOverviewHandler extends AbstractRequestHandler {
this.bindingsConfig = bindingsConfig;
}
- public JSONObject render() {
- JSONObject root = new JSONObject();
+ public JsonNode render() {
+ ObjectNode root = jsonMapper.createObjectNode();
- putJson(root, "handlers",
+ root.set("handlers",
renderRequestHandlers(bindingsConfig, Container.get().getRequestHandlerRegistry().allComponentsById()));
return root;
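One possible refinement of the handler above, not part of this change: ObjectMapper is thread-safe once configured, and the ObjectWriter returned by writerWithDefaultPrettyPrinter() is immutable, so the pretty-printing writer could be built once and shared instead of being created on every request. A sketch under that assumption (names are illustrative):

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;

public class PrettyWriterSketch {

    private static final ObjectMapper jsonMapper = new ObjectMapper();
    // ObjectWriter is immutable and thread-safe, so one instance can serve all requests
    private static final ObjectWriter prettyWriter = jsonMapper.writerWithDefaultPrettyPrinter();

    static byte[] render(JsonNode json) throws JsonProcessingException {
        return prettyWriter.writeValueAsBytes(json); // always UTF-8 encoded
    }

    public static void main(String[] args) throws Exception {
        JsonNode root = jsonMapper.createObjectNode().put("status", "ok");
        System.out.println(new String(render(root), java.nio.charset.StandardCharsets.UTF_8));
    }
}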
diff --git a/container-search-and-docproc/src/main/java/com/yahoo/container/handler/observability/ApplicationStatusHandler.java b/container-search-and-docproc/src/main/java/com/yahoo/container/handler/observability/ApplicationStatusHandler.java
index 57c5e768cfb..943eef1e0bf 100644
--- a/container-search-and-docproc/src/main/java/com/yahoo/container/handler/observability/ApplicationStatusHandler.java
+++ b/container-search-and-docproc/src/main/java/com/yahoo/container/handler/observability/ApplicationStatusHandler.java
@@ -1,6 +1,11 @@
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.container.handler.observability;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.inject.Inject;
import com.yahoo.component.AbstractComponent;
import com.yahoo.component.ComponentId;
@@ -29,15 +34,11 @@ import com.yahoo.processing.execution.chain.ChainRegistry;
import com.yahoo.processing.handler.ProcessingHandler;
import com.yahoo.search.handler.SearchHandler;
import com.yahoo.search.searchchain.SearchChainRegistry;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
import org.osgi.framework.Bundle;
import org.osgi.framework.FrameworkUtil;
import java.nio.ByteBuffer;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
@@ -51,11 +52,13 @@ import java.util.Map;
*/
public class ApplicationStatusHandler extends AbstractRequestHandler {
- private final JSONObject applicationJson;
- private final JSONArray clientsJson;
- private final JSONArray serversJson;
- private final JSONArray requestFiltersJson;
- private final JSONArray responseFiltersJson;
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
+ private final JsonNode applicationJson;
+ private final JsonNode clientsJson;
+ private final JsonNode serversJson;
+ private final JsonNode requestFiltersJson;
+ private final JsonNode responseFiltersJson;
private final JdiscBindingsConfig bindingsConfig;
@Inject
@@ -78,7 +81,7 @@ public class ApplicationStatusHandler extends AbstractRequestHandler {
@Override
public ContentChannel handleRequest(com.yahoo.jdisc.Request request, ResponseHandler handler) {
- JSONObject json = new StatusResponse(applicationJson, clientsJson, serversJson,
+ JsonNode json = new StatusResponse(applicationJson, clientsJson, serversJson,
requestFiltersJson, responseFiltersJson, bindingsConfig)
.render();
@@ -91,62 +94,66 @@ public class ApplicationStatusHandler extends AbstractRequestHandler {
}
}.connect(handler));
- writer.write(json.toString());
+ try {
+ writer.write(jsonMapper.writerWithDefaultPrettyPrinter().writeValueAsBytes(json));
+ } catch (JsonProcessingException e) {
+ throw new RuntimeException("Invalid JSON: " + e.getMessage(), e);
+ }
writer.close();
return new IgnoredContent();
}
- static JSONObject renderApplicationConfigs(ApplicationMetadataConfig metaConfig,
+ static JsonNode renderApplicationConfigs(ApplicationMetadataConfig metaConfig,
ApplicationUserdataConfig userConfig) {
- JSONObject vespa = new JSONObject();
- putJson(vespa, "version", Vtag.currentVersion);
-
- JSONObject meta = new JSONObject();
- putJson(meta, "name", metaConfig.name());
- putJson(meta, "user", metaConfig.user());
- putJson(meta, "path", metaConfig.path());
- putJson(meta, "generation", metaConfig.generation());
- putJson(meta, "timestamp", metaConfig.timestamp());
- putJson(meta, "date", new Date(metaConfig.timestamp()).toString());
- putJson(meta, "checksum", metaConfig.checksum());
-
- JSONObject user = new JSONObject();
- putJson(user, "version", userConfig.version());
-
- JSONObject application = new JSONObject();
- putJson(application, "vespa", vespa);
- putJson(application, "meta", meta);
- putJson(application, "user", user);
+ ObjectNode vespa = jsonMapper.createObjectNode();
+ vespa.put("version", Vtag.currentVersion.toString());
+
+ ObjectNode meta = jsonMapper.createObjectNode();
+ meta.put("name", metaConfig.name());
+ meta.put("user", metaConfig.user());
+ meta.put("path", metaConfig.path());
+ meta.put("generation", metaConfig.generation());
+ meta.put("timestamp", metaConfig.timestamp());
+ meta.put("date", new Date(metaConfig.timestamp()).toString());
+ meta.put("checksum", metaConfig.checksum());
+
+ ObjectNode user = jsonMapper.createObjectNode();
+ user.put("version", userConfig.version());
+
+ ObjectNode application = jsonMapper.createObjectNode();
+ application.set("vespa", vespa);
+ application.set("meta", meta);
+ application.set("user", user);
return application;
}
- static JSONArray renderObjectComponents(Map<ComponentId, ?> componentsById) {
- JSONArray ret = new JSONArray();
+ static JsonNode renderObjectComponents(Map<ComponentId, ?> componentsById) {
+ ArrayNode ret = jsonMapper.createArrayNode();
for (Map.Entry<ComponentId, ?> componentEntry : componentsById.entrySet()) {
- JSONObject jc = renderComponent(componentEntry.getValue(), componentEntry.getKey());
- ret.put(jc);
+ JsonNode jc = renderComponent(componentEntry.getValue(), componentEntry.getKey());
+ ret.add(jc);
}
return ret;
}
- static JSONArray renderRequestHandlers(JdiscBindingsConfig bindingsConfig,
+ static JsonNode renderRequestHandlers(JdiscBindingsConfig bindingsConfig,
Map<ComponentId, ? extends RequestHandler> handlersById) {
- JSONArray ret = new JSONArray();
+ ArrayNode ret = jsonMapper.createArrayNode();
for (Map.Entry<ComponentId, ? extends RequestHandler> handlerEntry : handlersById.entrySet()) {
String id = handlerEntry.getKey().stringValue();
RequestHandler handler = handlerEntry.getValue();
- JSONObject handlerJson = renderComponent(handler, handlerEntry.getKey());
+ ObjectNode handlerJson = renderComponent(handler, handlerEntry.getKey());
addBindings(bindingsConfig, id, handlerJson);
- ret.put(handlerJson);
+ ret.add(handlerJson);
}
return ret;
}
- private static void addBindings(JdiscBindingsConfig bindingsConfig, String id, JSONObject handlerJson) {
+ private static void addBindings(JdiscBindingsConfig bindingsConfig, String id, ObjectNode handlerJson) {
List<String> serverBindings = new ArrayList<>();
List<String> clientBindings = new ArrayList<>();
@@ -155,40 +162,40 @@ public class ApplicationStatusHandler extends AbstractRequestHandler {
serverBindings = handlerConfig.serverBindings();
clientBindings = handlerConfig.clientBindings();
}
- putJson(handlerJson, "serverBindings", renderBindings(serverBindings));
- putJson(handlerJson, "clientBindings", renderBindings(clientBindings));
+ handlerJson.set("serverBindings", renderBindings(serverBindings));
+ handlerJson.set("clientBindings", renderBindings(clientBindings));
}
- private static JSONArray renderBindings(List<String> bindings) {
- JSONArray ret = new JSONArray();
+ private static JsonNode renderBindings(List<String> bindings) {
+ ArrayNode ret = jsonMapper.createArrayNode();
for (String binding : bindings)
- ret.put(binding);
+ ret.add(binding);
return ret;
}
- private static JSONArray renderAbstractComponents(List<? extends AbstractComponent> components) {
- JSONArray ret = new JSONArray();
+ private static JsonNode renderAbstractComponents(List<? extends AbstractComponent> components) {
+ ArrayNode ret = jsonMapper.createArrayNode();
for (AbstractComponent c : components) {
- JSONObject jc = renderComponent(c, c.getId());
- ret.put(jc);
+ JsonNode jc = renderComponent(c, c.getId());
+ ret.add(jc);
}
return ret;
}
- private static JSONObject renderComponent(Object component, ComponentId id) {
- JSONObject jc = new JSONObject();
- putJson(jc, "id", id.stringValue());
+ private static ObjectNode renderComponent(Object component, ComponentId id) {
+ ObjectNode jc = jsonMapper.createObjectNode();
+ jc.put("id", id.stringValue());
addBundleInfo(jc, component);
return jc;
}
- private static void addBundleInfo(JSONObject jsonObject, Object component) {
+ private static void addBundleInfo(ObjectNode jsonObject, Object component) {
BundleInfo bundleInfo = bundleInfo(component);
- putJson(jsonObject, "class", bundleInfo.className);
- putJson(jsonObject, "bundle", bundleInfo.bundleName);
+ jsonObject.put("class", bundleInfo.className);
+ jsonObject.put("bundle", bundleInfo.bundleName);
}
@@ -205,15 +212,6 @@ public class ApplicationStatusHandler extends AbstractRequestHandler {
}
}
- private static void putJson(JSONObject json, String key, Object value) {
- try {
- json.put(key, value);
- } catch (JSONException e) {
- // The original JSONException lacks key-value info.
- throw new RuntimeException("Trying to add invalid JSON object with key '" + key + "' and value '" + value + "' - " + e.getMessage(), e);
- }
- }
-
static final class BundleInfo {
public final String className;
public final String bundleName;
@@ -224,18 +222,18 @@ public class ApplicationStatusHandler extends AbstractRequestHandler {
}
static final class StatusResponse {
- private final JSONObject applicationJson;
- private final JSONArray clientsJson;
- private final JSONArray serversJson;
- private final JSONArray requestFiltersJson;
- private final JSONArray responseFiltersJson;
+ private final JsonNode applicationJson;
+ private final JsonNode clientsJson;
+ private final JsonNode serversJson;
+ private final JsonNode requestFiltersJson;
+ private final JsonNode responseFiltersJson;
private final JdiscBindingsConfig bindingsConfig;
- StatusResponse(JSONObject applicationJson,
- JSONArray clientsJson,
- JSONArray serversJson,
- JSONArray requestFiltersJson,
- JSONArray responseFiltersJson,
+ StatusResponse(JsonNode applicationJson,
+ JsonNode clientsJson,
+ JsonNode serversJson,
+ JsonNode requestFiltersJson,
+ JsonNode responseFiltersJson,
JdiscBindingsConfig bindingsConfig) {
this.applicationJson = applicationJson;
this.clientsJson = clientsJson;
@@ -245,52 +243,52 @@ public class ApplicationStatusHandler extends AbstractRequestHandler {
this.bindingsConfig = bindingsConfig;
}
- public JSONObject render() {
- JSONObject root = new JSONObject();
+ public JsonNode render() {
+ ObjectNode root = jsonMapper.createObjectNode();
- putJson(root, "application", applicationJson);
- putJson(root, "abstractComponents",
+ root.set("application", applicationJson);
+ root.set("abstractComponents",
renderAbstractComponents(Container.get().getComponentRegistry().allComponents()));
- putJson(root, "handlers",
+ root.set("handlers",
renderRequestHandlers(bindingsConfig, Container.get().getRequestHandlerRegistry().allComponentsById()));
- putJson(root, "clients", clientsJson);
- putJson(root, "servers", serversJson);
- putJson(root, "httpRequestFilters", requestFiltersJson);
- putJson(root, "httpResponseFilters", responseFiltersJson);
+ root.set("clients", clientsJson);
+ root.set("servers", serversJson);
+ root.set("httpRequestFilters", requestFiltersJson);
+ root.set("httpResponseFilters", responseFiltersJson);
- putJson(root, "searchChains", renderSearchChains(Container.get()));
- putJson(root, "docprocChains", renderDocprocChains(Container.get()));
- putJson(root, "processingChains", renderProcessingChains(Container.get()));
+ root.set("searchChains", renderSearchChains(Container.get()));
+ root.set("docprocChains", renderDocprocChains(Container.get()));
+ root.set("processingChains", renderProcessingChains(Container.get()));
return root;
}
- private static JSONObject renderSearchChains(Container container) {
+ private static JsonNode renderSearchChains(Container container) {
for (RequestHandler h : container.getRequestHandlerRegistry().allComponents()) {
if (h instanceof SearchHandler) {
SearchChainRegistry scReg = ((SearchHandler) h).getSearchChainRegistry();
return renderChains(scReg);
}
}
- return new JSONObject();
+ return jsonMapper.createObjectNode();
}
- private static JSONObject renderDocprocChains(Container container) {
- JSONObject ret = new JSONObject();
+ private static JsonNode renderDocprocChains(Container container) {
+ ObjectNode ret = jsonMapper.createObjectNode();
for (RequestHandler h : container.getRequestHandlerRegistry().allComponents()) {
if (h instanceof DocumentProcessingHandler) {
ComponentRegistry<DocprocService> registry = ((DocumentProcessingHandler) h).getDocprocServiceRegistry();
for (DocprocService service : registry.allComponents()) {
- putJson(ret, service.getId().stringValue(), renderCalls(service.getCallStack().iterator()));
+ ret.set(service.getId().stringValue(), renderCalls(service.getCallStack().iterator()));
}
}
}
return ret;
}
- private static JSONObject renderProcessingChains(Container container) {
- JSONObject ret = new JSONObject();
+ private static JsonNode renderProcessingChains(Container container) {
+ JsonNode ret = jsonMapper.createObjectNode();
for (RequestHandler h : container.getRequestHandlerRegistry().allComponents()) {
if (h instanceof ProcessingHandler) {
ChainRegistry<Processor> registry = ((ProcessingHandler) h).getChainRegistry();
@@ -301,20 +299,20 @@ public class ApplicationStatusHandler extends AbstractRequestHandler {
}
// Note the generic param here! The key to make this work is '? extends Chain', but why?
- static JSONObject renderChains(ComponentRegistry<? extends Chain<?>> chains) {
- JSONObject ret = new JSONObject();
+ static JsonNode renderChains(ComponentRegistry<? extends Chain<?>> chains) {
+ ObjectNode ret = jsonMapper.createObjectNode();
for (Chain<?> chain : chains.allComponents()) {
- putJson(ret, chain.getId().stringValue(), renderAbstractComponents(chain.components()));
+ ret.set(chain.getId().stringValue(), renderAbstractComponents(chain.components()));
}
return ret;
}
- private static JSONArray renderCalls(Iterator<Call> components) {
- JSONArray ret = new JSONArray();
+ private static JsonNode renderCalls(Iterator<Call> components) {
+ ArrayNode ret = jsonMapper.createArrayNode();
while (components.hasNext()) {
Call c = components.next();
- JSONObject jc = renderComponent(c.getDocumentProcessor(), c.getDocumentProcessor().getId());
- ret.put(jc);
+ JsonNode jc = renderComponent(c.getDocumentProcessor(), c.getDocumentProcessor().getId());
+ ret.add(jc);
}
return ret;
}
diff --git a/container-search-gui/src/main/java/com/yahoo/search/query/gui/GUIHandler.java b/container-search-gui/src/main/java/com/yahoo/search/query/gui/GUIHandler.java
index 9ddcc7a7e69..3132f3744c9 100644
--- a/container-search-gui/src/main/java/com/yahoo/search/query/gui/GUIHandler.java
+++ b/container-search-gui/src/main/java/com/yahoo/search/query/gui/GUIHandler.java
@@ -1,8 +1,10 @@
// Copyright 2018 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.search.query.gui;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.inject.Inject;
-
import com.yahoo.container.QrSearchersConfig;
import com.yahoo.container.jdisc.HttpRequest;
import com.yahoo.container.jdisc.HttpResponse;
@@ -11,6 +13,7 @@ import com.yahoo.prelude.IndexModel;
import com.yahoo.prelude.querytransform.RecallSearcher;
import com.yahoo.restapi.Path;
import com.yahoo.search.Query;
+import com.yahoo.search.config.IndexInfoConfig;
import com.yahoo.search.query.Model;
import com.yahoo.search.query.Presentation;
import com.yahoo.search.query.Ranking;
@@ -19,19 +22,12 @@ import com.yahoo.search.query.ranking.MatchPhase;
import com.yahoo.search.query.restapi.ErrorResponse;
import com.yahoo.search.yql.MinimalQueryInserter;
import com.yahoo.vespa.config.search.RankProfilesConfig;
-import com.yahoo.search.config.IndexInfoConfig;
import com.yahoo.yolean.Exceptions;
-import org.json.JSONException;
-import org.json.JSONObject;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
import java.util.logging.Level;
@@ -42,6 +38,8 @@ import java.util.logging.Level;
*/
public class GUIHandler extends LoggingRequestHandler {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
private final IndexModel indexModel;
private final RankProfilesConfig rankProfilesConfig;
@@ -112,7 +110,7 @@ public class GUIHandler extends LoggingRequestHandler {
InputStream is;
if (this.path.equals("config.json")){
String json = "{}";
- try { json = getGUIConfig(); } catch (JSONException e) { /*Something happened while parsing JSON */ }
+ try { json = getGUIConfig(); } catch (IOException e) { /* Failed to serialize the GUI config to JSON */ }
is = new ByteArrayInputStream(json.getBytes());
} else{
is = GUIHandler.class.getClassLoader().getResourceAsStream("gui/"+this.path);
@@ -160,50 +158,50 @@ public class GUIHandler extends LoggingRequestHandler {
return "text/html";
}
- private String getGUIConfig() throws JSONException {
- JSONObject json = new JSONObject();
- json.put("ranking_properties", Arrays.asList("propertyname"));
- json.put("ranking_features", Arrays.asList("featurename"));
+ private String getGUIConfig() throws IOException {
+ ObjectNode json = jsonMapper.createObjectNode();
+ json.set("ranking_properties", jsonMapper.createArrayNode().add("propertyname"));
+ json.set("ranking_features", jsonMapper.createArrayNode().add("featurename"));
- List<String> sources = new ArrayList<>();
+ ArrayNode sources = jsonMapper.createArrayNode();
try {
- sources = new ArrayList<>(indexModel.getMasterClusters().keySet());
+ indexModel.getMasterClusters().keySet().forEach(sources::add);
} catch (NullPointerException ex){ /* clusters are not set */ }
- json.put("model_sources", sources);
+ json.set("model_sources", sources);
- List<String> rankProfiles = new ArrayList<>();
+ ArrayNode rankProfiles = jsonMapper.createArrayNode();
try {
rankProfilesConfig.rankprofile().forEach(rankProfile -> rankProfiles.add(rankProfile.name()));
} catch (NullPointerException ex){ /* rankprofiles are not set*/ }
- json.put("ranking_profile", rankProfiles);
+ json.set("ranking_profile", rankProfiles);
// Creating map from parent to children for GUI: parameter --> child-parameters
- HashMap<String, List<String>> childMap = new HashMap<>();
- childMap.put(Model.MODEL, Arrays.asList(Model.DEFAULT_INDEX, Model.ENCODING, Model.LANGUAGE, Model.QUERY_STRING, Model.RESTRICT, Model.SEARCH_PATH, Model.SOURCES, Model.TYPE));
- childMap.put(Ranking.RANKING, Arrays.asList(Ranking.LOCATION, Ranking.FEATURES, Ranking.LIST_FEATURES, Ranking.PROFILE, Ranking.PROPERTIES, Ranking.SORTING, Ranking.FRESHNESS, Ranking.QUERYCACHE, Ranking.MATCH_PHASE));
- childMap.put(Ranking.RANKING +"."+ Ranking.MATCH_PHASE, Arrays.asList(MatchPhase.MAX_HITS, MatchPhase.ATTRIBUTE, MatchPhase.ASCENDING, Ranking.DIVERSITY));
- childMap.put(Ranking.RANKING +"."+ Ranking.MATCH_PHASE +"."+Ranking.DIVERSITY, Arrays.asList(Diversity.ATTRIBUTE, Diversity.MINGROUPS));
- childMap.put(Presentation.PRESENTATION, Arrays.asList(Presentation.BOLDING, Presentation.FORMAT, Presentation.SUMMARY, "template", Presentation.TIMING ));
- childMap.put("trace", Arrays.asList("timestamps"));
- childMap.put("tracelevel", Arrays.asList("rules"));
- childMap.put("metrics", Arrays.asList("ignore"));
- childMap.put("collapse", Arrays.asList("summary"));
- childMap.put("pos", Arrays.asList("ll", "radius", "bb", "attribute"));
- childMap.put("streaming", Arrays.asList("userid", "groupname", "selection", "priority", "maxbucketspervisitor"));
- childMap.put("rules", Arrays.asList("off", "rulebase"));
- json.put("childMap", childMap);
-
- List<String> levelZeroParameters = Arrays.asList(MinimalQueryInserter.YQL.toString(), Query.HITS.toString(), Query.OFFSET.toString(),
- "queryProfile", Query.NO_CACHE.toString(), Query.GROUPING_SESSION_CACHE.toString(),
- Query.SEARCH_CHAIN.toString(), Query.TIMEOUT.toString(), "trace", "tracelevel",
- Query.TRACE_LEVEL.toString(), Query.EXPLAIN_LEVEL.toString(), "explainlevel", Model.MODEL, Ranking.RANKING, "collapse", "collapsesize","collapsefield",
- Presentation.PRESENTATION, "pos", "streaming", "rules", RecallSearcher.recallName.toString(), "user",
- "metrics", "");
- json.put("levelZeroParameters", levelZeroParameters);
-
- return json.toString();
+ ObjectNode childMap = jsonMapper.createObjectNode();
+ childMap.set(Model.MODEL, jsonMapper.createArrayNode().add(Model.DEFAULT_INDEX).add(Model.ENCODING).add(Model.LANGUAGE).add(Model.QUERY_STRING).add(Model.RESTRICT).add(Model.SEARCH_PATH).add(Model.SOURCES).add(Model.TYPE));
+ childMap.set(Ranking.RANKING, jsonMapper.createArrayNode().add(Ranking.LOCATION).add(Ranking.FEATURES).add(Ranking.LIST_FEATURES).add(Ranking.PROFILE).add(Ranking.PROPERTIES).add(Ranking.SORTING).add(Ranking.FRESHNESS).add(Ranking.QUERYCACHE).add(Ranking.MATCH_PHASE));
+ childMap.set(Ranking.RANKING +"."+ Ranking.MATCH_PHASE, jsonMapper.createArrayNode().add(MatchPhase.MAX_HITS).add(MatchPhase.ATTRIBUTE).add(MatchPhase.ASCENDING).add(Ranking.DIVERSITY));
+ childMap.set(Ranking.RANKING +"."+ Ranking.MATCH_PHASE +"."+Ranking.DIVERSITY, jsonMapper.createArrayNode().add(Diversity.ATTRIBUTE).add(Diversity.MINGROUPS));
+ childMap.set(Presentation.PRESENTATION, jsonMapper.createArrayNode().add(Presentation.BOLDING).add(Presentation.FORMAT).add(Presentation.SUMMARY).add("template").add(Presentation.TIMING ));
+ childMap.set("trace", jsonMapper.createArrayNode().add("timestamps"));
+ childMap.set("tracelevel", jsonMapper.createArrayNode().add("rules"));
+ childMap.set("metrics", jsonMapper.createArrayNode().add("ignore"));
+ childMap.set("collapse", jsonMapper.createArrayNode().add("summary"));
+ childMap.set("pos", jsonMapper.createArrayNode().add("ll").add("radius").add("bb").add("attribute"));
+ childMap.set("streaming", jsonMapper.createArrayNode().add("userid").add("groupname").add("selection").add("priority").add("maxbucketspervisitor"));
+ childMap.set("rules", jsonMapper.createArrayNode().add("off").add("rulebase"));
+ json.set("childMap", childMap);
+
+ ArrayNode levelZeroParameters = jsonMapper.createArrayNode().add(MinimalQueryInserter.YQL.toString()).add(Query.HITS.toString()).add(Query.OFFSET.toString())
+ .add("queryProfile").add(Query.NO_CACHE.toString()).add(Query.GROUPING_SESSION_CACHE.toString())
+ .add(Query.SEARCH_CHAIN.toString()).add(Query.TIMEOUT.toString()).add("trace").add("tracelevel")
+ .add(Query.TRACE_LEVEL.toString()).add(Query.EXPLAIN_LEVEL.toString()).add("explainlevel").add(Model.MODEL).add(Ranking.RANKING).add("collapse").add("collapsesize").add("collapsefield")
+ .add(Presentation.PRESENTATION).add("pos").add("streaming").add("rules").add(RecallSearcher.recallName.toString()).add("user")
+ .add("metrics").add("");
+ json.set("levelZeroParameters", levelZeroParameters);
+
+ return jsonMapper.writerWithDefaultPrettyPrinter().writeValueAsString(json);
}
}
}
\ No newline at end of file
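An aside on getGUIConfig above (illustrative only, not part of the diff): the long createArrayNode().add(...).add(...) chains could be folded into a small varargs helper; the helper and class names below are hypothetical:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class GuiConfigSketch {

    private static final ObjectMapper jsonMapper = new ObjectMapper();

    // Hypothetical helper: builds an ArrayNode of strings from varargs
    private static ArrayNode array(String... values) {
        ArrayNode node = jsonMapper.createArrayNode();
        for (String value : values) node.add(value);
        return node;
    }

    public static void main(String[] args) throws Exception {
        ObjectNode childMap = jsonMapper.createObjectNode();
        childMap.set("trace", array("timestamps"));
        childMap.set("pos", array("ll", "radius", "bb", "attribute"));
        System.out.println(jsonMapper.writerWithDefaultPrettyPrinter().writeValueAsString(childMap));
    }
}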
diff --git a/container-search/pom.xml b/container-search/pom.xml
index 074f5827122..014b7dda14f 100644
--- a/container-search/pom.xml
+++ b/container-search/pom.xml
@@ -150,6 +150,11 @@
<version>${project.version}</version>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.assertj</groupId>
+ <artifactId>assertj-core</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
<build>
<plugins>
diff --git a/container-search/src/main/java/com/yahoo/prelude/hitfield/JSONString.java b/container-search/src/main/java/com/yahoo/prelude/hitfield/JSONString.java
index 209bfd08e6b..55438aa35ba 100644
--- a/container-search/src/main/java/com/yahoo/prelude/hitfield/JSONString.java
+++ b/container-search/src/main/java/com/yahoo/prelude/hitfield/JSONString.java
@@ -21,6 +21,7 @@ import java.util.Iterator;
*
* @author Steinar Knutsen
*/
+// TODO Vespa 8: remove methods leaking org.json types (replace with Slime equivalent?)
public class JSONString implements Inspectable {
private Inspector value;
@@ -436,6 +437,8 @@ public class JSONString implements Inspectable {
return content;
}
+ /** @deprecated Use {@link #getContent()} instead and parse content yourself */
+ @Deprecated(forRemoval = true, since = "7")
public Object getParsedJSON() {
initContent();
if (parsedJSON == null) {
@@ -444,6 +447,7 @@ public class JSONString implements Inspectable {
return parsedJSON;
}
+ @Deprecated(forRemoval = true, since = "7")
public void setParsedJSON(Object parsedJSON) {
this.parsedJSON = parsedJSON;
}
diff --git a/container-search/src/main/java/com/yahoo/search/rendering/JsonRenderer.java b/container-search/src/main/java/com/yahoo/search/rendering/JsonRenderer.java
index 31f8194b3b7..c4f850307ae 100644
--- a/container-search/src/main/java/com/yahoo/search/rendering/JsonRenderer.java
+++ b/container-search/src/main/java/com/yahoo/search/rendering/JsonRenderer.java
@@ -45,8 +45,6 @@ import com.yahoo.search.result.Hit;
import com.yahoo.search.result.HitGroup;
import com.yahoo.search.result.NanNumber;
import com.yahoo.tensor.Tensor;
-import org.json.JSONArray;
-import org.json.JSONObject;
import java.io.IOException;
import java.io.OutputStream;
@@ -671,14 +669,6 @@ public class JsonRenderer extends AsynchronousSectionedRenderer<Result> {
} else if (field instanceof FieldValue) {
// the null below is the field which has already been written
((FieldValue) field).serialize(null, new JsonWriter(generator));
- } else if (field instanceof JSONArray || field instanceof JSONObject) {
- // org.json returns null if the object would not result in syntactically correct JSON
- String s = field.toString();
- if (s == null) {
- generator.writeNull();
- } else {
- generator.writeRawValue(s);
- }
} else {
generator.writeString(field.toString());
}
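A note on the JsonRenderer hunk above (standalone sketch, not the renderer's actual wiring): the removed branch emitted org.json values as raw JSON text; if a field ever holds an already-serialized JSON string, JsonGenerator.writeRawValue can still pass it through verbatim:

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import java.io.StringWriter;

public class RawValueSketch {

    public static void main(String[] args) throws Exception {
        StringWriter out = new StringWriter();
        JsonGenerator generator = new JsonFactory().createGenerator(out);
        generator.writeStartObject();
        generator.writeFieldName("field");
        // writeRawValue copies an already-serialized JSON fragment without re-escaping it
        generator.writeRawValue("{\"foo\":1,\"bar\":2}");
        generator.writeEndObject();
        generator.close();
        System.out.println(out); // {"field":{"foo":1,"bar":2}}
    }
}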
diff --git a/container-search/src/test/java/com/yahoo/prelude/fastsearch/SlimeSummaryTestCase.java b/container-search/src/test/java/com/yahoo/prelude/fastsearch/SlimeSummaryTestCase.java
index d1399cabc75..49df321e581 100644
--- a/container-search/src/test/java/com/yahoo/prelude/fastsearch/SlimeSummaryTestCase.java
+++ b/container-search/src/test/java/com/yahoo/prelude/fastsearch/SlimeSummaryTestCase.java
@@ -4,16 +4,21 @@ package com.yahoo.prelude.fastsearch;
import com.google.common.collect.ImmutableSet;
import com.yahoo.config.subscription.ConfigGetter;
import com.yahoo.data.access.slime.SlimeAdapter;
+import com.yahoo.prelude.hitfield.JSONString;
import com.yahoo.prelude.hitfield.RawData;
import com.yahoo.prelude.hitfield.XMLString;
-import com.yahoo.prelude.hitfield.JSONString;
import com.yahoo.search.result.FeatureData;
import com.yahoo.search.result.Hit;
-import com.yahoo.search.result.NanNumber;
import com.yahoo.search.result.StructuredData;
+import com.yahoo.slime.BinaryFormat;
+import com.yahoo.slime.Cursor;
+import com.yahoo.slime.Slime;
+import com.yahoo.tensor.Tensor;
+import com.yahoo.tensor.serialization.TypedBinaryFormat;
+import org.junit.Test;
+
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
-
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Iterator;
@@ -21,14 +26,6 @@ import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
-import com.yahoo.slime.BinaryFormat;
-import com.yahoo.slime.Cursor;
-import com.yahoo.slime.Inspector;
-import com.yahoo.slime.Slime;
-import com.yahoo.tensor.Tensor;
-import com.yahoo.tensor.serialization.TypedBinaryFormat;
-import org.junit.Test;
-
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
@@ -102,7 +99,7 @@ public class SlimeSummaryTestCase {
if (hit.getField("jsonstring_field") instanceof JSONString) {
JSONString jstr = (JSONString) hit.getField("jsonstring_field");
assertEquals("{\"foo\":1,\"bar\":2}", jstr.getContent());
- assertNotNull(jstr.getParsedJSON());
+ assertNotNull(getParsedJSON(jstr));
com.yahoo.data.access.Inspector value = jstr.inspect();
assertEquals(1L, value.field("foo").asLong());
@@ -126,6 +123,8 @@ public class SlimeSummaryTestCase {
assertEquals(tensor2, featureData.getTensor("tensor2_feature"));
}
+ @SuppressWarnings("removal") private static Object getParsedJSON(JSONString jstr) { return jstr.getParsedJSON(); }
+
@Test
public void testFieldAccessAPI() {
DocsumDefinitionSet partialDocsum1 = createDocsumDefinitionSet(partial_summary1_cf);
diff --git a/container-search/src/test/java/com/yahoo/search/handler/test/JSONSearchHandlerTestCase.java b/container-search/src/test/java/com/yahoo/search/handler/test/JSONSearchHandlerTestCase.java
index 3cca053d0e5..80e629ca4cb 100644
--- a/container-search/src/test/java/com/yahoo/search/handler/test/JSONSearchHandlerTestCase.java
+++ b/container-search/src/test/java/com/yahoo/search/handler/test/JSONSearchHandlerTestCase.java
@@ -1,10 +1,13 @@
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.search.handler.test;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yahoo.container.Container;
import com.yahoo.container.core.config.testutil.HandlersConfigurerTestWrapper;
import com.yahoo.container.jdisc.HttpRequest;
-
import com.yahoo.container.jdisc.RequestHandlerTestDriver;
import com.yahoo.container.protect.Error;
import com.yahoo.io.IOUtils;
@@ -13,8 +16,8 @@ import com.yahoo.search.handler.SearchHandler;
import com.yahoo.search.searchchain.config.test.SearchChainConfigurerTestCase;
import com.yahoo.slime.Inspector;
import com.yahoo.slime.SlimeUtils;
-import org.json.JSONArray;
-import org.json.JSONObject;
+import com.yahoo.test.json.JsonTestHelper;
+import org.assertj.core.api.Assertions;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
@@ -23,13 +26,19 @@ import org.junit.rules.TemporaryFolder;
import java.io.File;
import java.io.IOException;
-import java.util.Map;
+import java.nio.charset.StandardCharsets;
import java.util.HashMap;
+import java.util.Map;
import static com.yahoo.jdisc.http.HttpRequest.Method.GET;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
/**
* Tests submitting the query as JSON.
@@ -38,6 +47,8 @@ import static org.junit.Assert.*;
*/
public class JSONSearchHandlerTestCase {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
private static final String testDir = "src/test/java/com/yahoo/search/handler/test/config";
private static final String myHostnameHeader = "my-hostname-header";
private static final String selfHostname = HostName.getLocalhost();
@@ -97,8 +108,8 @@ public class JSONSearchHandlerTestCase {
}
@Test
- public void testFailing() throws Exception {
- JSONObject json = new JSONObject();
+ public void testFailing() {
+ ObjectNode json = jsonMapper.createObjectNode();
json.put("query", "test");
json.put("searchChain", "classLoadingError");
assertTrue(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE).readAll().contains("NoClassDefFoundError"));
@@ -106,16 +117,16 @@ public class JSONSearchHandlerTestCase {
@Test
- public synchronized void testPluginError() throws Exception {
- JSONObject json = new JSONObject();
+ public synchronized void testPluginError() {
+ ObjectNode json = jsonMapper.createObjectNode();
json.put("query", "test");
json.put("searchChain", "exceptionInPlugin");
assertTrue(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE).readAll().contains("NullPointerException"));
}
@Test
- public synchronized void testWorkingReconfiguration() throws Exception {
- JSONObject json = new JSONObject();
+ public synchronized void testWorkingReconfiguration() throws IOException {
+ ObjectNode json = jsonMapper.createObjectNode();
json.put("query", "abc");
assertJsonResult(json, driver);
@@ -135,7 +146,7 @@ public class JSONSearchHandlerTestCase {
}
@Test
- public void testInvalidYqlQuery() throws Exception {
+ public void testInvalidYqlQuery() throws IOException {
IOUtils.copyDirectory(new File(testDir, "config_yql"), new File(tempDir), 1);
generateComponentsConfigForActive();
configurer.reloadConfig();
@@ -143,7 +154,7 @@ public class JSONSearchHandlerTestCase {
SearchHandler newSearchHandler = fetchSearchHandler(configurer);
assertTrue("Do I have a new instance of the search handler?", searchHandler != newSearchHandler);
try (RequestHandlerTestDriver newDriver = new RequestHandlerTestDriver(newSearchHandler)) {
- JSONObject json = new JSONObject();
+ ObjectNode json = jsonMapper.createObjectNode();
json.put("yql", "select * from foo where bar > 1453501295");
RequestHandlerTestDriver.MockResponseHandler responseHandler = newDriver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE);
responseHandler.readAll();
@@ -153,14 +164,14 @@ public class JSONSearchHandlerTestCase {
// Query handling takes a different code path when a query profile is active, so we test both paths.
@Test
- public void testInvalidQueryParamWithQueryProfile() throws Exception {
+ public void testInvalidQueryParamWithQueryProfile() throws IOException {
try (RequestHandlerTestDriver newDriver = driverWithConfig("config_invalid_param")) {
testInvalidQueryParam(newDriver);
}
}
- private void testInvalidQueryParam(final RequestHandlerTestDriver testDriver) throws Exception {
- JSONObject json = new JSONObject();
+ private void testInvalidQueryParam(final RequestHandlerTestDriver testDriver) {
+ ObjectNode json = jsonMapper.createObjectNode();
json.put("query", "status_code:0");
json.put("hits", 20);
json.put("offset", -20);
@@ -173,16 +184,16 @@ public class JSONSearchHandlerTestCase {
}
@Test
- public void testNormalResultJsonAliasRendering() throws Exception {
- JSONObject json = new JSONObject();
+ public void testNormalResultJsonAliasRendering() {
+ ObjectNode json = jsonMapper.createObjectNode();
json.put("format", "json");
json.put("query", "abc");
assertJsonResult(json, driver);
}
@Test
- public void testNullQuery() throws Exception {
- JSONObject json = new JSONObject();
+ public void testNullQuery() {
+ ObjectNode json = jsonMapper.createObjectNode();
json.put("format", "xml");
assertEquals("<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n" +
@@ -195,8 +206,8 @@ public class JSONSearchHandlerTestCase {
}
@Test
- public void testWebServiceStatus() throws Exception {
- JSONObject json = new JSONObject();
+ public void testWebServiceStatus() {
+ ObjectNode json = jsonMapper.createObjectNode();
json.put("query", "web_service_status_code");
RequestHandlerTestDriver.MockResponseHandler responseHandler =
driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE);
@@ -206,39 +217,39 @@ public class JSONSearchHandlerTestCase {
}
@Test
- public void testNormalResultImplicitDefaultRendering() throws Exception {
- JSONObject json = new JSONObject();
+ public void testNormalResultImplicitDefaultRendering() {
+ ObjectNode json = jsonMapper.createObjectNode();
json.put("query", "abc");
assertJsonResult(json, driver);
}
@Test
- public void testNormalResultExplicitDefaultRendering() throws Exception {
- JSONObject json = new JSONObject();
+ public void testNormalResultExplicitDefaultRendering() {
+ ObjectNode json = jsonMapper.createObjectNode();
json.put("query", "abc");
json.put("format", "default");
assertJsonResult(json, driver);
}
@Test
- public void testNormalResultXmlAliasRendering() throws Exception {
- JSONObject json = new JSONObject();
+ public void testNormalResultXmlAliasRendering() {
+ ObjectNode json = jsonMapper.createObjectNode();
json.put("query", "abc");
json.put("format", "xml");
assertXmlResult(json, driver);
}
@Test
- public void testNormalResultExplicitDefaultRenderingFullRendererName1() throws Exception {
- JSONObject json = new JSONObject();
+ public void testNormalResultExplicitDefaultRenderingFullRendererName1() {
+ ObjectNode json = jsonMapper.createObjectNode();
json.put("query", "abc");
json.put("format", "XmlRenderer");
assertXmlResult(json, driver);
}
@Test
- public void testNormalResultExplicitDefaultRenderingFullRendererName2() throws Exception {
- JSONObject json = new JSONObject();
+ public void testNormalResultExplicitDefaultRenderingFullRendererName2() {
+ ObjectNode json = jsonMapper.createObjectNode();
json.put("query", "abc");
json.put("format", "JsonRenderer");
assertJsonResult(json, driver);
@@ -253,7 +264,7 @@ public class JSONSearchHandlerTestCase {
" </hit>\n" +
"</result>\n";
- private void assertXmlResult(JSONObject json, RequestHandlerTestDriver driver) {
+ private void assertXmlResult(JsonNode json, RequestHandlerTestDriver driver) {
assertOkResult(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE), xmlResult);
}
@@ -263,7 +274,7 @@ public class JSONSearchHandlerTestCase {
+ "{\"id\":\"testHit\",\"relevance\":1.0,\"fields\":{\"uri\":\"testHit\"}}"
+ "]}}";
- private void assertJsonResult(JSONObject json, RequestHandlerTestDriver driver) {
+ private void assertJsonResult(JsonNode json, RequestHandlerTestDriver driver) {
assertOkResult(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE), jsonResult);
}
@@ -288,7 +299,7 @@ public class JSONSearchHandlerTestCase {
}
- private RequestHandlerTestDriver driverWithConfig(String configDirectory) throws Exception {
+ private RequestHandlerTestDriver driverWithConfig(String configDirectory) throws IOException {
IOUtils.copyDirectory(new File(testDir, configDirectory), new File(tempDir), 1);
generateComponentsConfigForActive();
configurer.reloadConfig();
@@ -299,44 +310,44 @@ public class JSONSearchHandlerTestCase {
}
@Test
- public void testSelectParameters() throws Exception {
- JSONObject json = new JSONObject();
+ public void testSelectParameters() throws IOException {
+ ObjectNode json = jsonMapper.createObjectNode();
- JSONObject select = new JSONObject();
+ ObjectNode select = jsonMapper.createObjectNode();
- JSONObject where = new JSONObject();
+ ObjectNode where = jsonMapper.createObjectNode();
where.put("where", "where");
- JSONObject grouping = new JSONObject();
+ ObjectNode grouping = jsonMapper.createObjectNode();
grouping.put("grouping", "grouping");
- select.put("where", where);
- select.put("grouping", grouping);
+ select.set("where", where);
+ select.set("grouping", grouping);
- json.put("select", select);
+ json.set("select", select);
- Inspector inspector = SlimeUtils.jsonToSlime(json.toString().getBytes("utf-8")).get();
+ Inspector inspector = SlimeUtils.jsonToSlime(json.toString().getBytes(StandardCharsets.UTF_8)).get();
Map<String, String> map = new HashMap<>();
searchHandler.createRequestMapping(inspector, map, "");
- JSONObject processedWhere = new JSONObject(map.get("select.where"));
- assertEquals(where.toString(), processedWhere.toString());
+ JsonNode processedWhere = jsonMapper.readTree(map.get("select.where"));
+ JsonTestHelper.assertJsonEquals(where.toString(), processedWhere.toString());
- JSONObject processedGrouping = new JSONObject(map.get("select.grouping"));
- assertEquals(grouping.toString(), processedGrouping.toString());
+ JsonNode processedGrouping = jsonMapper.readTree(map.get("select.grouping"));
+ JsonTestHelper.assertJsonEquals(grouping.toString(), processedGrouping.toString());
}
@Test
- public void testJsonQueryWithSelectWhere() throws Exception {
- JSONObject root = new JSONObject();
- JSONObject select = new JSONObject();
- JSONObject where = new JSONObject();
- JSONArray term = new JSONArray();
- term.put("default");
- term.put("bad");
- where.put("contains", term);
- select.put("where", where);
- root.put("select", select);
+ public void testJsonQueryWithSelectWhere() {
+ ObjectNode root = jsonMapper.createObjectNode();
+ ObjectNode select = jsonMapper.createObjectNode();
+ ObjectNode where = jsonMapper.createObjectNode();
+ ArrayNode term = jsonMapper.createArrayNode();
+ term.add("default");
+ term.add("bad");
+ where.set("contains", term);
+ select.set("where", where);
+ root.set("select", select);
// Run query
String result = driver.sendRequest(uri + "searchChain=echoingQuery", com.yahoo.jdisc.http.HttpRequest.Method.POST, root.toString(), JSON_CONTENT_TYPE).readAll();
@@ -393,8 +404,8 @@ public class JSONSearchHandlerTestCase {
}
@Test
- public void testJsonQueryWithYQL() throws Exception {
- JSONObject root = new JSONObject();
+ public void testJsonQueryWithYQL() {
+ ObjectNode root = jsonMapper.createObjectNode();
root.put("yql", "select * from sources * where default contains 'bad';");
// Run query
@@ -404,10 +415,10 @@ public class JSONSearchHandlerTestCase {
}
@Test
- public void testRequestMapping() throws Exception {
- JSONObject json = new JSONObject();
+ public void testRequestMapping() {
+ ObjectNode json = jsonMapper.createObjectNode();
json.put("yql", "select * from sources * where sddocname contains \"blog_post\" limit 0 | all(group(date) max(3) order(-count())each(output(count())));");
- json.put("hits", 10.0);
+ json.put("hits", 10);
json.put("offset", 5);
json.put("queryProfile", "foo");
json.put("nocache", false);
@@ -417,7 +428,7 @@ public class JSONSearchHandlerTestCase {
json.put("select", "_all");
- JSONObject model = new JSONObject();
+ ObjectNode model = jsonMapper.createObjectNode();
model.put("defaultIndex", 1);
model.put("encoding", "json");
model.put("filter", "default");
@@ -427,9 +438,9 @@ public class JSONSearchHandlerTestCase {
model.put("searchPath", "node1");
model.put("sources", "source1,source2");
model.put("type", "yql");
- json.put("model", model);
+ json.set("model", model);
- JSONObject ranking = new JSONObject();
+ ObjectNode ranking = jsonMapper.createObjectNode();
ranking.put("location", "123789.89123N;128123W");
ranking.put("features", "none");
ranking.put("listFeatures", false);
@@ -439,61 +450,61 @@ public class JSONSearchHandlerTestCase {
ranking.put("freshness", "0.05");
ranking.put("queryCache", false);
- JSONObject matchPhase = new JSONObject();
+ ObjectNode matchPhase = jsonMapper.createObjectNode();
matchPhase.put("maxHits", "100");
matchPhase.put("attribute", "title");
matchPhase.put("ascending", true);
- JSONObject diversity = new JSONObject();
+ ObjectNode diversity = jsonMapper.createObjectNode();
diversity.put("attribute", "title");
diversity.put("minGroups", 1);
- matchPhase.put("diversity", diversity);
- ranking.put("matchPhase", matchPhase);
- json.put("ranking", ranking);
+ matchPhase.set("diversity", diversity);
+ ranking.set("matchPhase", matchPhase);
+ json.set("ranking", ranking);
- JSONObject presentation = new JSONObject();
+ ObjectNode presentation = jsonMapper.createObjectNode();
presentation.put("bolding", true);
presentation.put("format", "json");
presentation.put("summary", "none");
presentation.put("template", "json");
presentation.put("timing", false);
- json.put("presentation", presentation);
+ json.set("presentation", presentation);
- JSONObject collapse = new JSONObject();
+ ObjectNode collapse = jsonMapper.createObjectNode();
collapse.put("field", "none");
collapse.put("size", 2);
collapse.put("summary", "default");
- json.put("collapse", collapse);
+ json.set("collapse", collapse);
- JSONObject trace = new JSONObject();
+ ObjectNode trace = jsonMapper.createObjectNode();
trace.put("level", 1);
trace.put("timestamps", false);
trace.put("rules", "none");
- json.put("trace", trace);
+ json.set("trace", trace);
- JSONObject pos = new JSONObject();
+ ObjectNode pos = jsonMapper.createObjectNode();
pos.put("ll", "1263123N;1231.9W");
pos.put("radius", "71234m");
pos.put("bb", "1237123W;123218N");
pos.put("attribute", "default");
- json.put("pos", pos);
+ json.set("pos", pos);
- JSONObject streaming = new JSONObject();
+ ObjectNode streaming = jsonMapper.createObjectNode();
streaming.put("userid", 123);
streaming.put("groupname", "abc");
streaming.put("selection", "none");
streaming.put("priority", 10);
streaming.put("maxbucketspervisitor", 5);
- json.put("streaming", streaming);
+ json.set("streaming", streaming);
- JSONObject rules = new JSONObject();
+ ObjectNode rules = jsonMapper.createObjectNode();
rules.put("off", false);
rules.put("rulebase", "default");
- json.put("rules", rules);
+ json.set("rules", rules);
- JSONObject metrics = new JSONObject();
+ ObjectNode metrics = jsonMapper.createObjectNode();
metrics.put("ignore", "_all");
- json.put("metrics", metrics);
+ json.set("metrics", metrics);
json.put("recall", "none");
json.put("user", 123);
@@ -501,7 +512,7 @@ public class JSONSearchHandlerTestCase {
json.put("hitcountestimate", true);
// Create mapping
- Inspector inspector = SlimeUtils.jsonToSlime(json.toString().getBytes("utf-8")).get();
+ Inspector inspector = SlimeUtils.jsonToSlime(json.toString().getBytes(StandardCharsets.UTF_8)).get();
Map<String, String> map = new HashMap<>();
searchHandler.createRequestMapping(inspector, map, "");
@@ -518,12 +529,12 @@ public class JSONSearchHandlerTestCase {
// Get mapping
Map<String, String> propertyMap = request.propertyMap();
- assertEquals("Should have same mapping for properties", map, propertyMap);
+ Assertions.assertThat(propertyMap).isEqualTo(map);
}
@Test
- public void testContentTypeParsing() throws Exception {
- JSONObject json = new JSONObject();
+ public void testContentTypeParsing() {
+ ObjectNode json = jsonMapper.createObjectNode();
json.put("query", "abc");
assertOkResult(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), "Application/JSON; charset=utf-8"), jsonResult);
}
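
The test changes above follow one mechanical pattern: org.json's JSONObject/JSONArray become Jackson's ObjectNode/ArrayNode built from a shared ObjectMapper, scalar fields keep put(...), and nested nodes switch to set(...). A minimal sketch of that pattern, assuming jackson-databind 2.x on the classpath; the class name and literal values here are illustrative, not part of the change:

    // Sketch of the org.json -> Jackson pattern used in the tests above.
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.node.ArrayNode;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    class QueryJsonSketch {
        private static final ObjectMapper jsonMapper = new ObjectMapper();   // shared, thread-safe

        static String buildQueryJson() {
            ObjectNode root = jsonMapper.createObjectNode();
            root.put("query", "abc");                 // scalars keep put(...)
            root.put("hits", 10);

            ObjectNode presentation = jsonMapper.createObjectNode();
            presentation.put("format", "json");
            root.set("presentation", presentation);   // nested nodes use set(...), not put(...)

            ArrayNode sources = jsonMapper.createArrayNode().add("source1").add("source2");
            root.set("sources", sources);

            return root.toString();                   // compact JSON, like JSONObject.toString()
        }
    }
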
diff --git a/container-search/src/test/java/com/yahoo/search/rendering/JsonRendererTestCase.java b/container-search/src/test/java/com/yahoo/search/rendering/JsonRendererTestCase.java
index c4d49c11f5e..48003f6586f 100644
--- a/container-search/src/test/java/com/yahoo/search/rendering/JsonRendererTestCase.java
+++ b/container-search/src/test/java/com/yahoo/search/rendering/JsonRendererTestCase.java
@@ -1,7 +1,6 @@
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.search.rendering;
-import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.util.concurrent.ListenableFuture;
@@ -56,9 +55,6 @@ import com.yahoo.tensor.TensorType;
import com.yahoo.tensor.serialization.TypedBinaryFormat;
import com.yahoo.text.Utf8;
import com.yahoo.yolean.trace.TraceNode;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
@@ -67,7 +63,6 @@ import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
-import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
@@ -83,6 +78,8 @@ import static org.junit.Assert.assertTrue;
*/
public class JsonRendererTestCase {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
private JsonRenderer originalRenderer;
private JsonRenderer renderer;
@@ -959,7 +956,7 @@ public class JsonRendererTestCase {
}
@Test
- public void testJsonObjects() throws InterruptedException, ExecutionException, IOException, JSONException {
+ public void testJsonObjects() throws InterruptedException, ExecutionException, IOException {
String expected = "{"
+ " \"root\": {"
+ " \"children\": ["
@@ -973,14 +970,6 @@ public class JsonRendererTestCase {
+ " },"
+ " \"json producer\": {"
+ " \"long in structured\": 7809531904"
- + " },"
- + " \"org.json array\": ["
- + " true,"
- + " true,"
- + " false"
- + " ],"
- + " \"org.json object\": {"
- + " \"forty-two\": 42"
+ " }"
+ " },"
+ " \"id\": \"json objects\","
@@ -996,26 +985,17 @@ public class JsonRendererTestCase {
+ "}";
Result r = newEmptyResult();
Hit h = new Hit("json objects");
- JSONObject o = new JSONObject();
- JSONArray a = new JSONArray();
- ObjectMapper mapper = new ObjectMapper();
- JsonNode j = mapper.createObjectNode();
+ ObjectNode j = jsonMapper.createObjectNode();
JSONString s = new JSONString("{\"a\": \"b\"}");
Slime slime = new Slime();
Cursor c = slime.setObject();
c.setLong("long in structured", 7809531904L);
SlimeAdapter slimeInit = new SlimeAdapter(slime.get());
StructuredData struct = new StructuredData(slimeInit);
- ((ObjectNode) j).put("Nineteen-eighty-four", 1984);
- o.put("forty-two", 42);
- a.put(true);
- a.put(true);
- a.put(false);
+ j.put("Nineteen-eighty-four", 1984);
h.setField("inspectable", s);
h.setField("jackson", j);
h.setField("json producer", struct);
- h.setField("org.json array", a);
- h.setField("org.json object", o);
r.hits().add(h);
String summary = render(r);
assertEqualJson(expected, summary);
@@ -1236,11 +1216,13 @@ public class JsonRendererTestCase {
public void testThatTheJsonValidatorCanCatchErrors() {
String json = "{"
+ " \"root\": {"
- + " \"duplicate\": 1,"
- + " \"duplicate\": 2"
+ + " \"invalidvalue\": 1adsf,"
+ " }"
+ "}";
- assertEquals("Duplicate key \"duplicate\"", validateJSON(json));
+ assertEquals(
+ "Unexpected character ('a' (code 97)): was expecting comma to separate Object entries\n" +
+ " at [Source: { \"root\": { \"invalidvalue\": 1adsf, }}; line: 1, column: 41]",
+ validateJSON(json));
}
@Test
@@ -1316,9 +1298,9 @@ public class JsonRendererTestCase {
private String validateJSON(String presumablyValidJson) {
try {
- new JSONObject(presumablyValidJson);
+ jsonMapper.readTree(presumablyValidJson);
return "";
- } catch (JSONException e) {
+ } catch (IOException e) {
return e.getMessage();
}
}
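
validateJSON above now relies on Jackson's readTree to detect malformed input; parse failures surface as JsonProcessingException (a subclass of IOException) whose message carries the offending location, which is what the updated expected error string asserts. A minimal standalone sketch, assuming jackson-databind 2.x; the class and method names are illustrative:

    import java.io.IOException;
    import com.fasterxml.jackson.databind.ObjectMapper;

    class JsonValidationSketch {
        private static final ObjectMapper jsonMapper = new ObjectMapper();

        /** Returns an empty string if the input parses, otherwise the parser's error message. */
        static String firstParseError(String presumablyValidJson) {
            try {
                jsonMapper.readTree(presumablyValidJson);
                return "";
            } catch (IOException e) {
                return e.getMessage();   // e.g. "Unexpected character ... line: 1, column: 41"
            }
        }
    }
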
diff --git a/container-search/src/test/java/com/yahoo/select/SelectTestCase.java b/container-search/src/test/java/com/yahoo/select/SelectTestCase.java
index 3239a97a094..9bcd3addd92 100644
--- a/container-search/src/test/java/com/yahoo/select/SelectTestCase.java
+++ b/container-search/src/test/java/com/yahoo/select/SelectTestCase.java
@@ -1,6 +1,9 @@
// Copyright 2018 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.select;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yahoo.prelude.query.AndItem;
import com.yahoo.prelude.query.ExactStringItem;
import com.yahoo.prelude.query.Item;
@@ -17,19 +20,15 @@ import com.yahoo.prelude.query.WordItem;
import com.yahoo.processing.IllegalInputException;
import com.yahoo.search.Query;
import com.yahoo.search.grouping.GroupingRequest;
-import com.yahoo.search.grouping.request.AllOperation;
import com.yahoo.search.query.QueryTree;
import com.yahoo.search.query.Select;
import com.yahoo.search.query.SelectParser;
import com.yahoo.search.query.parser.Parsable;
import com.yahoo.search.query.parser.ParserEnvironment;
import com.yahoo.search.yql.VespaGroupingStep;
-import org.json.JSONException;
-import org.json.JSONObject;
import org.junit.Test;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
@@ -48,15 +47,18 @@ import static org.junit.Assert.fail;
*/
public class SelectTestCase {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
private final SelectParser parser = new SelectParser(new ParserEnvironment());
//------------------------------------------------------------------- "where" tests
@Test
- public void test_contains() throws Exception {
- JSONObject json = new JSONObject();
- List<String> contains = Arrays.asList("default", "foo");
- json.put("contains", contains);
+ public void test_contains() {
+ ObjectNode json = jsonMapper.createObjectNode();
+ ArrayNode arrayNode = jsonMapper.createArrayNode();
+ arrayNode.add("default").add("foo");
+ json.set("contains", arrayNode);
assertParse(json.toString(), "default:foo");
}
@@ -77,21 +79,21 @@ public class SelectTestCase {
@Test
public void testOr() throws Exception {
- JSONObject json_two_or = new JSONObject();
- JSONObject json_three_or = new JSONObject();
- List<String> contains1 = Arrays.asList("title", "madonna");
- List<String> contains2 = Arrays.asList("title", "saint");
- List<String> contains3 = Arrays.asList("title", "angel");
-
- JSONObject contains_json1 = new JSONObject();
- JSONObject contains_json2 = new JSONObject();
- JSONObject contains_json3 = new JSONObject();
- contains_json1.put("contains", contains1);
- contains_json2.put("contains", contains2);
- contains_json3.put("contains", contains3);
-
- json_two_or.put("or", Arrays.asList(contains_json1, contains_json2));
- json_three_or.put("or", Arrays.asList(contains_json1, contains_json2, contains_json3));
+ ObjectNode json_two_or = jsonMapper.createObjectNode();
+ ObjectNode json_three_or = jsonMapper.createObjectNode();
+ ArrayNode contains1 = jsonMapper.createArrayNode().add("title").add("madonna");
+ ArrayNode contains2 = jsonMapper.createArrayNode().add("title").add("saint");
+ ArrayNode contains3 = jsonMapper.createArrayNode().add("title").add("angel");
+
+ ObjectNode contains_json1 = jsonMapper.createObjectNode();
+ ObjectNode contains_json2 = jsonMapper.createObjectNode();
+ ObjectNode contains_json3 = jsonMapper.createObjectNode();
+ contains_json1.set("contains", contains1);
+ contains_json2.set("contains", contains2);
+ contains_json3.set("contains", contains3);
+
+ json_two_or.set("or", jsonMapper.createArrayNode().add(contains_json1).add(contains_json2));
+ json_three_or.set("or", jsonMapper.createArrayNode().add(contains_json1).add(contains_json2).add(contains_json3));
assertParse(json_two_or.toString(), "OR title:madonna title:saint");
assertParse(json_three_or.toString(), "OR title:madonna title:saint title:angel");
@@ -99,178 +101,178 @@ public class SelectTestCase {
@Test
public void testAnd() throws Exception{
- JSONObject json_two_and = new JSONObject();
- JSONObject json_three_and = new JSONObject();
- List<String> contains1 = Arrays.asList("title", "madonna");
- List<String> contains2 = Arrays.asList("title", "saint");
- List<String> contains3 = Arrays.asList("title", "angel");
-
- JSONObject contains_json1 = new JSONObject();
- JSONObject contains_json2 = new JSONObject();
- JSONObject contains_json3 = new JSONObject();
- contains_json1.put("contains", contains1);
- contains_json2.put("contains", contains2);
- contains_json3.put("contains", contains3);
-
- json_two_and.put("and", Arrays.asList(contains_json1, contains_json2));
- json_three_and.put("and", Arrays.asList(contains_json1, contains_json2, contains_json3));
+ ObjectNode json_two_and = jsonMapper.createObjectNode();
+ ObjectNode json_three_and = jsonMapper.createObjectNode();
+ ArrayNode contains1 = jsonMapper.createArrayNode().add("title").add("madonna");
+ ArrayNode contains2 = jsonMapper.createArrayNode().add("title").add("saint");
+ ArrayNode contains3 = jsonMapper.createArrayNode().add("title").add("angel");
+
+ ObjectNode contains_json1 = jsonMapper.createObjectNode();
+ ObjectNode contains_json2 = jsonMapper.createObjectNode();
+ ObjectNode contains_json3 = jsonMapper.createObjectNode();
+ contains_json1.set("contains", contains1);
+ contains_json2.set("contains", contains2);
+ contains_json3.set("contains", contains3);
+
+ json_two_and.set("and", jsonMapper.createArrayNode().add(contains_json1).add(contains_json2));
+ json_three_and.set("and", jsonMapper.createArrayNode().add(contains_json1).add(contains_json2).add(contains_json3));
assertParse(json_two_and.toString(), "AND title:madonna title:saint");
assertParse(json_three_and.toString(), "AND title:madonna title:saint title:angel");
}
@Test
- public void testAndNot() throws JSONException {
- JSONObject json_and_not = new JSONObject();
- List<String> contains1 = Arrays.asList("title", "madonna");
- List<String> contains2 = Arrays.asList("title", "saint");
+ public void testAndNot() {
+ ObjectNode json_and_not = jsonMapper.createObjectNode();
+ ArrayNode contains1 = jsonMapper.createArrayNode().add("title").add("madonna");
+ ArrayNode contains2 = jsonMapper.createArrayNode().add("title").add("saint");
- JSONObject contains_json1 = new JSONObject();
- JSONObject contains_json2 = new JSONObject();
- contains_json1.put("contains", contains1);
- contains_json2.put("contains", contains2);
+ ObjectNode contains_json1 = jsonMapper.createObjectNode();
+ ObjectNode contains_json2 = jsonMapper.createObjectNode();
+ contains_json1.set("contains", contains1);
+ contains_json2.set("contains", contains2);
- json_and_not.put("and_not", Arrays.asList(contains_json1, contains_json2));
+ json_and_not.set("and_not", jsonMapper.createArrayNode().add(contains_json1).add(contains_json2));
assertParse(json_and_not.toString(),
"+title:madonna -title:saint");
}
@Test
- public void testLessThan() throws JSONException {
- JSONObject range_json = new JSONObject();
- JSONObject operators = new JSONObject();
+ public void testLessThan() {
+ ObjectNode range_json = jsonMapper.createObjectNode();
+ ObjectNode operators = jsonMapper.createObjectNode();
operators.put("<", 500);
- List<Object> range = Arrays.asList("price", operators);
+ ArrayNode range = jsonMapper.createArrayNode().add("price").add(operators);
- range_json.put("range", range);
+ range_json.set("range", range);
assertParse(range_json.toString(),
"price:<500");
}
@Test
- public void testGreaterThan() throws JSONException {
- JSONObject range_json = new JSONObject();
- JSONObject operators = new JSONObject();
+ public void testGreaterThan() {
+ ObjectNode range_json = jsonMapper.createObjectNode();
+ ObjectNode operators = jsonMapper.createObjectNode();
operators.put(">", 500);
- List<Object> range = Arrays.asList("price", operators);
+ ArrayNode range = jsonMapper.createArrayNode().add("price").add(operators);
- range_json.put("range", range);
+ range_json.set("range", range);
assertParse(range_json.toString(),
"price:>500");
}
@Test
- public void testLessThanOrEqual() throws JSONException {
- JSONObject range_json = new JSONObject();
- JSONObject operators = new JSONObject();
+ public void testLessThanOrEqual() {
+ ObjectNode range_json = jsonMapper.createObjectNode();
+ ObjectNode operators = jsonMapper.createObjectNode();
operators.put("<=", 500);
- List<Object> range = Arrays.asList("price", operators);
+ ArrayNode range = jsonMapper.createArrayNode().add("price").add(operators);
- range_json.put("range", range);
+ range_json.set("range", range);
assertParse(range_json.toString(),
"price:[;500]");
}
@Test
- public void testGreaterThanOrEqual() throws JSONException {
- JSONObject range_json = new JSONObject();
- JSONObject operators = new JSONObject();
+ public void testGreaterThanOrEqual() {
+ ObjectNode range_json = jsonMapper.createObjectNode();
+ ObjectNode operators = jsonMapper.createObjectNode();
operators.put(">=", 500);
- List<Object> range = Arrays.asList("price", operators);
+ ArrayNode range = jsonMapper.createArrayNode().add("price").add(operators);
- range_json.put("range", range);
+ range_json.set("range", range);
assertParse(range_json.toString(),
"price:[500;]");
}
@Test
- public void testEquality() throws JSONException {
- JSONObject range_json = new JSONObject();
- JSONObject operators = new JSONObject();
+ public void testEquality() {
+ ObjectNode range_json = jsonMapper.createObjectNode();
+ ObjectNode operators = jsonMapper.createObjectNode();
operators.put("=", 500);
- List<Object> range = Arrays.asList("price", operators);
+ ArrayNode range = jsonMapper.createArrayNode().add("price").add(operators);
- range_json.put("range", range);
+ range_json.set("range", range);
assertParse(range_json.toString(),
"price:500");
}
@Test
- public void testNegativeLessThan() throws JSONException {
- JSONObject range_json = new JSONObject();
- JSONObject operators = new JSONObject();
+ public void testNegativeLessThan() {
+ ObjectNode range_json = jsonMapper.createObjectNode();
+ ObjectNode operators = jsonMapper.createObjectNode();
operators.put("<", -500);
- List<Object> range = Arrays.asList("price", operators);
+ ArrayNode range = jsonMapper.createArrayNode().add("price").add(operators);
- range_json.put("range", range);
+ range_json.set("range", range);
assertParse(range_json.toString(),
"price:<-500");
}
@Test
- public void testNegativeGreaterThan() throws JSONException {
- JSONObject range_json = new JSONObject();
- JSONObject operators = new JSONObject();
+ public void testNegativeGreaterThan() {
+ ObjectNode range_json = jsonMapper.createObjectNode();
+ ObjectNode operators = jsonMapper.createObjectNode();
operators.put(">", -500);
- List<Object> range = Arrays.asList("price", operators);
+ ArrayNode range = jsonMapper.createArrayNode().add("price").add(operators);
- range_json.put("range", range);
+ range_json.set("range", range);
assertParse(range_json.toString(),
"price:>-500");
}
@Test
- public void testNegativeLessThanOrEqual() throws JSONException {
- JSONObject range_json = new JSONObject();
- JSONObject operators = new JSONObject();
+ public void testNegativeLessThanOrEqual() {
+ ObjectNode range_json = jsonMapper.createObjectNode();
+ ObjectNode operators = jsonMapper.createObjectNode();
operators.put("<=", -500);
- List<Object> range = Arrays.asList("price", operators);
+ ArrayNode range = jsonMapper.createArrayNode().add("price").add(operators);
- range_json.put("range", range);
+ range_json.set("range", range);
assertParse(range_json.toString(),
"price:[;-500]");
}
@Test
- public void testNegativeGreaterThanOrEqual() throws JSONException {
- JSONObject range_json = new JSONObject();
- JSONObject operators = new JSONObject();
+ public void testNegativeGreaterThanOrEqual() {
+ ObjectNode range_json = jsonMapper.createObjectNode();
+ ObjectNode operators = jsonMapper.createObjectNode();
operators.put(">=", -500);
- List<Object> range = Arrays.asList("price", operators);
+ ArrayNode range = jsonMapper.createArrayNode().add("price").add(operators);
- range_json.put("range", range);
+ range_json.set("range", range);
assertParse(range_json.toString(),
"price:[-500;]");
}
@Test
- public void testNegativeEquality() throws JSONException {
- JSONObject range_json = new JSONObject();
- JSONObject operators = new JSONObject();
+ public void testNegativeEquality() {
+ ObjectNode range_json = jsonMapper.createObjectNode();
+ ObjectNode operators = jsonMapper.createObjectNode();
operators.put("=", -500);
- List<Object> range = Arrays.asList("price", operators);
+ ArrayNode range = jsonMapper.createArrayNode().add("price").add(operators);
- range_json.put("range", range);
+ range_json.set("range", range);
assertParse(range_json.toString(),
"price:-500");
diff --git a/controller-api/src/main/java/com/yahoo/vespa/hosted/controller/api/application/v4/model/ProtonMetrics.java b/controller-api/src/main/java/com/yahoo/vespa/hosted/controller/api/application/v4/model/ProtonMetrics.java
index c6d907ec7fc..ed88902e094 100644
--- a/controller-api/src/main/java/com/yahoo/vespa/hosted/controller/api/application/v4/model/ProtonMetrics.java
+++ b/controller-api/src/main/java/com/yahoo/vespa/hosted/controller/api/application/v4/model/ProtonMetrics.java
@@ -1,14 +1,19 @@
package com.yahoo.vespa.hosted.controller.api.application.v4.model;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
import java.util.HashMap;
import java.util.Map;
+import java.util.logging.Level;
import java.util.logging.LogManager;
import java.util.logging.Logger;
-import org.json.JSONException;
-import org.json.JSONObject;
public class ProtonMetrics {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
private static final Logger logger = LogManager.getLogManager().getLogger(ProtonMetrics.class.getName());
public static final String DOCUMENTS_ACTIVE_COUNT = "documentsActiveCount";
@@ -45,19 +50,19 @@ public class ProtonMetrics {
return this;
}
- public JSONObject toJson() {
+ public JsonNode toJson() {
try {
- JSONObject protonMetrics = new JSONObject();
+ ObjectNode protonMetrics = jsonMapper.createObjectNode();
protonMetrics.put("clusterId", clusterId);
- JSONObject jsonMetrics = new JSONObject();
+ ObjectNode jsonMetrics = jsonMapper.createObjectNode();
for (Map.Entry<String, Double> entry : metrics.entrySet()) {
jsonMetrics.put(entry.getKey(), entry.getValue());
}
- protonMetrics.put("metrics", jsonMetrics);
+ protonMetrics.set("metrics", jsonMetrics);
return protonMetrics;
- } catch (JSONException e) {
- logger.severe("Unable to convert Proton Metrics to JSON Object");
+ } catch (Exception e) {
+ logger.log(Level.SEVERE, "Unable to convert Proton Metrics to JSON Object: " + e.getMessage(), e);
}
- return new JSONObject();
+ return jsonMapper.createObjectNode();
}
}
diff --git a/controller-server/src/main/java/com/yahoo/vespa/hosted/controller/dns/NameServiceQueue.java b/controller-server/src/main/java/com/yahoo/vespa/hosted/controller/dns/NameServiceQueue.java
index 786547d4a67..8a48cbd281d 100644
--- a/controller-server/src/main/java/com/yahoo/vespa/hosted/controller/dns/NameServiceQueue.java
+++ b/controller-server/src/main/java/com/yahoo/vespa/hosted/controller/dns/NameServiceQueue.java
@@ -2,6 +2,7 @@
package com.yahoo.vespa.hosted.controller.dns;
import com.yahoo.vespa.hosted.controller.api.integration.dns.NameService;
+import com.yahoo.yolean.Exceptions;
import java.util.ArrayList;
import java.util.Collection;
@@ -82,7 +83,7 @@ public class NameServiceQueue {
request.dispatchTo(nameService);
queue.requests.poll();
} catch (Exception e) {
- log.log(Level.WARNING, "Failed to execute " + request + ": " + e.getMessage() +
+ log.log(Level.WARNING, "Failed to execute " + request + ": " + Exceptions.toMessageString(e) +
", request will be retried");
}
}
diff --git a/controller-server/src/main/java/com/yahoo/vespa/hosted/controller/maintenance/ControllerMaintenance.java b/controller-server/src/main/java/com/yahoo/vespa/hosted/controller/maintenance/ControllerMaintenance.java
index 56b7e5b2e46..979cd9060d9 100644
--- a/controller-server/src/main/java/com/yahoo/vespa/hosted/controller/maintenance/ControllerMaintenance.java
+++ b/controller-server/src/main/java/com/yahoo/vespa/hosted/controller/maintenance/ControllerMaintenance.java
@@ -118,13 +118,13 @@ public class ControllerMaintenance extends AbstractComponent {
this.outstandingChangeDeployer = duration(3, MINUTES);
this.versionStatusUpdater = duration(3, MINUTES);
this.readyJobsTrigger = duration(1, MINUTES);
- this.deploymentMetricsMaintainer = duration(5, MINUTES);
+ this.deploymentMetricsMaintainer = duration(10, MINUTES);
this.applicationOwnershipConfirmer = duration(12, HOURS);
- this.systemUpgrader = duration(1, MINUTES);
+ this.systemUpgrader = duration(90, SECONDS);
this.jobRunner = duration(90, SECONDS);
this.osUpgrader = duration(1, MINUTES);
this.contactInformationMaintainer = duration(12, HOURS);
- this.nameServiceDispatcher = duration(10, SECONDS);
+ this.nameServiceDispatcher = duration(30, SECONDS);
this.costReportMaintainer = duration(2, HOURS);
this.resourceMeterMaintainer = duration(1, MINUTES);
this.cloudEventReporter = duration(30, MINUTES);
diff --git a/controller-server/src/main/java/com/yahoo/vespa/hosted/controller/maintenance/JobRunner.java b/controller-server/src/main/java/com/yahoo/vespa/hosted/controller/maintenance/JobRunner.java
index d6739581c79..73528977166 100644
--- a/controller-server/src/main/java/com/yahoo/vespa/hosted/controller/maintenance/JobRunner.java
+++ b/controller-server/src/main/java/com/yahoo/vespa/hosted/controller/maintenance/JobRunner.java
@@ -80,10 +80,12 @@ public class JobRunner extends ControllerMaintainer {
/** Advances each of the ready steps for the given run, or marks it as finished, and stashes it. Public for testing. */
public void advance(Run run) {
if ( ! run.hasFailed()
- && controller().clock().instant().isAfter(run.start().plus(jobTimeout))) {
- jobs.abort(run.id());
- advance(jobs.run(run.id()).get());
- }
+ && controller().clock().instant().isAfter(run.start().plus(jobTimeout)))
+ executors.execute(() -> {
+ jobs.abort(run.id());
+ advance(jobs.run(run.id()).get());
+ });
+
else if (run.readySteps().isEmpty())
executors.execute(() -> finish(run.id()));
else
diff --git a/controller-server/src/main/java/com/yahoo/vespa/hosted/controller/restapi/application/ApplicationApiHandler.java b/controller-server/src/main/java/com/yahoo/vespa/hosted/controller/restapi/application/ApplicationApiHandler.java
index 5a1496bf507..9331e5086cc 100644
--- a/controller-server/src/main/java/com/yahoo/vespa/hosted/controller/restapi/application/ApplicationApiHandler.java
+++ b/controller-server/src/main/java/com/yahoo/vespa/hosted/controller/restapi/application/ApplicationApiHandler.java
@@ -2,6 +2,8 @@
package com.yahoo.vespa.hosted.controller.restapi.application;
import ai.vespa.hosted.api.Signatures;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableSet;
import com.google.inject.Inject;
@@ -100,9 +102,6 @@ import com.yahoo.vespa.hosted.controller.versions.VersionStatus;
import com.yahoo.vespa.hosted.controller.versions.VespaVersion;
import com.yahoo.vespa.serviceview.bindings.ApplicationView;
import com.yahoo.yolean.Exceptions;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
import javax.ws.rs.ForbiddenException;
import javax.ws.rs.InternalServerErrorException;
@@ -151,6 +150,8 @@ import static java.util.stream.Collectors.toUnmodifiableList;
@SuppressWarnings("unused") // created by injection
public class ApplicationApiHandler extends LoggingRequestHandler {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
private static final String OPTIONAL_PREFIX = "/api";
private final Controller controller;
@@ -789,15 +790,15 @@ public class ApplicationApiHandler extends LoggingRequestHandler {
private JsonResponse buildResponseFromProtonMetrics(List<ProtonMetrics> protonMetrics) {
try {
- var jsonObject = new JSONObject();
- var jsonArray = new JSONArray();
+ var jsonObject = jsonMapper.createObjectNode();
+ var jsonArray = jsonMapper.createArrayNode();
for (ProtonMetrics metrics : protonMetrics) {
- jsonArray.put(metrics.toJson());
+ jsonArray.add(metrics.toJson());
}
- jsonObject.put("metrics", jsonArray);
- return new JsonResponse(200, jsonObject.toString());
- } catch (JSONException e) {
- log.severe("Unable to build JsonResponse with Proton data");
+ jsonObject.set("metrics", jsonArray);
+ return new JsonResponse(200, jsonMapper.writerWithDefaultPrettyPrinter().writeValueAsString(jsonObject));
+ } catch (JsonProcessingException e) {
+ log.log(Level.SEVERE, "Unable to build JsonResponse with Proton data: " + e.getMessage(), e);
return new JsonResponse(500, "");
}
}
@@ -1682,11 +1683,20 @@ public class ApplicationApiHandler extends LoggingRequestHandler {
controller.jobController().deploy(id, type, version, applicationPackage);
RunId runId = controller.jobController().last(id, type).get().id();
+ DeploymentId deploymentId = new DeploymentId(id, type.zone(controller.system()));
+
Slime slime = new Slime();
Cursor rootObject = slime.setObject();
rootObject.setString("message", "Deployment started in " + runId +
". This may take about 15 minutes the first time.");
rootObject.setLong("run", runId.number());
+ var endpointArray = rootObject.setArray("endpoints");
+ EndpointList zoneEndpoints = controller.routing().endpointsOf(deploymentId)
+ .scope(Endpoint.Scope.zone)
+ .not().legacy();
+ for (var endpoint : controller.routing().directEndpoints(zoneEndpoints, deploymentId.applicationId())) {
+ toSlime(endpoint, endpointArray.addObject());
+ }
return new SlimeJsonResponse(slime);
}
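
buildResponseFromProtonMetrics above now assembles the response with Jackson and serializes it through writerWithDefaultPrettyPrinter, which is why proton-metrics.json is reformatted further below and the test switches to a structural JSON comparison. A minimal sketch of the same flow, assuming jackson-databind 2.x and that the per-cluster metrics are already available as JsonNode values (as ProtonMetrics.toJson() returns above); the class and method names are illustrative:

    import java.util.List;
    import com.fasterxml.jackson.core.JsonProcessingException;
    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.node.ArrayNode;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    class MetricsResponseSketch {
        private static final ObjectMapper jsonMapper = new ObjectMapper();

        static String render(List<JsonNode> clusterMetrics) throws JsonProcessingException {
            ObjectNode root = jsonMapper.createObjectNode();
            ArrayNode metrics = jsonMapper.createArrayNode();
            for (JsonNode node : clusterMetrics)
                metrics.add(node);                    // ArrayNode.add accepts any JsonNode
            root.set("metrics", metrics);
            // Pretty-printing only changes whitespace, so the test compares JSON
            // structurally (assertJsonResponse / JsonTestHelper) rather than raw strings.
            return jsonMapper.writerWithDefaultPrettyPrinter().writeValueAsString(root);
        }
    }
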
diff --git a/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/ContainerTester.java b/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/ContainerTester.java
index bda5a708a94..2bf6eb39089 100644
--- a/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/ContainerTester.java
+++ b/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/ContainerTester.java
@@ -9,6 +9,7 @@ import com.yahoo.config.provision.ApplicationName;
import com.yahoo.container.http.filter.FilterChainRepository;
import com.yahoo.jdisc.http.filter.SecurityRequestFilter;
import com.yahoo.jdisc.http.filter.SecurityRequestFilterChain;
+import com.yahoo.test.json.JsonTestHelper;
import com.yahoo.vespa.athenz.api.AthenzDomain;
import com.yahoo.vespa.athenz.api.AthenzIdentity;
import com.yahoo.vespa.hosted.controller.Controller;
@@ -65,6 +66,10 @@ public class ContainerTester {
.addRoleMember(action, identity);
}
+ public void assertJsonResponse(Supplier<Request> request, File responseFile) {
+ assertResponse(request.get(), responseFile, 200, false, true);
+ }
+
public void assertResponse(Supplier<Request> request, File responseFile) {
assertResponse(request.get(), responseFile);
}
@@ -82,6 +87,10 @@ public class ContainerTester {
}
public void assertResponse(Request request, File responseFile, int expectedStatusCode, boolean removeWhitespace) {
+ assertResponse(request, responseFile, expectedStatusCode, removeWhitespace, false);
+ }
+
+ private void assertResponse(Request request, File responseFile, int expectedStatusCode, boolean removeWhitespace, boolean compareJson) {
String expectedResponse = readTestFile(responseFile.toString());
expectedResponse = include(expectedResponse);
if (removeWhitespace) expectedResponse = expectedResponse.replaceAll("(\"[^\"]*\")|\\s*", "$1"); // Remove whitespace
@@ -106,7 +115,11 @@ public class ContainerTester {
expectedResponsePattern, responseString);
}
} else {
- assertEquals(responseFile.toString(), expectedResponse, responseString);
+ if (compareJson) {
+ JsonTestHelper.assertJsonEquals(expectedResponse, responseString);
+ } else {
+ assertEquals(responseFile.toString(), expectedResponse, responseString);
+ }
}
assertEquals("Status code", expectedStatusCode, response.getStatus());
}
diff --git a/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/ApplicationApiTest.java b/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/ApplicationApiTest.java
index 434c83898ee..6626134b69a 100644
--- a/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/ApplicationApiTest.java
+++ b/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/ApplicationApiTest.java
@@ -242,7 +242,8 @@ public class ApplicationApiTest extends ControllerContainerTest {
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploy/production-us-east-3/", POST)
.data(entity)
.userIdentity(HOSTED_VESPA_OPERATOR),
- "{\"message\":\"Deployment started in run 1 of production-us-east-3 for tenant1.application1.instance1. This may take about 15 minutes the first time.\",\"run\":1}");
+ "{\"message\":\"Deployment started in run 1 of production-us-east-3 for tenant1.application1.instance1. This may take about 15 minutes the first time.\",\"run\":1," +
+ "\"endpoints\":[{\"cluster\":\"default\",\"tls\":true,\"url\":\"https://instance1--application1--tenant1.us-east-3.vespa.oath.cloud:4443/\",\"scope\":\"zone\",\"routingMethod\":\"shared\"}]}");
app1.runJob(JobType.productionUsEast3);
tester.controller().applications().deactivate(app1.instanceId(), ZoneId.from("prod", "us-east-3"));
@@ -250,7 +251,8 @@ public class ApplicationApiTest extends ControllerContainerTest {
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploy/dev-us-east-1/", POST)
.data(entity)
.userIdentity(USER_ID),
- "{\"message\":\"Deployment started in run 1 of dev-us-east-1 for tenant1.application1.instance1. This may take about 15 minutes the first time.\",\"run\":1}");
+ "{\"message\":\"Deployment started in run 1 of dev-us-east-1 for tenant1.application1.instance1. This may take about 15 minutes the first time.\",\"run\":1," +
+ "\"endpoints\":[{\"cluster\":\"default\",\"tls\":true,\"url\":\"https://instance1--application1--tenant1.us-east-1.dev.vespa.oath.cloud:4443/\",\"scope\":\"zone\",\"routingMethod\":\"shared\"}]}");
app1.runJob(JobType.devUsEast1);
// GET dev application package
@@ -514,7 +516,7 @@ public class ApplicationApiTest extends ControllerContainerTest {
updateMetrics();
// GET metrics
- tester.assertResponse(request("/application/v4/tenant/tenant2/application/application1/environment/dev/region/us-east-1/instance/default/metrics", GET)
+ tester.assertJsonResponse(request("/application/v4/tenant/tenant2/application/application1/environment/dev/region/us-east-1/instance/default/metrics", GET)
.userIdentity(USER_ID),
new File("proton-metrics.json"));
@@ -1426,7 +1428,8 @@ public class ApplicationApiTest extends ControllerContainerTest {
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/new-user/deploy/dev-us-east-1", POST)
.data(entity)
.userIdentity(userId),
- "{\"message\":\"Deployment started in run 1 of dev-us-east-1 for tenant1.application1.new-user. This may take about 15 minutes the first time.\",\"run\":1}");
+ "{\"message\":\"Deployment started in run 1 of dev-us-east-1 for tenant1.application1.new-user. This may take about 15 minutes the first time.\",\"run\":1," +
+ "\"endpoints\":[{\"cluster\":\"default\",\"tls\":true,\"url\":\"https://new-user--application1--tenant1.us-east-1.dev.vespa.oath.cloud:4443/\",\"scope\":\"zone\",\"routingMethod\":\"shared\"}]}");
}
@Test
@@ -1471,7 +1474,8 @@ public class ApplicationApiTest extends ControllerContainerTest {
tester.assertResponse(request("/application/v4/tenant/sandbox/application/myapp/instance/default/deploy/dev-us-east-1", POST)
.data(entity)
.userIdentity(developer),
- "{\"message\":\"Deployment started in run 1 of dev-us-east-1 for sandbox.myapp. This may take about 15 minutes the first time.\",\"run\":1}",
+ "{\"message\":\"Deployment started in run 1 of dev-us-east-1 for sandbox.myapp. This may take about 15 minutes the first time.\",\"run\":1," +
+ "\"endpoints\":[{\"cluster\":\"default\",\"tls\":true,\"url\":\"https://myapp--sandbox.us-east-1.dev.vespa.oath.cloud:4443/\",\"scope\":\"zone\",\"routingMethod\":\"shared\"}]}",
200);
// To add temporary support allowing tenant admins to launch services
@@ -1482,7 +1486,8 @@ public class ApplicationApiTest extends ControllerContainerTest {
tester.assertResponse(request("/application/v4/tenant/sandbox/application/myapp/instance/default/deploy/dev-us-east-1", POST)
.data(entity)
.userIdentity(developer2),
- "{\"message\":\"Deployment started in run 2 of dev-us-east-1 for sandbox.myapp. This may take about 15 minutes the first time.\",\"run\":2}",
+ "{\"message\":\"Deployment started in run 2 of dev-us-east-1 for sandbox.myapp. This may take about 15 minutes the first time.\",\"run\":2," +
+ "\"endpoints\":[{\"cluster\":\"default\",\"tls\":true,\"url\":\"https://myapp--sandbox.us-east-1.dev.vespa.oath.cloud:4443/\",\"scope\":\"zone\",\"routingMethod\":\"shared\"}]}",
200);
@@ -1491,7 +1496,8 @@ public class ApplicationApiTest extends ControllerContainerTest {
.data(applicationPackageInstance1.zippedContent())
.contentType("application/zip")
.userIdentity(developer2),
- "{\"message\":\"Deployment started in run 3 of dev-us-east-1 for sandbox.myapp. This may take about 15 minutes the first time.\",\"run\":3}");
+ "{\"message\":\"Deployment started in run 3 of dev-us-east-1 for sandbox.myapp. This may take about 15 minutes the first time.\",\"run\":3," +
+ "\"endpoints\":[{\"cluster\":\"default\",\"tls\":true,\"url\":\"https://myapp--sandbox.us-east-1.dev.vespa.oath.cloud:4443/\",\"scope\":\"zone\",\"routingMethod\":\"shared\"}]}");
// POST (deploy) an application package not as content type application/zip — not multipart — is disallowed
tester.assertResponse(request("/application/v4/tenant/sandbox/application/myapp/instance/default/deploy/dev-us-east-1", POST)
diff --git a/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/JobControllerApiHandlerHelperTest.java b/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/JobControllerApiHandlerHelperTest.java
index 828e2856cae..c43abf276c5 100644
--- a/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/JobControllerApiHandlerHelperTest.java
+++ b/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/JobControllerApiHandlerHelperTest.java
@@ -4,6 +4,7 @@ package com.yahoo.vespa.hosted.controller.restapi.application;
import com.yahoo.component.Version;
import com.yahoo.config.provision.zone.ZoneId;
import com.yahoo.container.jdisc.HttpResponse;
+import com.yahoo.test.json.JsonTestHelper;
import com.yahoo.vespa.hosted.controller.api.application.v4.model.DeployOptions;
import com.yahoo.vespa.hosted.controller.api.integration.configserver.ConfigServerException;
import com.yahoo.vespa.hosted.controller.api.integration.deployment.ApplicationVersion;
@@ -12,8 +13,6 @@ import com.yahoo.vespa.hosted.controller.api.integration.deployment.TestReport;
import com.yahoo.vespa.hosted.controller.application.ApplicationPackage;
import com.yahoo.vespa.hosted.controller.deployment.ApplicationPackageBuilder;
import com.yahoo.vespa.hosted.controller.deployment.DeploymentTester;
-import org.json.JSONException;
-import org.json.JSONObject;
import org.junit.Test;
import java.io.ByteArrayOutputStream;
@@ -180,12 +179,10 @@ public class JobControllerApiHandlerHelperTest {
"jobs-direct-deployment.json");
}
- private void compare(HttpResponse response, String expected) throws JSONException, IOException {
+ private void compare(HttpResponse response, String expected) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
response.render(baos);
- JSONObject actualJSON = new JSONObject(new String(baos.toByteArray()));
- JSONObject expectedJSON = new JSONObject(expected);
- assertEquals(expectedJSON.toString(), actualJSON.toString());
+ JsonTestHelper.assertJsonEquals(expected, baos.toString());
}
private void assertResponse(HttpResponse response, String fileName) {
diff --git a/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/responses/deployment-job-accepted-2.json b/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/responses/deployment-job-accepted-2.json
index 8ea3f318d1d..c53cee8fd97 100644
--- a/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/responses/deployment-job-accepted-2.json
+++ b/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/responses/deployment-job-accepted-2.json
@@ -1,4 +1,13 @@
{
"message": "Deployment started in run 1 of dev-us-east-1 for tenant1.application1.myuser. This may take about 15 minutes the first time.",
- "run": 1
+ "run": 1,
+ "endpoints": [
+ {
+ "cluster": "default",
+ "tls": true,
+ "url": "https://myuser--application1--tenant1.us-east-1.dev.vespa.oath.cloud:4443/",
+ "scope": "zone",
+ "routingMethod": "shared"
+ }
+ ]
} \ No newline at end of file
diff --git a/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/responses/proton-metrics.json b/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/responses/proton-metrics.json
index a7e5b3918d8..3fba9b3c91c 100644
--- a/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/responses/proton-metrics.json
+++ b/controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/responses/proton-metrics.json
@@ -1,23 +1,26 @@
{
- "metrics": [{
- "clusterId": "content/doc/",
- "metrics": {
- "resourceMemoryUsageAverage": 0.103482,
- "documentsReadyCount": 11430,
- "documentDiskUsage": 44021,
- "resourceDiskUsageAverage": 0.0168421,
- "documentsTotalCount": 11430,
- "documentsActiveCount": 11430
+ "metrics": [
+ {
+ "clusterId": "content/doc/",
+ "metrics": {
+ "resourceMemoryUsageAverage": 0.103482,
+ "documentsReadyCount": 11430.0,
+ "documentDiskUsage": 44021.0,
+ "resourceDiskUsageAverage": 0.0168421,
+ "documentsTotalCount": 11430.0,
+ "documentsActiveCount": 11430.0
+ }
+ },
+ {
+ "clusterId": "content/music/",
+ "metrics": {
+ "resourceMemoryUsageAverage": 0.00912,
+ "documentsReadyCount": 32000.0,
+ "documentDiskUsage": 90113.0,
+ "resourceDiskUsageAverage": 0.23912,
+ "documentsTotalCount": 32210.0,
+ "documentsActiveCount": 32210.0
+ }
}
- }, {
- "clusterId": "content/music/",
- "metrics": {
- "resourceMemoryUsageAverage": 0.00912,
- "documentsReadyCount": 32000,
- "documentDiskUsage": 90113,
- "resourceDiskUsageAverage": 0.23912,
- "documentsTotalCount": 32210,
- "documentsActiveCount": 32210
- }
- }]
-} \ No newline at end of file
+ ]
+}
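
The trailing ".0" on the counts above is a side effect of the Jackson migration: ProtonMetrics keeps its values in a Map<String, Double>, and ObjectNode.put(String, Double) creates a DoubleNode, which always serializes with a fractional part, whereas the previous org.json serialization rendered whole-number doubles without one. A minimal sketch, assuming jackson-databind 2.x; the field name is an example:

    import com.fasterxml.jackson.databind.ObjectMapper;

    class DoubleSerializationSketch {
        public static void main(String[] args) {
            ObjectMapper jsonMapper = new ObjectMapper();
            String json = jsonMapper.createObjectNode()
                                    .put("documentsReadyCount", 11430.0)   // Double -> DoubleNode
                                    .toString();
            System.out.println(json);   // {"documentsReadyCount":11430.0}
        }
    }
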
diff --git a/dist/vespa.spec b/dist/vespa.spec
index b19d1c9a201..42039fae1d8 100644
--- a/dist/vespa.spec
+++ b/dist/vespa.spec
@@ -158,6 +158,7 @@ Requires: gdb
Requires: nc
Requires: net-tools
Requires: unzip
+Requires: zstd
%if 0%{?el7}
Requires: llvm7.0
Requires: vespa-icu >= 65.1.0-1
diff --git a/docker-api/src/main/java/com/yahoo/vespa/hosted/dockerapi/ContainerStats.java b/docker-api/src/main/java/com/yahoo/vespa/hosted/dockerapi/ContainerStats.java
index 797dffdef1f..dc2db50d3ab 100644
--- a/docker-api/src/main/java/com/yahoo/vespa/hosted/dockerapi/ContainerStats.java
+++ b/docker-api/src/main/java/com/yahoo/vespa/hosted/dockerapi/ContainerStats.java
@@ -1,44 +1,31 @@
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.hosted.dockerapi;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.github.dockerjava.api.model.CpuStatsConfig;
-import com.github.dockerjava.api.model.MemoryStatsConfig;
-import com.github.dockerjava.api.model.StatisticNetworksConfig;
-import com.github.dockerjava.api.model.Statistics;
-
-import java.io.IOException;
-import java.io.UncheckedIOException;
+import java.util.Collections;
+import java.util.LinkedHashMap;
import java.util.Map;
-import java.util.Optional;
-import java.util.TreeMap;
-import java.util.stream.Collectors;
+import java.util.Objects;
/**
- * Wrapper class for {@link com.github.dockerjava.api.model.Statistics} to prevent leaking from docker-java library.
+ * CPU, memory and network statistics collected from a container.
*
* @author freva
*/
+// TODO: Move this to node-admin when docker-api module can be removed
public class ContainerStats {
+
private final Map<String, NetworkStats> networkStatsByInterface;
private final MemoryStats memoryStats;
private final CpuStats cpuStats;
- ContainerStats(Statistics statistics) {
- // Network stats are null when container uses host network
- this.networkStatsByInterface = Optional.ofNullable(statistics.getNetworks()).orElseGet(Map::of)
- .entrySet().stream()
- .collect(Collectors.toMap(
- Map.Entry::getKey,
- e -> new NetworkStats(e.getValue()),
- (u, v) -> { throw new IllegalStateException(); },
- TreeMap::new));
- this.memoryStats = new MemoryStats(statistics.getMemoryStats());
- this.cpuStats = new CpuStats(statistics.getCpuStats());
+ public ContainerStats(Map<String, NetworkStats> networkStatsByInterface, MemoryStats memoryStats, CpuStats cpuStats) {
+ this.networkStatsByInterface = new LinkedHashMap<>(Objects.requireNonNull(networkStatsByInterface));
+ this.memoryStats = Objects.requireNonNull(memoryStats);
+ this.cpuStats = Objects.requireNonNull(cpuStats);
}
public Map<String, NetworkStats> getNetworks() {
- return networkStatsByInterface;
+ return Collections.unmodifiableMap(networkStatsByInterface);
}
public MemoryStats getMemoryStats() {
@@ -49,7 +36,22 @@ public class ContainerStats {
return cpuStats;
}
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ContainerStats that = (ContainerStats) o;
+ return networkStatsByInterface.equals(that.networkStatsByInterface) && memoryStats.equals(that.memoryStats) && cpuStats.equals(that.cpuStats);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(networkStatsByInterface, memoryStats, cpuStats);
+ }
+
+ /** Statistics for network usage */
public static class NetworkStats {
+
private final long rxBytes;
private final long rxDropped;
private final long rxErrors;
@@ -57,40 +59,109 @@ public class ContainerStats {
private final long txDropped;
private final long txErrors;
- private NetworkStats(StatisticNetworksConfig statisticNetworksConfig) {
- this.rxBytes = statisticNetworksConfig.getRxBytes();
- this.rxDropped = statisticNetworksConfig.getRxDropped();
- this.rxErrors = statisticNetworksConfig.getRxErrors();
- this.txBytes = statisticNetworksConfig.getTxBytes();
- this.txDropped = statisticNetworksConfig.getTxDropped();
- this.txErrors = statisticNetworksConfig.getTxErrors();
+ public NetworkStats(long rxBytes, long rxDropped, long rxErrors, long txBytes, long txDropped, long txErrors) {
+ this.rxBytes = rxBytes;
+ this.rxDropped = rxDropped;
+ this.rxErrors = rxErrors;
+ this.txBytes = txBytes;
+ this.txDropped = txDropped;
+ this.txErrors = txErrors;
}
+ /** Returns received bytes */
public long getRxBytes() { return this.rxBytes; }
+
+ /** Returns received bytes that were dropped */
public long getRxDropped() { return this.rxDropped; }
+
+ /** Returns received errors */
public long getRxErrors() { return this.rxErrors; }
+
+ /** Returns transmitted bytes */
public long getTxBytes() { return this.txBytes; }
+
+ /** Returns transmitted bytes that were dropped */
public long getTxDropped() { return this.txDropped; }
+
+ /** Returns transmission errors */
public long getTxErrors() { return this.txErrors; }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ NetworkStats that = (NetworkStats) o;
+ return rxBytes == that.rxBytes && rxDropped == that.rxDropped && rxErrors == that.rxErrors && txBytes == that.txBytes && txDropped == that.txDropped && txErrors == that.txErrors;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(rxBytes, rxDropped, rxErrors, txBytes, txDropped, txErrors);
+ }
+
+ @Override
+ public String toString() {
+ return "NetworkStats{" +
+ "rxBytes=" + rxBytes +
+ ", rxDropped=" + rxDropped +
+ ", rxErrors=" + rxErrors +
+ ", txBytes=" + txBytes +
+ ", txDropped=" + txDropped +
+ ", txErrors=" + txErrors +
+ '}';
+ }
+
}
- public class MemoryStats {
+ /** Statistics for memory usage */
+ public static class MemoryStats {
+
private final long cache;
private final long usage;
private final long limit;
- private MemoryStats(MemoryStatsConfig memoryStats) {
- this.cache = memoryStats.getStats().getCache();
- this.usage = memoryStats.getUsage();
- this.limit = memoryStats.getLimit();
+ public MemoryStats(long cache, long usage, long limit) {
+ this.cache = cache;
+ this.usage = usage;
+ this.limit = limit;
}
+ /** Returns memory used by cache in bytes */
public long getCache() { return this.cache; }
+
+ /** Returns memory usage in bytes */
public long getUsage() { return this.usage; }
+
+ /** Returns memory limit in bytes */
public long getLimit() { return this.limit; }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ MemoryStats that = (MemoryStats) o;
+ return cache == that.cache && usage == that.usage && limit == that.limit;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(cache, usage, limit);
+ }
+
+ @Override
+ public String toString() {
+ return "MemoryStats{" +
+ "cache=" + cache +
+ ", usage=" + usage +
+ ", limit=" + limit +
+ '}';
+ }
+
}
- public class CpuStats {
+ /** Statistics for CPU usage */
+ public static class CpuStats {
+
private final int onlineCpus;
private final long systemCpuUsage;
private final long totalUsage;
@@ -99,15 +170,15 @@ public class ContainerStats {
private final long throttlingActivePeriods;
private final long throttledPeriods;
- public CpuStats(CpuStatsConfig cpuStats) {
- // Added in 1.27
- this.onlineCpus = cpuStats.getCpuUsage().getPercpuUsage().size();
- this.systemCpuUsage = cpuStats.getSystemCpuUsage();
- this.totalUsage = cpuStats.getCpuUsage().getTotalUsage();
- this.usageInKernelMode = cpuStats.getCpuUsage().getUsageInKernelmode();
- this.throttledTime = cpuStats.getThrottlingData().getThrottledTime();
- this.throttlingActivePeriods = cpuStats.getThrottlingData().getPeriods();
- this.throttledPeriods = cpuStats.getThrottlingData().getThrottledPeriods();
+ public CpuStats(int onlineCpus, long systemCpuUsage, long totalUsage, long usageInKernelMode,
+ long throttledTime, long throttlingActivePeriods, long throttledPeriods) {
+ this.onlineCpus = onlineCpus;
+ this.systemCpuUsage = systemCpuUsage;
+ this.totalUsage = totalUsage;
+ this.usageInKernelMode = usageInKernelMode;
+ this.throttledTime = throttledTime;
+ this.throttlingActivePeriods = throttlingActivePeriods;
+ this.throttledPeriods = throttledPeriods;
}
public int getOnlineCpus() { return this.onlineCpus; }
@@ -129,15 +200,33 @@ public class ContainerStats {
/** Number of periods this container hit the throttling limit */
public long getThrottledPeriods() { return throttledPeriods; }
- }
- // For testing only, create ContainerStats from JSON returned by docker daemon stats API
- public static ContainerStats fromJson(String json) {
- try {
- Statistics statistics = new ObjectMapper().readValue(json, Statistics.class);
- return new ContainerStats(statistics);
- } catch (IOException e) {
- throw new UncheckedIOException(e);
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CpuStats cpuStats = (CpuStats) o;
+ return onlineCpus == cpuStats.onlineCpus && systemCpuUsage == cpuStats.systemCpuUsage && totalUsage == cpuStats.totalUsage && usageInKernelMode == cpuStats.usageInKernelMode && throttledTime == cpuStats.throttledTime && throttlingActivePeriods == cpuStats.throttlingActivePeriods && throttledPeriods == cpuStats.throttledPeriods;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(onlineCpus, systemCpuUsage, totalUsage, usageInKernelMode, throttledTime, throttlingActivePeriods, throttledPeriods);
}
+
+ @Override
+ public String toString() {
+ return "CpuStats{" +
+ "onlineCpus=" + onlineCpus +
+ ", systemCpuUsage=" + systemCpuUsage +
+ ", totalUsage=" + totalUsage +
+ ", usageInKernelMode=" + usageInKernelMode +
+ ", throttledTime=" + throttledTime +
+ ", throttlingActivePeriods=" + throttlingActivePeriods +
+ ", throttledPeriods=" + throttledPeriods +
+ '}';
+ }
+
}
+
}
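The rewrite above turns ContainerStats into a plain value class: public constructors, defensive copies, and equals/hashCode at every level. A minimal sketch of how the new constructors compose, using hypothetical sample values (only the class and constructor names come from the diff):

    import com.yahoo.vespa.hosted.dockerapi.ContainerStats;
    import java.util.Map;

    class ContainerStatsExample {
        public static void main(String[] args) {
            // Hypothetical sample values; the constructor signatures are the ones introduced above.
            ContainerStats.NetworkStats eth0 = new ContainerStats.NetworkStats(1_000, 0, 0, 2_000, 0, 0);
            ContainerStats.MemoryStats memory = new ContainerStats.MemoryStats(4_096, 65_536, 1_048_576);
            ContainerStats.CpuStats cpu = new ContainerStats.CpuStats(2, 500_000, 100_000, 40_000, 0, 10, 0);
            ContainerStats stats = new ContainerStats(Map.of("eth0", eth0), memory, cpu);

            // Value semantics: two instances built from the same numbers compare equal.
            System.out.println(stats.equals(new ContainerStats(Map.of("eth0", eth0), memory, cpu))); // true
        }
    }

Building stats directly, without a docker-java Statistics object, is what the conversion helper in the next file provides.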
diff --git a/docker-api/src/main/java/com/yahoo/vespa/hosted/dockerapi/DockerEngine.java b/docker-api/src/main/java/com/yahoo/vespa/hosted/dockerapi/DockerEngine.java
index a45855764ed..630efb7990f 100644
--- a/docker-api/src/main/java/com/yahoo/vespa/hosted/dockerapi/DockerEngine.java
+++ b/docker-api/src/main/java/com/yahoo/vespa/hosted/dockerapi/DockerEngine.java
@@ -1,6 +1,7 @@
// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.hosted.dockerapi;
+import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.dockerjava.api.DockerClient;
import com.github.dockerjava.api.command.ExecCreateCmdResponse;
import com.github.dockerjava.api.command.InspectContainerResponse;
@@ -33,6 +34,8 @@ import com.yahoo.vespa.hosted.dockerapi.metrics.Gauge;
import com.yahoo.vespa.hosted.dockerapi.metrics.Metrics;
import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.UncheckedIOException;
import java.time.Clock;
import java.time.Duration;
import java.time.Instant;
@@ -43,6 +46,7 @@ import java.util.Map;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.Set;
+import java.util.TreeMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
@@ -198,8 +202,7 @@ public class DockerEngine implements ContainerEngine {
try {
DockerStatsCallback statsCallback = dockerClient.statsCmd(containerName.asString()).exec(new DockerStatsCallback());
statsCallback.awaitCompletion(5, TimeUnit.SECONDS);
-
- return statsCallback.stats.map(ContainerStats::new);
+ return statsCallback.stats.map(DockerEngine::containerStatsFrom);
} catch (NotFoundException ignored) {
return Optional.empty();
} catch (RuntimeException | InterruptedException e) {
@@ -437,4 +440,39 @@ public class DockerEngine implements ContainerEngine {
return DockerClientImpl.getInstance(dockerClientConfig)
.withDockerCmdExecFactory(dockerFactory);
}
+
+ private static ContainerStats containerStatsFrom(Statistics statistics) {
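+ // Network stats are null when the container uses the host network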
+ return new ContainerStats(Optional.ofNullable(statistics.getNetworks()).orElseGet(Map::of)
+ .entrySet().stream()
+ .collect(Collectors.toMap(
+ Map.Entry::getKey,
+ e -> new ContainerStats.NetworkStats(e.getValue().getRxBytes(), e.getValue().getRxDropped(),
+ e.getValue().getRxErrors(), e.getValue().getTxBytes(),
+ e.getValue().getTxDropped(), e.getValue().getTxErrors()),
+ (u, v) -> {
+ throw new IllegalStateException();
+ },
+ TreeMap::new)),
+ new ContainerStats.MemoryStats(statistics.getMemoryStats().getStats().getCache(),
+ statistics.getMemoryStats().getUsage(),
+ statistics.getMemoryStats().getLimit()),
+ new ContainerStats.CpuStats(statistics.getCpuStats().getCpuUsage().getPercpuUsage().size(),
+ statistics.getCpuStats().getSystemCpuUsage(),
+ statistics.getCpuStats().getCpuUsage().getTotalUsage(),
+ statistics.getCpuStats().getCpuUsage().getUsageInKernelmode(),
+ statistics.getCpuStats().getThrottlingData().getThrottledTime(),
+ statistics.getCpuStats().getThrottlingData().getPeriods(),
+ statistics.getCpuStats().getThrottlingData().getThrottledPeriods()));
+ }
+
+ // For testing only: creates ContainerStats from JSON returned by the docker daemon stats API
+ public static ContainerStats statsFromJson(String json) {
+ try {
+ Statistics statistics = new ObjectMapper().readValue(json, Statistics.class);
+ return containerStatsFrom(statistics);
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ }
+
}
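With the conversion logic moved here, the test-only JSON entry point moves as well: ContainerStats.fromJson becomes DockerEngine.statsFromJson. A hedged sketch of the call-site change (the JSON payload is whatever the docker daemon stats API returns and is not shown):

    import com.yahoo.vespa.hosted.dockerapi.ContainerStats;
    import com.yahoo.vespa.hosted.dockerapi.DockerEngine;

    class StatsJsonExample {
        // 'json' is a raw stats document from the docker daemon stats API.
        static ContainerStats parse(String json) {
            return DockerEngine.statsFromJson(json); // was ContainerStats.fromJson(json) before this change
        }
    }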
diff --git a/document/src/main/java/com/yahoo/document/json/SingleDocumentParser.java b/document/src/main/java/com/yahoo/document/json/SingleDocumentParser.java
deleted file mode 100644
index 67508c61b23..00000000000
--- a/document/src/main/java/com/yahoo/document/json/SingleDocumentParser.java
+++ /dev/null
@@ -1,53 +0,0 @@
-// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
-package com.yahoo.document.json;
-
-import com.fasterxml.jackson.core.JsonFactory;
-import com.yahoo.document.DocumentOperation;
-import com.yahoo.document.DocumentPut;
-import com.yahoo.document.DocumentTypeManager;
-import com.yahoo.document.DocumentUpdate;
-import com.yahoo.vespaxmlparser.DocumentFeedOperation;
-import com.yahoo.vespaxmlparser.DocumentUpdateFeedOperation;
-import com.yahoo.vespaxmlparser.FeedOperation;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-/**
- * Parser that supports parsing PUT operation and UPDATE operation.
- *
- * @author dybis
- */
-public class SingleDocumentParser {
-
- private static final JsonFactory jsonFactory = new JsonFactory().disable(JsonFactory.Feature.CANONICALIZE_FIELD_NAMES);
- private DocumentTypeManager docMan;
-
- public SingleDocumentParser(DocumentTypeManager docMan) {
- this.docMan = docMan;
- }
-
- public FeedOperation parsePut(InputStream inputStream, String docId) {
- return parse(inputStream, docId, DocumentOperationType.PUT);
- }
-
- public FeedOperation parseUpdate(InputStream inputStream, String docId) {
- return parse(inputStream, docId, DocumentOperationType.UPDATE);
- }
-
- private FeedOperation parse(InputStream inputStream, String docId, DocumentOperationType documentOperationType) {
- JsonReader reader = new JsonReader(docMan, inputStream, jsonFactory);
- DocumentOperation documentOperation = reader.readSingleDocument(documentOperationType, docId);
- try {
- inputStream.close();
- } catch (IOException e) {
- throw new IllegalStateException(e);
- }
- if (documentOperationType == DocumentOperationType.PUT) {
- return new DocumentFeedOperation(((DocumentPut) documentOperation).getDocument(), documentOperation.getCondition());
- } else {
- return new DocumentUpdateFeedOperation((DocumentUpdate) documentOperation, documentOperation.getCondition());
- }
- }
-
-}
diff --git a/document/src/vespa/document/bucket/bucket.h b/document/src/vespa/document/bucket/bucket.h
index 44068e1c443..f189c2951c9 100644
--- a/document/src/vespa/document/bucket/bucket.h
+++ b/document/src/vespa/document/bucket/bucket.h
@@ -32,7 +32,7 @@ public:
vespalib::string toString() const;
struct hash {
- size_t operator () (const Bucket& b) const {
+ size_t operator () (const Bucket& b) const noexcept {
size_t hash1 = BucketId::hash()(b.getBucketId());
size_t hash2 = BucketSpace::hash()(b.getBucketSpace());
// Formula taken from std::hash_combine proposal
diff --git a/document/src/vespa/document/bucket/bucketid.cpp b/document/src/vespa/document/bucket/bucketid.cpp
index 668798d6c39..1b9cf1e5304 100644
--- a/document/src/vespa/document/bucket/bucketid.cpp
+++ b/document/src/vespa/document/bucket/bucketid.cpp
@@ -71,7 +71,7 @@ Initialize _initializeUsedMasks;
}
-void BucketId::initialize() {
+void BucketId::initialize() noexcept {
fillUsedMasks(BucketId::_usedMasks, BucketId::maxNumBits);
fillStripMasks(BucketId::_stripMasks, BucketId::maxNumBits);
}
@@ -91,7 +91,7 @@ void BucketId::throwFailedSetUsedBits(uint32_t used, uint32_t availBits) {
}
BucketId::Type
-BucketId::reverse(Type id)
+BucketId::reverse(Type id) noexcept
{
id = ((id & 0x5555555555555555l) << 1) | ((id & 0xaaaaaaaaaaaaaaaal) >> 1);
id = ((id & 0x3333333333333333l) << 2) | ((id & 0xccccccccccccccccl) >> 2);
@@ -100,7 +100,7 @@ BucketId::reverse(Type id)
}
BucketId::Type
-BucketId::keyToBucketId(Type key)
+BucketId::keyToBucketId(Type key) noexcept
{
Type retVal = reverse(key);
@@ -113,7 +113,7 @@ BucketId::keyToBucketId(Type key)
}
bool
-BucketId::contains(const BucketId& id) const
+BucketId::contains(const BucketId& id) const noexcept
{
if (id.getUsedBits() < getUsedBits()) {
return false;
diff --git a/document/src/vespa/document/bucket/bucketid.h b/document/src/vespa/document/bucket/bucketid.h
index b31f9080acc..675a0d23ebd 100644
--- a/document/src/vespa/document/bucket/bucketid.h
+++ b/document/src/vespa/document/bucket/bucketid.h
@@ -37,7 +37,7 @@ class BucketId
{
public:
struct hash {
- size_t operator () (const BucketId& g) const {
+ size_t operator () (const BucketId& g) const noexcept {
return g.getId();
}
};
@@ -55,23 +55,23 @@ public:
/** Create a bucket id using a set of bits from a raw unchecked value. */
BucketId(uint32_t useBits, Type id) noexcept : _id(createUsedBits(useBits, id)) { }
- bool operator<(const BucketId& id) const {
+ bool operator<(const BucketId& id) const noexcept {
return getId() < id.getId();
}
- bool operator==(const BucketId& id) const { return getId() == id.getId(); }
- bool operator!=(const BucketId& id) const { return getId() != id.getId(); }
+ bool operator==(const BucketId& id) const noexcept { return getId() == id.getId(); }
+ bool operator!=(const BucketId& id) const noexcept { return getId() != id.getId(); }
vespalib::string toString() const;
- bool valid() const {
+ bool valid() const noexcept {
return validUsedBits(getUsedBits());
}
- static bool validUsedBits(uint32_t usedBits) {
+ static bool validUsedBits(uint32_t usedBits) noexcept {
return (usedBits >= minNumBits) && (usedBits <= maxNumBits);
}
- bool isSet() const {
+ bool isSet() const noexcept {
return _id != 0u;
}
/**
@@ -79,14 +79,14 @@ public:
* verify that two different documents belong to the same bucket given some
* level of bucket splitting, use this to ignore the unused bits.
*/
- BucketId stripUnused() const { return BucketId(getUsedBits(), getId()); }
+ BucketId stripUnused() const noexcept { return BucketId(getUsedBits(), getId()); }
/**
* Checks whether the given bucket is contained within this bucket. That is,
* if it is the same bucket, or if it is a bucket using more bits, which is
* identical to this one if set to use as many bits as this one.
*/
- bool contains(const BucketId& id) const;
+ bool contains(const BucketId& id) const noexcept;
// Functions exposing internals we want to make users independent of
@@ -97,7 +97,7 @@ public:
static constexpr uint32_t maxNumBits = (8 * sizeof(Type) - CountBits);
static constexpr uint32_t minNumBits = 1u; // See comment above.
- uint32_t getUsedBits() const { return _id >> maxNumBits; }
+ uint32_t getUsedBits() const noexcept { return _id >> maxNumBits; }
void setUsedBits(uint32_t used) {
uint32_t availBits = maxNumBits;
@@ -113,22 +113,22 @@ public:
}
/** Get the bucket id value stripped of the bits that are not in use. */
- Type getId() const { return (_id & getStripMask()); }
+ Type getId() const noexcept { return (_id & getStripMask()); }
/**
* Get the bucket id value stripped of the count bits plus the bits that
* are not in use.
*/
- Type withoutCountBits() const { return (_id & getUsedMask()); }
+ Type withoutCountBits() const noexcept { return (_id & getUsedMask()); }
- Type getRawId() const { return _id; }
+ Type getRawId() const noexcept { return _id; }
/**
* Reverses the bits in the given number, except the countbits part.
* Used for sorting in the bucket database as we want related buckets
* to be sorted next to each other.
*/
- static Type bucketIdToKey(Type id) {
+ static Type bucketIdToKey(Type id) noexcept {
Type retVal = reverse(id);
Type usedCountLSB = id >> maxNumBits;
@@ -139,39 +139,39 @@ public:
return retVal;
}
- static Type keyToBucketId(Type key);
+ static Type keyToBucketId(Type key) noexcept;
/**
* Reverses the bucket id bitwise, except the countbits part,
* and returns the value,
*/
- Type toKey() const { return bucketIdToKey(getId()); };
+ Type toKey() const noexcept { return bucketIdToKey(getId()); }
/**
* Reverses the order of the bits in the bucket id.
*/
- static Type reverse(Type id);
+ static Type reverse(Type id) noexcept;
/**
* Returns the value of the Nth bit, counted in the reverse order of the
* bucket id.
*/
- uint8_t getBit(uint32_t n) const {
+ uint8_t getBit(uint32_t n) const noexcept {
return (_id & ((Type)1 << n)) == 0 ? 0 : 1;
}
- static void initialize();
+ static void initialize() noexcept;
private:
static Type _usedMasks[maxNumBits+1];
static Type _stripMasks[maxNumBits+1];
Type _id;
- Type getUsedMask() const {
+ Type getUsedMask() const noexcept {
return _usedMasks[getUsedBits()];
}
- Type getStripMask() const {
+ Type getStripMask() const noexcept {
return _stripMasks[getUsedBits()];
}
diff --git a/eval/CMakeLists.txt b/eval/CMakeLists.txt
index fe621f6e9f0..23127cc12b5 100644
--- a/eval/CMakeLists.txt
+++ b/eval/CMakeLists.txt
@@ -65,6 +65,7 @@ vespa_define_module(
src/tests/instruction/mixed_simple_join_function
src/tests/instruction/pow_as_map_optimizer
src/tests/instruction/remove_trivial_dimension_optimizer
+ src/tests/instruction/sparse_dot_product_function
src/tests/instruction/sum_max_dot_product_function
src/tests/instruction/vector_from_doubles_function
src/tests/streamed/value
diff --git a/eval/src/tests/eval/fast_value/fast_value_test.cpp b/eval/src/tests/eval/fast_value/fast_value_test.cpp
index 279f17a1ead..6cf43511977 100644
--- a/eval/src/tests/eval/fast_value/fast_value_test.cpp
+++ b/eval/src/tests/eval/fast_value/fast_value_test.cpp
@@ -3,7 +3,7 @@
#include <vespa/eval/eval/fast_value.hpp>
#include <vespa/eval/eval/fast_value.h>
#include <vespa/eval/eval/value_codec.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/vespalib/gtest/gtest.h>
using namespace vespalib;
@@ -142,29 +142,29 @@ TEST(FastValueBuilderTest, mixed_add_subspace_robustness) {
}
}
-std::vector<Layout> layouts = {
- {},
- {x(3)},
- {x(3),y(5)},
- {x(3),y(5),z(7)},
- float_cells({x(3),y(5),z(7)}),
- {x({"a","b","c"})},
- {x({"a","b","c"}),y({"foo","bar"})},
- {x({"a","b","c"}),y({"foo","bar"}),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y({"foo","bar"}),z({"i","j","k","l"})}),
- {x(3),y({"foo", "bar"}),z(7)},
- {x({"a","b","c"}),y(5),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y(5),z({"i","j","k","l"})})
+GenSpec G() { return GenSpec().cells_float(); }
+
+std::vector<GenSpec> layouts = {
+ G(),
+ G().idx("x", 3),
+ G().idx("x", 3).idx("y", 5),
+ G().idx("x", 3).idx("y", 5).idx("z", 7),
+ G().map("x", {"a","b","c"}),
+ G().map("x", {"a","b","c"}).map("y", {"foo","bar"}),
+ G().map("x", {"a","b","c"}).map("y", {"foo","bar"}).map("z", {"i","j","k","l"}),
+ G().idx("x", 3).map("y", {"foo", "bar"}).idx("z", 7),
+ G().map("x", {"a","b","c"}).idx("y", 5).map("z", {"i","j","k","l"})
};
TEST(FastValueBuilderFactoryTest, fast_values_can_be_copied) {
auto factory = FastValueBuilderFactory::get();
for (const auto &layout: layouts) {
- TensorSpec expect = spec(layout, N());
- std::unique_ptr<Value> value = value_from_spec(expect, factory);
- std::unique_ptr<Value> copy = factory.copy(*value);
- TensorSpec actual = spec_from_value(*copy);
- EXPECT_EQ(actual, expect);
+ for (TensorSpec expect : { layout.gen(), layout.cpy().cells_double().gen() }) {
+ std::unique_ptr<Value> value = value_from_spec(expect, factory);
+ std::unique_ptr<Value> copy = factory.copy(*value);
+ TensorSpec actual = spec_from_value(*copy);
+ EXPECT_EQ(actual, expect);
+ }
}
}
diff --git a/eval/src/tests/eval/gen_spec/gen_spec_test.cpp b/eval/src/tests/eval/gen_spec/gen_spec_test.cpp
index bae25a68760..0d1a4744e42 100644
--- a/eval/src/tests/eval/gen_spec/gen_spec_test.cpp
+++ b/eval/src/tests/eval/gen_spec/gen_spec_test.cpp
@@ -131,7 +131,7 @@ TEST(GenSpec, generating_float_vector) {
}
TEST(GenSpec, generating_custom_vector) {
- GenSpec::seq_t my_seq = [](size_t idx){ return (5.0 - idx); };
+ GenSpec::seq_t my_seq = [](size_t idx) noexcept { return (5.0 - idx); };
EXPECT_EQ(GenSpec().idx("a", 5).seq(my_seq).gen(), custom_vector);
}
diff --git a/eval/src/tests/eval/reference_operations/reference_operations_test.cpp b/eval/src/tests/eval/reference_operations/reference_operations_test.cpp
index 0495923018e..3fcca5e34d8 100644
--- a/eval/src/tests/eval/reference_operations/reference_operations_test.cpp
+++ b/eval/src/tests/eval/reference_operations/reference_operations_test.cpp
@@ -1,13 +1,11 @@
// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
#include <vespa/eval/eval/test/reference_operations.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
#include <vespa/vespalib/gtest/gtest.h>
#include <iostream>
using namespace vespalib;
using namespace vespalib::eval;
-using namespace vespalib::eval::test;
TensorSpec dense_2d_some_cells(bool square) {
return TensorSpec("tensor(a[3],d[5])")
diff --git a/eval/src/tests/eval/simple_value/simple_value_test.cpp b/eval/src/tests/eval/simple_value/simple_value_test.cpp
index 3a653b75172..09884b0ed0d 100644
--- a/eval/src/tests/eval/simple_value/simple_value_test.cpp
+++ b/eval/src/tests/eval/simple_value/simple_value_test.cpp
@@ -5,7 +5,7 @@
#include <vespa/eval/instruction/generic_join.h>
#include <vespa/eval/eval/interpreted_function.h>
#include <vespa/eval/eval/test/reference_operations.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/vespalib/util/stringfmt.h>
#include <vespa/vespalib/gtest/gtest.h>
@@ -23,45 +23,36 @@ using Handle = SharedStringRepo::Handle;
vespalib::string as_str(string_id label) { return Handle::string_from_id(label); }
-std::vector<Layout> layouts = {
- {},
- {x(3)},
- {x(3),y(5)},
- {x(3),y(5),z(7)},
- float_cells({x(3),y(5),z(7)}),
- {x({"a","b","c"})},
- {x({"a","b","c"}),y({"foo","bar"})},
- {x({"a","b","c"}),y({"foo","bar"}),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y({"foo","bar"}),z({"i","j","k","l"})}),
- {x(3),y({"foo", "bar"}),z(7)},
- {x({"a","b","c"}),y(5),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y(5),z({"i","j","k","l"})})
+GenSpec G() { return GenSpec().cells_float(); }
+
+std::vector<GenSpec> layouts = {
+ G(),
+ G().idx("x", 3),
+ G().idx("x", 3).idx("y", 5),
+ G().idx("x", 3).idx("y", 5).idx("z", 7),
+ G().map("x", {"a","b","c"}),
+ G().map("x", {"a","b","c"}).map("y", {"foo","bar"}),
+ G().map("x", {"a","b","c"}).map("y", {"foo","bar"}).map("z", {"i","j","k","l"}),
+ G().idx("x", 3).map("y", {"foo", "bar"}).idx("z", 7),
+ G().map("x", {"a","b","c"}).idx("y", 5).map("z", {"i","j","k","l"})
};
-std::vector<Layout> join_layouts = {
- {}, {},
- {x(5)}, {x(5)},
- {x(5)}, {y(5)},
- {x(5)}, {x(5),y(5)},
- {y(3)}, {x(2),z(3)},
- {x(3),y(5)}, {y(5),z(7)},
- float_cells({x(3),y(5)}), {y(5),z(7)},
- {x(3),y(5)}, float_cells({y(5),z(7)}),
- float_cells({x(3),y(5)}), float_cells({y(5),z(7)}),
- {x({"a","b","c"})}, {x({"a","b","c"})},
- {x({"a","b","c"})}, {x({"a","b"})},
- {x({"a","b","c"})}, {y({"foo","bar","baz"})},
- {x({"a","b","c"})}, {x({"a","b","c"}),y({"foo","bar","baz"})},
- {x({"a","b"}),y({"foo","bar","baz"})}, {x({"a","b","c"}),y({"foo","bar"})},
- {x({"a","b"}),y({"foo","bar","baz"})}, {y({"foo","bar"}),z({"i","j","k","l"})},
- float_cells({x({"a","b"}),y({"foo","bar","baz"})}), {y({"foo","bar"}),z({"i","j","k","l"})},
- {x({"a","b"}),y({"foo","bar","baz"})}, float_cells({y({"foo","bar"}),z({"i","j","k","l"})}),
- float_cells({x({"a","b"}),y({"foo","bar","baz"})}), float_cells({y({"foo","bar"}),z({"i","j","k","l"})}),
- {x(3),y({"foo", "bar"})}, {y({"foo", "bar"}),z(7)},
- {x({"a","b","c"}),y(5)}, {y(5),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y(5)}), {y(5),z({"i","j","k","l"})},
- {x({"a","b","c"}),y(5)}, float_cells({y(5),z({"i","j","k","l"})}),
- float_cells({x({"a","b","c"}),y(5)}), float_cells({y(5),z({"i","j","k","l"})})
+std::vector<GenSpec> join_layouts = {
+ G(), G(),
+ G().idx("x", 5), G().idx("x", 5),
+ G().idx("x", 5), G().idx("y", 5),
+ G().idx("x", 5), G().idx("x", 5).idx("y", 5),
+ G().idx("y", 3), G().idx("x", 2).idx("z", 3),
+ G().idx("x", 3).idx("y", 5), G().idx("y", 5).idx("z", 7),
+ G().map("x", {"a","b","c"}), G().map("x", {"a","b","c"}),
+ G().map("x", {"a","b","c"}), G().map("x", {"a","b"}),
+ G().map("x", {"a","b","c"}), G().map("y", {"foo","bar","baz"}),
+ G().map("x", {"a","b","c"}), G().map("x", {"a","b","c"}).map("y", {"foo","bar","baz"}),
+ G().map("x", {"a","b"}).map("y", {"foo","bar","baz"}), G().map("x", {"a","b","c"}).map("y", {"foo","bar"}),
+ G().map("x", {"a","b"}).map("y", {"foo","bar","baz"}), G().map("y", {"foo","bar"}).map("z", {"i","j","k","l"}),
+ G().idx("x", 3).map("y", {"foo", "bar"}), G().map("y", {"foo","bar"}).idx("z", 7),
+ G().map("x", {"a","b","c"}).idx("y", 5), G().idx("y", 5).map("z", {"i","j","k","l"})
+
};
TensorSpec simple_value_join(const TensorSpec &a, const TensorSpec &b, join_fun_t function) {
@@ -76,20 +67,22 @@ TensorSpec simple_value_join(const TensorSpec &a, const TensorSpec &b, join_fun_
TEST(SimpleValueTest, simple_values_can_be_converted_from_and_to_tensor_spec) {
for (const auto &layout: layouts) {
- TensorSpec expect = spec(layout, N());
- std::unique_ptr<Value> value = value_from_spec(expect, SimpleValueBuilderFactory::get());
- TensorSpec actual = spec_from_value(*value);
- EXPECT_EQ(actual, expect);
+ for (TensorSpec expect : { layout.gen(), layout.cpy().cells_double().gen() }) {
+ std::unique_ptr<Value> value = value_from_spec(expect, SimpleValueBuilderFactory::get());
+ TensorSpec actual = spec_from_value(*value);
+ EXPECT_EQ(actual, expect);
+ }
}
}
TEST(SimpleValueTest, simple_values_can_be_copied) {
for (const auto &layout: layouts) {
- TensorSpec expect = spec(layout, N());
- std::unique_ptr<Value> value = value_from_spec(expect, SimpleValueBuilderFactory::get());
- std::unique_ptr<Value> copy = SimpleValueBuilderFactory::get().copy(*value);
- TensorSpec actual = spec_from_value(*copy);
- EXPECT_EQ(actual, expect);
+ for (TensorSpec expect : { layout.gen(), layout.cpy().cells_double().gen() }) {
+ std::unique_ptr<Value> value = value_from_spec(expect, SimpleValueBuilderFactory::get());
+ std::unique_ptr<Value> copy = SimpleValueBuilderFactory::get().copy(*value);
+ TensorSpec actual = spec_from_value(*copy);
+ EXPECT_EQ(actual, expect);
+ }
}
}
@@ -126,16 +119,22 @@ TEST(SimpleValueTest, simple_value_can_be_built_and_inspected) {
EXPECT_EQ(result["bb"], 3);
}
+GenSpec::seq_t N_16ths = [] (size_t i) noexcept { return (i + 1.0) / 16.0; };
+
TEST(SimpleValueTest, new_generic_join_works_for_simple_values) {
ASSERT_TRUE((join_layouts.size() % 2) == 0);
for (size_t i = 0; i < join_layouts.size(); i += 2) {
- TensorSpec lhs = spec(join_layouts[i], Div16(N()));
- TensorSpec rhs = spec(join_layouts[i + 1], Div16(N()));
- for (auto fun: {operation::Add::f, operation::Sub::f, operation::Mul::f, operation::Div::f}) {
- SCOPED_TRACE(fmt("\n===\nLHS: %s\nRHS: %s\n===\n", lhs.to_string().c_str(), rhs.to_string().c_str()));
- auto expect = ReferenceOperations::join(lhs, rhs, fun);
- auto actual = simple_value_join(lhs, rhs, fun);
- EXPECT_EQ(actual, expect);
+ const auto l = join_layouts[i].seq(N_16ths);
+ const auto r = join_layouts[i + 1].seq(N_16ths);
+ for (TensorSpec lhs : { l.gen(), l.cpy().cells_double().gen() }) {
+ for (TensorSpec rhs : { r.gen(), r.cpy().cells_double().gen() }) {
+ for (auto fun: {operation::Add::f, operation::Sub::f, operation::Mul::f, operation::Div::f}) {
+ SCOPED_TRACE(fmt("\n===\nLHS: %s\nRHS: %s\n===\n", lhs.to_string().c_str(), rhs.to_string().c_str()));
+ auto expect = ReferenceOperations::join(lhs, rhs, fun);
+ auto actual = simple_value_join(lhs, rhs, fun);
+ EXPECT_EQ(actual, expect);
+ }
+ }
}
}
}
diff --git a/eval/src/tests/eval/tensor_lambda/tensor_lambda_test.cpp b/eval/src/tests/eval/tensor_lambda/tensor_lambda_test.cpp
index 23bd16cb721..18198a75f7d 100644
--- a/eval/src/tests/eval/tensor_lambda/tensor_lambda_test.cpp
+++ b/eval/src/tests/eval/tensor_lambda/tensor_lambda_test.cpp
@@ -8,7 +8,7 @@
#include <vespa/eval/instruction/dense_cell_range_function.h>
#include <vespa/eval/instruction/dense_lambda_peek_function.h>
#include <vespa/eval/instruction/fast_rename_optimizer.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/eval/eval/test/eval_fixture.h>
#include <vespa/eval/eval/tensor_nodes.h>
@@ -23,17 +23,19 @@ using namespace vespalib::eval::tensor_function;
const ValueBuilderFactory &simple_factory = SimpleValueBuilderFactory::get();
const ValueBuilderFactory &prod_factory = FastValueBuilderFactory::get();
+TensorSpec spec(double v) { return TensorSpec("double").add({}, v); }
+
EvalFixture::ParamRepo make_params() {
return EvalFixture::ParamRepo()
.add("a", spec(1))
.add("b", spec(2))
- .add("x3", spec({x(3)}, N()))
- .add("x3f", spec(float_cells({x(3)}), N()))
- .add("x3m", spec({x({"0", "1", "2"})}, N()))
- .add("x3y5", spec({x(3), y(5)}, N()))
- .add("x3y5f", spec(float_cells({x(3), y(5)}), N()))
- .add("x15", spec({x(15)}, N()))
- .add("x15f", spec(float_cells({x(15)}), N()));
+ .add("x3", GenSpec().idx("x", 3).gen())
+ .add("x3f", GenSpec().idx("x", 3).cells_float().gen())
+ .add("x3m", GenSpec().map("x", 3).gen())
+ .add("x3y5", GenSpec().idx("x", 3).idx("y", 5).gen())
+ .add("x3y5f", GenSpec().idx("x", 3).idx("y", 5).cells_float().gen())
+ .add("x15", GenSpec().idx("x", 15).gen())
+ .add("x15f", GenSpec().idx("x", 15).cells_float().gen());
}
EvalFixture::ParamRepo param_repo = make_params();
diff --git a/eval/src/tests/eval/value_codec/value_codec_test.cpp b/eval/src/tests/eval/value_codec/value_codec_test.cpp
index 2b03cffe730..acce0f5667f 100644
--- a/eval/src/tests/eval/value_codec/value_codec_test.cpp
+++ b/eval/src/tests/eval/value_codec/value_codec_test.cpp
@@ -2,7 +2,7 @@
#include <iostream>
#include <vespa/eval/eval/simple_value.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/eval/eval/value_codec.h>
#include <vespa/vespalib/data/memory.h>
#include <vespa/vespalib/gtest/gtest.h>
@@ -15,28 +15,28 @@ using namespace vespalib::eval::test;
const ValueBuilderFactory &factory = SimpleValueBuilderFactory::get();
-std::vector<Layout> layouts = {
- {},
- {x(3)},
- {x(3),y(5)},
- {x(3),y(5),z(7)},
- float_cells({x(3),y(5),z(7)}),
- {x({"a","b","c"})},
- {x({"a","b","c"}),y({"foo","bar"})},
- {x({"a","b","c"}),y({"foo","bar"}),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y({"foo","bar"}),z({"i","j","k","l"})}),
- {x(3),y({"foo", "bar"}),z(7)},
- {x({"a","b","c"}),y(5),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y(5),z({"i","j","k","l"})})
+GenSpec G() { return GenSpec().cells_float(); }
+
+std::vector<GenSpec> layouts = {
+ G(),
+ G().idx("x", 3),
+ G().idx("x", 3).idx("y", 5),
+ G().idx("x", 3).idx("y", 5).idx("z", 7),
+ G().map("x", {"a","b","c"}),
+ G().map("x", {"a","b","c"}).map("y", {"foo","bar"}),
+ G().map("x", {"a","b","c"}).map("y", {"foo","bar"}).map("z", {"i","j","k","l"}),
+ G().idx("x", 3).map("y", {"foo", "bar"}).idx("z", 7),
+ G().map("x", {"a","b","c"}).idx("y", 5).map("z", {"i","j","k","l"})
};
TEST(ValueCodecTest, simple_values_can_be_converted_from_and_to_tensor_spec) {
for (const auto &layout: layouts) {
- TensorSpec expect = spec(layout, N());
- std::unique_ptr<Value> value = value_from_spec(expect, factory);
- TensorSpec actual = spec_from_value(*value);
- EXPECT_EQ(actual, expect);
+ for (TensorSpec expect : { layout.gen(), layout.cpy().cells_double().gen() }) {
+ std::unique_ptr<Value> value = value_from_spec(expect, factory);
+ TensorSpec actual = spec_from_value(*value);
+ EXPECT_EQ(actual, expect);
+ }
}
}
@@ -66,8 +66,8 @@ TEST(ValueCodecTest, simple_values_can_be_built_using_tensor_spec) {
.add({{"w", "yyy"}, {"x", 1}, {"y", "yyy"}, {"z", 0}}, 0.0)
.add({{"w", "yyy"}, {"x", 1}, {"y", "yyy"}, {"z", 1}}, 4.0);
Value::UP full_tensor = value_from_spec(full_spec, factory);
- EXPECT_EQUAL(full_spec, spec_from_value(*tensor));
- EXPECT_EQUAL(full_spec, spec_from_value(*full_tensor));
+ EXPECT_EQ(full_spec, spec_from_value(*tensor));
+ EXPECT_EQ(full_spec, spec_from_value(*full_tensor));
};
//-----------------------------------------------------------------------------
@@ -333,11 +333,11 @@ TEST(ValueCodecTest, bad_sparse_tensors_are_caught) {
bad.encode_default(data_default);
bad.encode_with_double(data_double);
bad.encode_with_float(data_float);
- EXPECT_EXCEPTION(decode_value(data_default, factory), vespalib::IllegalStateException,
+ VESPA_EXPECT_EXCEPTION(decode_value(data_default, factory), vespalib::IllegalStateException,
"serialized input claims 12345678 blocks of size 1*8, but only");
- EXPECT_EXCEPTION(decode_value(data_double, factory), vespalib::IllegalStateException,
+ VESPA_EXPECT_EXCEPTION(decode_value(data_double, factory), vespalib::IllegalStateException,
"serialized input claims 12345678 blocks of size 1*8, but only");
- EXPECT_EXCEPTION(decode_value(data_float, factory), vespalib::IllegalStateException,
+ VESPA_EXPECT_EXCEPTION(decode_value(data_float, factory), vespalib::IllegalStateException,
"serialized input claims 12345678 blocks of size 1*4, but only");
}
@@ -386,11 +386,11 @@ TEST(ValueCodecTest, bad_dense_tensors_are_caught) {
bad.encode_default(data_default);
bad.encode_with_double(data_double);
bad.encode_with_float(data_float);
- EXPECT_EXCEPTION(decode_value(data_default, factory), vespalib::IllegalStateException,
+ VESPA_EXPECT_EXCEPTION(decode_value(data_default, factory), vespalib::IllegalStateException,
"serialized input claims 1 blocks of size 60000*8, but only");
- EXPECT_EXCEPTION(decode_value(data_double, factory), vespalib::IllegalStateException,
+ VESPA_EXPECT_EXCEPTION(decode_value(data_double, factory), vespalib::IllegalStateException,
"serialized input claims 1 blocks of size 60000*8, but only");
- EXPECT_EXCEPTION(decode_value(data_float, factory), vespalib::IllegalStateException,
+ VESPA_EXPECT_EXCEPTION(decode_value(data_float, factory), vespalib::IllegalStateException,
"serialized input claims 1 blocks of size 60000*4, but only");
}
diff --git a/eval/src/tests/instruction/add_trivial_dimension_optimizer/add_trivial_dimension_optimizer_test.cpp b/eval/src/tests/instruction/add_trivial_dimension_optimizer/add_trivial_dimension_optimizer_test.cpp
index 35195522adc..d2dccfde2fd 100644
--- a/eval/src/tests/instruction/add_trivial_dimension_optimizer/add_trivial_dimension_optimizer_test.cpp
+++ b/eval/src/tests/instruction/add_trivial_dimension_optimizer/add_trivial_dimension_optimizer_test.cpp
@@ -5,7 +5,7 @@
#include <vespa/eval/eval/tensor_function.h>
#include <vespa/eval/instruction/replace_type_function.h>
#include <vespa/eval/instruction/fast_rename_optimizer.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/eval/eval/test/eval_fixture.h>
#include <vespa/vespalib/util/stringfmt.h>
@@ -20,11 +20,11 @@ const ValueBuilderFactory &prod_factory = FastValueBuilderFactory::get();
EvalFixture::ParamRepo make_params() {
return EvalFixture::ParamRepo()
- .add("x5", spec({x(5)}, N()))
- .add("x5f", spec(float_cells({x(5)}), N()))
- .add("x5y1", spec({x(5),y(1)}, N()))
- .add("y1z1", spec({y(1),z(1)}, N()))
- .add("x_m", spec({x({"a"})}, N()));
+ .add("x5", GenSpec().idx("x", 5).gen())
+ .add("x5f", GenSpec().idx("x", 5).cells_float().gen())
+ .add("x5y1", GenSpec().idx("x", 5).idx("y", 1).gen())
+ .add("y1z1", GenSpec().idx("y", 5).idx("z", 1).gen())
+ .add("x_m", GenSpec().map("x", {"a"}).gen());
}
EvalFixture::ParamRepo param_repo = make_params();
diff --git a/eval/src/tests/instruction/dense_replace_type_function/dense_replace_type_function_test.cpp b/eval/src/tests/instruction/dense_replace_type_function/dense_replace_type_function_test.cpp
index 988ca79a04a..5dcdbc5bab8 100644
--- a/eval/src/tests/instruction/dense_replace_type_function/dense_replace_type_function_test.cpp
+++ b/eval/src/tests/instruction/dense_replace_type_function/dense_replace_type_function_test.cpp
@@ -5,7 +5,7 @@
#include <vespa/eval/eval/value_codec.h>
#include <vespa/eval/eval/interpreted_function.h>
#include <vespa/eval/instruction/replace_type_function.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
using namespace vespalib::eval::tensor_function;
using namespace vespalib::eval::test;
@@ -33,7 +33,7 @@ struct Fixture {
std::vector<TensorFunction::Child::CREF> children;
InterpretedFunction::State state;
Fixture()
- : my_value(value_from_spec(spec({x(10)}, N()), prod_factory)),
+ : my_value(value_from_spec(GenSpec().idx("x", 10).gen(), prod_factory)),
new_type(ValueType::from_spec("tensor(x[5],y[2])")),
mock_child(my_value->type()),
my_fun(new_type, mock_child),
diff --git a/eval/src/tests/instruction/fast_rename_optimizer/fast_rename_optimizer_test.cpp b/eval/src/tests/instruction/fast_rename_optimizer/fast_rename_optimizer_test.cpp
index dc90a5e54a1..e915a396ae7 100644
--- a/eval/src/tests/instruction/fast_rename_optimizer/fast_rename_optimizer_test.cpp
+++ b/eval/src/tests/instruction/fast_rename_optimizer/fast_rename_optimizer_test.cpp
@@ -4,7 +4,7 @@
#include <vespa/eval/eval/tensor_function.h>
#include <vespa/eval/instruction/replace_type_function.h>
#include <vespa/eval/instruction/fast_rename_optimizer.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/eval/eval/test/eval_fixture.h>
#include <vespa/vespalib/util/stringfmt.h>
@@ -19,13 +19,13 @@ const ValueBuilderFactory &prod_factory = FastValueBuilderFactory::get();
EvalFixture::ParamRepo make_params() {
return EvalFixture::ParamRepo()
- .add("x5", spec({x(5)}, N()))
- .add("x5f", spec(float_cells({x(5)}), N()))
- .add("x_m", spec({x({"a", "b", "c"})}, N()))
- .add("xy_mm", spec({x({"a", "b", "c"}),y({"d","e"})}, N()))
- .add("x5y3z_m", spec({x(5),y(3),z({"a","b"})}, N()))
- .add("x5yz_m", spec({x(5),y({"a","b"}),z({"d","e"})}, N()))
- .add("x5y3", spec({x(5),y(3)}, N()));
+ .add("x5", GenSpec().idx("x", 5).gen())
+ .add("x5f", GenSpec().idx("x", 5).cells_float().gen())
+ .add("x_m", GenSpec().map("x", {"a", "b", "c"}).gen())
+ .add("xy_mm", GenSpec().map("x", {"a", "b", "c"}).map("y", {"d","e"}).gen())
+ .add("x5y3z_m", GenSpec().idx("x", 5).idx("y", 3).map("z", {"a","b"}).gen())
+ .add("x5yz_m", GenSpec().idx("x", 5).map("y", {"a","b"}).map("z", {"d","e"}).gen())
+ .add("x5y3", GenSpec().idx("x", 5).idx("y", 3).gen());
}
EvalFixture::ParamRepo param_repo = make_params();
diff --git a/eval/src/tests/instruction/generic_concat/generic_concat_test.cpp b/eval/src/tests/instruction/generic_concat/generic_concat_test.cpp
index bc8ea84744f..59c4c5bf0c8 100644
--- a/eval/src/tests/instruction/generic_concat/generic_concat_test.cpp
+++ b/eval/src/tests/instruction/generic_concat/generic_concat_test.cpp
@@ -7,7 +7,7 @@
#include <vespa/eval/instruction/generic_concat.h>
#include <vespa/eval/eval/interpreted_function.h>
#include <vespa/eval/eval/test/reference_operations.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/vespalib/util/stringfmt.h>
#include <vespa/vespalib/gtest/gtest.h>
@@ -18,53 +18,48 @@ using namespace vespalib::eval::test;
using vespalib::make_string_short::fmt;
-std::vector<Layout> concat_layouts = {
- {}, {},
- {}, {y(5)},
- float_cells({y(5)}), {},
- {}, float_cells({y(5)}),
- {y(5)}, {},
- {y(2)}, {y(3)},
- {y(2)}, {x(3)},
- {x(2)}, {z(3)},
- {x(2),y(3)}, {x(2),y(3)},
- {x(2),y(3)}, {x(2),y(4)},
- {y(3),z(5)}, {y(3),z(5)},
- {y(3),z(5)}, {y(4),z(5)},
- {x(2),y(3),z(5)}, {x(2),y(3),z(5)},
- {x(2),y(3),z(5)}, {x(2),y(4),z(5)},
- {x(2),y(3),z({"a","b"})}, {x(2),y(3),z({"b","c"})},
- {x(2),y(3),z({"a","b"})}, {x(2),y(4),z({"b","c"})},
- {y(5)}, {y(2),x(5)},
- {x(3)}, {y(2),z(3)},
- {y(2)}, {y(3),x(5),z(2)},
- {y(2),x(5),z(2)}, {y(3),x(5),z(2)},
- {y(3),x(5)}, {x(5),z(7)},
- float_cells({y(3),x(5)}), {x(5),z(7)},
- float_cells({y(3),x(5)}), {},
- {y(3),x(5)}, float_cells({x(5),z(7)}),
- float_cells({y(3),x(5)}), float_cells({x(5),z(7)}),
- {x({"a","b","c"})}, {x({"a","b","c"})},
- {x({"a","b","c"})}, {x({"a","b"})},
- {x({"a","b","c"})}, {x({"b","c","d"})},
- float_cells({x({"a","b","c"})}), {x({"b","c","d"})},
- {x({"a","b","c"})}, float_cells({x({"b","c","d"})}),
- float_cells({x({"a","b","c"})}), float_cells({z({"foo","bar","baz"})}),
- {x({"a","b","c"})}, {x({"a","b","c"}),z({"foo","bar","baz"})},
- {x({"a","b"}),z({"foo","bar","baz"})}, {x({"a","b","c"}),z({"foo","bar"})},
- {x({"a","b","c"}),y(3)}, {y(2)},
- {x({"a","b","c"}),y(3)}, {z(5)},
- {x({"a","b","c"}),y(3)}, {y(2),z(5)},
- {x({"a","b","c"}),y(3)}, {y(2)},
- {x({"a","b","c"}),y(3),z(5)}, {z(5)},
- {y(2)}, {x({"a","b","c"}),y(3)},
- {z(5)}, {x({"a","b","c"}),y(3)},
- {y(2),z(5)}, {x({"a","b","c"}),y(3)},
- {y(2)}, {x({"a","b","c"}),y(3)},
- {z(5)}, {x({"a","b","c"}),y(3),z(5)},
- {y(2),z(5)}, {x({"a","b","c"}),y(3),z(5)},
- {y(2),x({"a","b","c"})}, {y(3),x({"b","c","d"})},
- {y(2),x({"a","b"})}, {y(3),z({"c","d"})}
+GenSpec G() { return GenSpec().cells_float(); }
+
+GenSpec::seq_t N_16ths = [] (size_t i) noexcept { return (i + 1.0) / 16.0; };
+
+std::vector<GenSpec> concat_layouts = {
+ G(), G(),
+ G(), G().idx("y", 5),
+ G().idx("y", 5), G(),
+ G().idx("y", 2), G().idx("y", 3),
+ G().idx("y", 2), G().idx("x", 3),
+ G().idx("x", 2), G().idx("z", 3),
+ G().idx("x", 2).idx("y", 3), G().idx("x", 2).idx("y", 3),
+ G().idx("x", 2).idx("y", 3), G().idx("x", 2).idx("y", 4),
+ G().idx("y", 3).idx("z", 5), G().idx("y", 3).idx("z", 5),
+ G().idx("y", 3).idx("z", 5), G().idx("y", 4).idx("z", 5),
+ G().idx("x", 2).idx("y", 3).idx("z", 5), G().idx("x", 2).idx("y", 3).idx("z", 5),
+ G().idx("x", 2).idx("y", 3).idx("z", 5), G().idx("x", 2).idx("y", 4).idx("z", 5),
+ G().idx("x", 2).idx("y", 3).map("z", {"a","b"}), G().idx("x", 2).idx("y", 3).map("z", {"b","c"}),
+ G().idx("x", 2).idx("y", 3).map("z", {"a","b"}), G().idx("x", 2).idx("y", 4).map("z", {"b","c"}),
+ G().idx("y", 5), G().idx("x", 5).idx("y", 2),
+ G().idx("x", 3), G().idx("y", 2).idx("z", 3),
+ G().idx("y", 2), G().idx("x", 5).idx("y", 3).idx("z", 2),
+ G().idx("x", 5).idx("y", 2).idx("z", 2), G().idx("x", 5).idx("y", 3).idx("z", 2),
+ G().idx("x", 5).idx("y", 3), G().idx("x", 5).idx("z", 7),
+ G().map("x", {"a","b","c"}), G().map("x", {"a","b","c"}),
+ G().map("x", {"a","b","c"}), G().map("x", {"a","b"}),
+ G().map("x", {"a","b","c"}), G().map("x", {"b","c","d"}),
+ G().map("x", {"a","b","c"}), G().map("x", {"a","b","c"}).map("z", {"foo","bar","baz"}),
+ G().map("x", {"a","b"}).map("z", {"foo","bar","baz"}), G().map("x", {"a","b","c"}).map("z", {"foo","bar"}),
+ G().map("x", {"a","b","c"}).idx("y", 3), G().idx("y", 2),
+ G().map("x", {"a","b","c"}).idx("y", 3), G().idx("z", 5),
+ G().map("x", {"a","b","c"}).idx("y", 3), G().idx("y", 2).idx("z", 5),
+ G().map("x", {"a","b","c"}).idx("y", 3), G().idx("y", 2),
+ G().map("x", {"a","b","c"}).idx("y", 3).idx("z", 5), G().idx("z", 5),
+ G().idx("y", 2), G().map("x", {"a","b","c"}).idx("y", 3),
+ G().idx("z", 5), G().map("x", {"a","b","c"}).idx("y", 3),
+ G().idx("y", 2).idx("z", 5), G().map("x", {"a","b","c"}).idx("y", 3),
+ G().idx("y", 2), G().map("x", {"a","b","c"}).idx("y", 3),
+ G().idx("z", 5), G().map("x", {"a","b","c"}).idx("y", 3).idx("z", 5),
+ G().idx("y", 2).idx("z", 5), G().map("x", {"a","b","c"}).idx("y", 3).idx("z", 5),
+ G().map("x", {"a","b","c"}).idx("y", 2), G().map("x", {"b","c","d"}).idx("y", 3),
+ G().map("x", {"a","b"}).idx("y", 2), G().idx("y", 3).map("z", {"c","d"})
};
TensorSpec perform_generic_concat(const TensorSpec &a, const TensorSpec &b,
@@ -81,12 +76,16 @@ TensorSpec perform_generic_concat(const TensorSpec &a, const TensorSpec &b,
void test_generic_concat_with(const ValueBuilderFactory &factory) {
ASSERT_TRUE((concat_layouts.size() % 2) == 0);
for (size_t i = 0; i < concat_layouts.size(); i += 2) {
- const TensorSpec lhs = spec(concat_layouts[i], N());
- const TensorSpec rhs = spec(concat_layouts[i + 1], Div16(N()));
- SCOPED_TRACE(fmt("\n===\nin LHS: %s\nin RHS: %s\n===\n", lhs.to_string().c_str(), rhs.to_string().c_str()));
- auto actual = perform_generic_concat(lhs, rhs, "y", factory);
- auto expect = ReferenceOperations::concat(lhs, rhs, "y");
- EXPECT_EQ(actual, expect);
+ const auto &l = concat_layouts[i];
+ const auto &r = concat_layouts[i+1].seq(N_16ths);
+ for (TensorSpec lhs : { l.gen(), l.cpy().cells_double().gen() }) {
+ for (TensorSpec rhs : { r.gen(), r.cpy().cells_double().gen() }) {
+ SCOPED_TRACE(fmt("\n===\nin LHS: %s\nin RHS: %s\n===\n", lhs.to_string().c_str(), rhs.to_string().c_str()));
+ auto actual = perform_generic_concat(lhs, rhs, "y", factory);
+ auto expect = ReferenceOperations::concat(lhs, rhs, "y");
+ EXPECT_EQ(actual, expect);
+ }
+ }
}
}
diff --git a/eval/src/tests/instruction/generic_create/generic_create_test.cpp b/eval/src/tests/instruction/generic_create/generic_create_test.cpp
index 00af75e4d83..2dce4509571 100644
--- a/eval/src/tests/instruction/generic_create/generic_create_test.cpp
+++ b/eval/src/tests/instruction/generic_create/generic_create_test.cpp
@@ -6,7 +6,7 @@
#include <vespa/eval/instruction/generic_create.h>
#include <vespa/eval/eval/interpreted_function.h>
#include <vespa/eval/eval/test/reference_operations.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/vespalib/util/stringfmt.h>
#include <vespa/vespalib/gtest/gtest.h>
#include <stdlib.h>
@@ -19,18 +19,17 @@ using namespace vespalib::eval::test;
using vespalib::make_string_short::fmt;
-std::vector<Layout> create_layouts = {
- {x(3)},
- {x(3),y(5)},
- {x(3),y(5),z(7)},
- float_cells({x(3),y(5),z(7)}),
- {x({"a","b","c"})},
- {x({"a","b","c"}),y({"foo","bar"})},
- {x({"a","b","c"}),y({"foo","bar"}),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y({"foo","bar"}),z({"i","j","k","l"})}),
- {x(3),y({"foo", "bar"}),z(7)},
- {x({"a","b","c"}),y(5),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y(5),z({"i","j","k","l"})})
+GenSpec G() { return GenSpec().cells_float(); }
+
+std::vector<GenSpec> create_layouts = {
+ G().idx("x", 3),
+ G().idx("x", 3).idx("y", 5),
+ G().idx("x", 3).idx("y", 5).idx("z", 7),
+ G().map("x", {"a","b","c"}),
+ G().map("x", {"a","b","c"}).map("y", {"foo","bar"}),
+ G().map("x", {"a","b","c"}).map("y", {"foo","bar"}).map("z", {"i","j","k","l"}),
+ G().idx("x", 3).map("y", {"foo", "bar"}).idx("z", 7),
+ G().map("x", {"a","b","c"}).idx("y", 5).map("z", {"i","j","k","l"})
};
TensorSpec remove_each(const TensorSpec &a, size_t n) {
@@ -91,16 +90,17 @@ TensorSpec perform_generic_create(const TensorSpec &a, const ValueBuilderFactory
}
void test_generic_create_with(const ValueBuilderFactory &factory) {
- for (const auto & layout : create_layouts) {
- TensorSpec full = spec(layout, N());
- auto actual = perform_generic_create(full, factory);
- auto expect = reference_create(full).normalize();
- EXPECT_EQ(actual, expect);
- for (size_t n : {2, 3, 4, 5}) {
- TensorSpec partial = remove_each(full, n);
- actual = perform_generic_create(partial, factory);
- expect = reference_create(partial).normalize();
+ for (const auto &layout : create_layouts) {
+ for (TensorSpec full : { layout.gen(), layout.cpy().cells_double().gen() }) {
+ auto actual = perform_generic_create(full, factory);
+ auto expect = reference_create(full).normalize();
EXPECT_EQ(actual, expect);
+ for (size_t n : {2, 3, 4, 5}) {
+ TensorSpec partial = remove_each(full, n);
+ actual = perform_generic_create(partial, factory);
+ expect = reference_create(partial).normalize();
+ EXPECT_EQ(actual, expect);
+ }
}
}
}
diff --git a/eval/src/tests/instruction/generic_join/generic_join_test.cpp b/eval/src/tests/instruction/generic_join/generic_join_test.cpp
index 8eca3cad763..55dc4c25389 100644
--- a/eval/src/tests/instruction/generic_join/generic_join_test.cpp
+++ b/eval/src/tests/instruction/generic_join/generic_join_test.cpp
@@ -6,7 +6,7 @@
#include <vespa/eval/instruction/generic_join.h>
#include <vespa/eval/eval/interpreted_function.h>
#include <vespa/eval/eval/test/reference_operations.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/vespalib/util/stringfmt.h>
#include <vespa/vespalib/gtest/gtest.h>
@@ -17,33 +17,25 @@ using namespace vespalib::eval::test;
using vespalib::make_string_short::fmt;
-std::vector<Layout> join_layouts = {
- {}, {},
- {x(5)}, {x(5)},
- {x(5)}, {y(5)},
- {x(5)}, {x(5),y(5)},
- {y(3)}, {x(2),z(3)},
- {x(3),y(5)}, {y(5),z(7)},
- float_cells({x(3),y(5)}), {y(5),z(7)},
- {x(3),y(5)}, float_cells({y(5),z(7)}),
- float_cells({x(3),y(5)}), float_cells({y(5),z(7)}),
- {x({"a","b","c"})}, {x({"a","b","c"})},
- {x({"a","b","c"})}, {x({"a","b"})},
- {x({"a","b","c"})}, {y({"foo","bar","baz"})},
- {x({"a","b","c"})}, {x({"a","b","c"}),y({"foo","bar","baz"})},
- {x({"a","b"}),y({"foo","bar","baz"})}, {x({"a","b","c"}),y({"foo","bar"})},
- {x({"a","b"}),y({"foo","bar","baz"})}, {y({"foo","bar"}),z({"i","j","k","l"})},
- float_cells({x({"a","b"}),y({"foo","bar","baz"})}), {y({"foo","bar"}),z({"i","j","k","l"})},
- {x({"a","b"}),y({"foo","bar","baz"})}, float_cells({y({"foo","bar"}),z({"i","j","k","l"})}),
- float_cells({x({"a","b"}),y({"foo","bar","baz"})}), float_cells({y({"foo","bar"}),z({"i","j","k","l"})}),
- {x(3),y({"foo", "bar"})}, {y({"foo", "bar"}),z(7)},
- {x({"a","b","c"}),y(5)}, {y(5),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y(5)}), {y(5),z({"i","j","k","l"})},
- {x({"a","b","c"}),y(5)}, float_cells({y(5),z({"i","j","k","l"})}),
- float_cells({x({"a","b","c"}),y(5)}), float_cells({y(5),z({"i","j","k","l"})}),
- {x({"a","b","c"}),y(5)}, float_cells({y(5)}),
- {y(5)}, float_cells({x({"a","b","c"}),y(5)}),
- {x({}),y(5)}, float_cells({y(5)})
+GenSpec::seq_t N_16ths = [] (size_t i) noexcept { return (i + 1.0) / 16.0; };
+
+GenSpec G() { return GenSpec().cells_float().seq(N_16ths); }
+
+std::vector<GenSpec> join_layouts = {
+ G(), G(),
+ G().idx("x", 5), G().idx("x", 5),
+ G().idx("x", 5), G().idx("y", 5),
+ G().idx("x", 5), G().idx("x", 5).idx("y", 5),
+ G().idx("y", 3), G().idx("x", 2).idx("z", 3),
+ G().idx("x", 3).idx("y", 5), G().idx("y", 5).idx("z", 7),
+ G().map("x", {"a","b","c"}), G().map("x", {"a","b","c"}),
+ G().map("x", {"a","b","c"}), G().map("x", {"a","b"}),
+ G().map("x", {"a","b","c"}), G().map("y", {"foo","bar","baz"}),
+ G().map("x", {"a","b","c"}), G().map("x", {"a","b","c"}).map("y", {"foo","bar","baz"}),
+ G().map("x", {"a","b"}).map("y", {"foo","bar","baz"}), G().map("x", {"a","b","c"}).map("y", {"foo","bar"}),
+ G().map("x", {"a","b"}).map("y", {"foo","bar","baz"}), G().map("y", {"foo","bar"}).map("z", {"i","j","k","l"}),
+ G().idx("x", 3).map("y", {"foo", "bar"}), G().map("y", {"foo", "bar"}).idx("z", 7),
+ G().map("x", {"a","b","c"}).idx("y", 5), G().idx("y", 5).map("z", {"i","j","k","l"})
};
bool join_address(const TensorSpec::Address &a, const TensorSpec::Address &b, TensorSpec::Address &addr) {
@@ -113,15 +105,19 @@ TEST(GenericJoinTest, dense_join_plan_can_be_executed) {
TEST(GenericJoinTest, generic_join_works_for_simple_and_fast_values) {
ASSERT_TRUE((join_layouts.size() % 2) == 0);
for (size_t i = 0; i < join_layouts.size(); i += 2) {
- TensorSpec lhs = spec(join_layouts[i], Div16(N()));
- TensorSpec rhs = spec(join_layouts[i + 1], Div16(N()));
- for (auto fun: {operation::Add::f, operation::Sub::f, operation::Mul::f, operation::Div::f}) {
- SCOPED_TRACE(fmt("\n===\nLHS: %s\nRHS: %s\n===\n", lhs.to_string().c_str(), rhs.to_string().c_str()));
- auto expect = ReferenceOperations::join(lhs, rhs, fun);
- auto simple = perform_generic_join(lhs, rhs, fun, SimpleValueBuilderFactory::get());
- auto fast = perform_generic_join(lhs, rhs, fun, FastValueBuilderFactory::get());
- EXPECT_EQ(simple, expect);
- EXPECT_EQ(fast, expect);
+ const auto &l = join_layouts[i];
+ const auto &r = join_layouts[i+1];
+ for (TensorSpec lhs : { l.gen(), l.cpy().cells_double().gen() }) {
+ for (TensorSpec rhs : { r.gen(), r.cpy().cells_double().gen() }) {
+ for (auto fun: {operation::Add::f, operation::Sub::f, operation::Mul::f, operation::Div::f}) {
+ SCOPED_TRACE(fmt("\n===\nLHS: %s\nRHS: %s\n===\n", lhs.to_string().c_str(), rhs.to_string().c_str()));
+ auto expect = ReferenceOperations::join(lhs, rhs, fun);
+ auto simple = perform_generic_join(lhs, rhs, fun, SimpleValueBuilderFactory::get());
+ auto fast = perform_generic_join(lhs, rhs, fun, FastValueBuilderFactory::get());
+ EXPECT_EQ(simple, expect);
+ EXPECT_EQ(fast, expect);
+ }
+ }
}
}
}
diff --git a/eval/src/tests/instruction/generic_map/generic_map_test.cpp b/eval/src/tests/instruction/generic_map/generic_map_test.cpp
index 687b6aa60ac..aaa8990d794 100644
--- a/eval/src/tests/instruction/generic_map/generic_map_test.cpp
+++ b/eval/src/tests/instruction/generic_map/generic_map_test.cpp
@@ -6,7 +6,7 @@
#include <vespa/eval/instruction/generic_map.h>
#include <vespa/eval/eval/interpreted_function.h>
#include <vespa/eval/eval/test/reference_operations.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/vespalib/util/stringfmt.h>
#include <vespa/vespalib/gtest/gtest.h>
@@ -17,18 +17,20 @@ using namespace vespalib::eval::test;
using vespalib::make_string_short::fmt;
-std::vector<Layout> map_layouts = {
- {x(3)},
- {x(3),y(5)},
- {x(3),y(5),z(7)},
- float_cells({x(3),y(5),z(7)}),
- {x({"a","b","c"})},
- {x({"a","b","c"}),y({"foo","bar"})},
- {x({"a","b","c"}),y({"foo","bar"}),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y({"foo","bar"}),z({"i","j","k","l"})}),
- {x(3),y({"foo", "bar"}),z(7)},
- {x({"a","b","c"}),y(5),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y(5),z({"i","j","k","l"})})
+GenSpec::seq_t N_16ths = [] (size_t i) noexcept { return (i + 1.0) / 16.0; };
+
+GenSpec G() { return GenSpec().cells_float().seq(N_16ths); }
+
+std::vector<GenSpec> map_layouts = {
+ G(),
+ G().idx("x", 3),
+ G().idx("x", 3).idx("y", 5),
+ G().idx("x", 3).idx("y", 5).idx("z", 7),
+ G().map("x", {"a","b","c"}),
+ G().map("x", {"a","b","c"}).map("y", {"foo","bar"}),
+ G().map("x", {"a","b","c"}).map("y", {"foo","bar"}).map("z", {"i","j","k","l"}),
+ G().idx("x", 3).map("y", {"foo", "bar"}).idx("z", 7),
+ G().map("x", {"a","b","c"}).idx("y", 5).map("z", {"i","j","k","l"})
};
TensorSpec perform_generic_map(const TensorSpec &a, map_fun_t func, const ValueBuilderFactory &factory)
@@ -40,14 +42,14 @@ TensorSpec perform_generic_map(const TensorSpec &a, map_fun_t func, const ValueB
}
void test_generic_map_with(const ValueBuilderFactory &factory) {
- for (const auto & layout : map_layouts) {
- TensorSpec lhs = spec(layout, Div16(N()));
- ValueType lhs_type = ValueType::from_spec(lhs.type());
- for (auto func : {operation::Floor::f, operation::Fabs::f, operation::Square::f, operation::Inv::f}) {
- SCOPED_TRACE(fmt("\n===\nLHS: %s\n===\n", lhs.to_string().c_str()));
- auto expect = ReferenceOperations::map(lhs, func);
- auto actual = perform_generic_map(lhs, func, factory);
- EXPECT_EQ(actual, expect);
+ for (const auto &layout : map_layouts) {
+ for (TensorSpec lhs : { layout.gen(), layout.cpy().cells_double().gen() }) {
+ for (auto func : {operation::Floor::f, operation::Fabs::f, operation::Square::f, operation::Inv::f}) {
+ SCOPED_TRACE(fmt("\n===\nLHS: %s\n===\n", lhs.to_string().c_str()));
+ auto expect = ReferenceOperations::map(lhs, func);
+ auto actual = perform_generic_map(lhs, func, factory);
+ EXPECT_EQ(actual, expect);
+ }
}
}
}
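
Editor's sketch, not part of the patch: the shared N_16ths sequence stands in for Div16(N()) from the removed tensor_model.hpp; it yields (i + 1) / 16, which keeps every generated cell nonzero for functions like Inv here and Div in the join test. A minimal example of plugging such a sequence into a GenSpec, using the header and member names as they appear in this patch:

#include <vespa/eval/eval/test/gen_spec.h>
#include <cstdio>

using namespace vespalib::eval;
using namespace vespalib::eval::test;

int main() {
    // (i + 1) / 16 never produces 0, so Inv/Div over the generated cells stay finite
    GenSpec::seq_t n_16ths = [] (size_t i) noexcept { return (i + 1.0) / 16.0; };
    TensorSpec t = GenSpec().cells_float().seq(n_16ths).idx("x", 3).gen();
    printf("%s\n", t.to_string().c_str());
    return 0;
}
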
diff --git a/eval/src/tests/instruction/generic_merge/generic_merge_test.cpp b/eval/src/tests/instruction/generic_merge/generic_merge_test.cpp
index 60a27e6f6e9..f2ddd9b74d8 100644
--- a/eval/src/tests/instruction/generic_merge/generic_merge_test.cpp
+++ b/eval/src/tests/instruction/generic_merge/generic_merge_test.cpp
@@ -6,7 +6,7 @@
#include <vespa/eval/instruction/generic_merge.h>
#include <vespa/eval/eval/interpreted_function.h>
#include <vespa/eval/eval/test/reference_operations.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/vespalib/util/stringfmt.h>
#include <vespa/vespalib/gtest/gtest.h>
#include <optional>
@@ -18,20 +18,22 @@ using namespace vespalib::eval::test;
using vespalib::make_string_short::fmt;
-std::vector<Layout> merge_layouts = {
- {}, {},
- {x(5)}, {x(5)},
- {x(3),y(5)}, {x(3),y(5)},
- float_cells({x(3),y(5)}), {x(3),y(5)},
- {x(3),y(5)}, float_cells({x(3),y(5)}),
- {x({"a","b","c"})}, {x({"a","b","c"})},
- {x({"a","b","c"})}, {x({"c","d","e"})},
- {x({"a","c","e"})}, {x({"b","c","d"})},
- {x({"b","c","d"})}, {x({"a","c","e"})},
- {x({"a","b","c"})}, {x({"c","d"})},
- {x({"a","b"}),y({"foo","bar","baz"})}, {x({"b","c"}),y({"any","foo","bar"})},
- {x(3),y({"foo", "bar"})}, {x(3),y({"baz", "bar"})},
- {x({"a","b","c"}),y(5)}, {x({"b","c","d"}),y(5)}
+GenSpec G() { return GenSpec().cells_float(); }
+
+GenSpec::seq_t N_16ths = [] (size_t i) noexcept { return (i + 1.0) / 16.0; };
+
+std::vector<GenSpec> merge_layouts = {
+ G(), G(),
+ G().idx("x", 5), G().idx("x", 5),
+ G().idx("x", 3).idx("y", 5), G().idx("x", 3).idx("y", 5),
+ G().map("x", {"a","b","c"}), G().map("x", {"a","b","c"}),
+ G().map("x", {"a","b","c"}), G().map("x", {"c","d","e"}),
+ G().map("x", {"a","c","e"}), G().map("x", {"b","c","d"}),
+ G().map("x", {"b","c","d"}), G().map("x", {"a","c","e"}),
+ G().map("x", {"a","b","c"}), G().map("x", {"c","d"}),
+ G().map("x", {"a","b"}).map("y", {"foo","bar","baz"}), G().map("x", {"b","c"}).map("y", {"any","foo","bar"}),
+ G().idx("x", 3).map("y", {"foo", "bar"}), G().idx("x", 3).map("y", {"baz", "bar"}),
+ G().map("x", {"a","b","c"}).idx("y", 5), G().map("x", {"b","c","d"}).idx("y", 5)
};
TensorSpec perform_generic_merge(const TensorSpec &a, const TensorSpec &b, join_fun_t fun, const ValueBuilderFactory &factory) {
@@ -46,13 +48,17 @@ TensorSpec perform_generic_merge(const TensorSpec &a, const TensorSpec &b, join_
void test_generic_merge_with(const ValueBuilderFactory &factory) {
ASSERT_TRUE((merge_layouts.size() % 2) == 0);
for (size_t i = 0; i < merge_layouts.size(); i += 2) {
- TensorSpec lhs = spec(merge_layouts[i], N());
- TensorSpec rhs = spec(merge_layouts[i + 1], Div16(N()));
- SCOPED_TRACE(fmt("\n===\nLHS: %s\nRHS: %s\n===\n", lhs.to_string().c_str(), rhs.to_string().c_str()));
- for (auto fun: {operation::Add::f, operation::Mul::f, operation::Sub::f, operation::Max::f}) {
- auto expect = ReferenceOperations::merge(lhs, rhs, fun);
- auto actual = perform_generic_merge(lhs, rhs, fun, factory);
- EXPECT_EQ(actual, expect);
+ const auto &l = merge_layouts[i];
+ const auto &r = merge_layouts[i+1].seq(N_16ths);
+ for (TensorSpec lhs : { l.gen(), l.cpy().cells_double().gen() }) {
+ for (TensorSpec rhs : { r.gen(), r.cpy().cells_double().gen() }) {
+ SCOPED_TRACE(fmt("\n===\nLHS: %s\nRHS: %s\n===\n", lhs.to_string().c_str(), rhs.to_string().c_str()));
+ for (auto fun: {operation::Add::f, operation::Mul::f, operation::Sub::f, operation::Max::f}) {
+ auto expect = ReferenceOperations::merge(lhs, rhs, fun);
+ auto actual = perform_generic_merge(lhs, rhs, fun, factory);
+ EXPECT_EQ(actual, expect);
+ }
+ }
}
}
}
diff --git a/eval/src/tests/instruction/generic_peek/generic_peek_test.cpp b/eval/src/tests/instruction/generic_peek/generic_peek_test.cpp
index 6841215038a..092a91711ba 100644
--- a/eval/src/tests/instruction/generic_peek/generic_peek_test.cpp
+++ b/eval/src/tests/instruction/generic_peek/generic_peek_test.cpp
@@ -7,7 +7,7 @@
#include <vespa/eval/instruction/generic_peek.h>
#include <vespa/eval/eval/interpreted_function.h>
#include <vespa/eval/eval/test/reference_operations.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/vespalib/util/stringfmt.h>
#include <vespa/vespalib/util/overload.h>
#include <vespa/vespalib/gtest/gtest.h>
@@ -22,17 +22,16 @@ using namespace vespalib::eval::test;
using vespalib::make_string_short::fmt;
-std::vector<Layout> peek_layouts = {
- {x(4)},
- {x(4),y(5)},
- {x(4),y(5),z(3)},
- float_cells({x(4),y(5),z(3)}),
- {x({"-1","0","2"})},
- {x({"-1","0","2"}),y({"-2","0","1"}),z({"-2","-1","0","1","2"})},
- float_cells({x({"-1","0","2"}),y({"-2","0","1"})}),
- {x(4),y({"-2","0","1"}),z(3)},
- {x({"-1","0","2"}),y(5),z({"-2","-1","0","1","2"})},
- float_cells({x({"-1","0","2"}),y(5),z({"-2","-1","0","1","2"})})
+GenSpec G() { return GenSpec().cells_float(); }
+
+std::vector<GenSpec> peek_layouts = {
+ G().idx("x", 4),
+ G().idx("x", 4).idx("y", 5),
+ G().idx("x", 4).idx("y", 5).idx("z", 3),
+ G().map("x", {"-1","0","2"}),
+ G().map("x", {"-1","0","2"}).map("y", {"-2","0","1"}).map("z", {"-2","-1","0","1","2"}),
+ G().idx("x", 4).map("y", {"-2","0","1"}).idx("z", 3),
+ G().map("x", {"-1","0","2"}).idx("y", 5).map("z", {"-2","-1","0","1","2"})
};
using PeekSpec = GenericPeek::SpecMap;
@@ -194,12 +193,13 @@ void fill_dims_and_check(const TensorSpec &input,
}
void test_generic_peek_with(const ValueBuilderFactory &factory) {
- for (const auto & layout : peek_layouts) {
- TensorSpec input = spec(layout, N());
- ValueType input_type = ValueType::from_spec(input.type());
- const auto &dims = input_type.dimensions();
- PeekSpec spec;
- fill_dims_and_check(input, spec, dims, factory);
+ for (const auto &layout : peek_layouts) {
+ for (TensorSpec input : { layout.gen(), layout.cpy().cells_double().gen() }) {
+ ValueType input_type = ValueType::from_spec(input.type());
+ const auto &dims = input_type.dimensions();
+ PeekSpec spec;
+ fill_dims_and_check(input, spec, dims, factory);
+ }
}
}
diff --git a/eval/src/tests/instruction/generic_reduce/generic_reduce_test.cpp b/eval/src/tests/instruction/generic_reduce/generic_reduce_test.cpp
index 9e2090fa968..77317b0ee27 100644
--- a/eval/src/tests/instruction/generic_reduce/generic_reduce_test.cpp
+++ b/eval/src/tests/instruction/generic_reduce/generic_reduce_test.cpp
@@ -6,7 +6,7 @@
#include <vespa/eval/instruction/generic_reduce.h>
#include <vespa/eval/eval/interpreted_function.h>
#include <vespa/eval/eval/test/reference_operations.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/vespalib/util/stringfmt.h>
#include <vespa/vespalib/gtest/gtest.h>
#include <optional>
@@ -18,22 +18,23 @@ using namespace vespalib::eval::test;
using vespalib::make_string_short::fmt;
-std::vector<Layout> layouts = {
- {},
- {x(3)},
- {x(3),y(5)},
- {x(3),y(5),z(7)},
- float_cells({x(3),y(5),z(7)}),
- {x({"a","b","c"})},
- {x({})},
- {x({}),y(10)},
- {x({"a","b","c"}),y({"foo","bar"})},
- {x({"a","b","c"}),y({"foo","bar"}),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y({"foo","bar"}),z({"i","j","k","l"})}),
- {x(3),y({"foo", "bar"}),z(7)},
- {x({"a","b","c"}),y(5),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y(5),z({"i","j","k","l"})}),
- {x(3),y({}),z(7)}
+GenSpec::seq_t N_16ths = [] (size_t i) noexcept { return (i + 1.0) / 16.0; };
+
+GenSpec G() { return GenSpec().cells_float().seq(N_16ths); }
+
+std::vector<GenSpec> layouts = {
+ G(),
+ G().idx("x", 3),
+ G().idx("x", 3).idx("y", 5),
+ G().idx("x", 3).idx("y", 5).idx("z", 7),
+ G().map("x", {"a","b","c"}),
+ G().map("x", {}),
+ G().map("x", {}).idx("y", 10),
+ G().map("x", {"a","b","c"}).map("y", {"foo","bar"}),
+ G().map("x", {"a","b","c"}).map("y", {"foo","bar"}).map("z", {"i","j","k","l"}),
+ G().idx("x", 3).map("y", {"foo", "bar"}).idx("z", 7),
+ G().map("x", {"a","b","c"}).idx("y", 5).map("z", {"i","j","k","l"}),
+ G().idx("x", 3).map("y", {}).idx("z", 7)
};
TensorSpec perform_generic_reduce(const TensorSpec &a, Aggr aggr, const std::vector<vespalib::string> &dims,
@@ -68,19 +69,21 @@ TEST(GenericReduceTest, sparse_reduce_plan_can_be_created) {
}
void test_generic_reduce_with(const ValueBuilderFactory &factory) {
- for (const Layout &layout: layouts) {
- TensorSpec input = spec(layout, Div16(N()));
- SCOPED_TRACE(fmt("tensor type: %s, num_cells: %zu", input.type().c_str(), input.cells().size()));
- for (Aggr aggr: {Aggr::SUM, Aggr::AVG, Aggr::MIN, Aggr::MAX}) {
- SCOPED_TRACE(fmt("aggregator: %s", AggrNames::name_of(aggr)->c_str()));
- for (const Domain &domain: layout) {
- auto expect = ReferenceOperations::reduce(input, aggr, {domain.dimension}).normalize();
- auto actual = perform_generic_reduce(input, aggr, {domain.dimension}, factory);
+ for (const auto &layout: layouts) {
+ for (TensorSpec input : { layout.gen(), layout.cpy().cells_double().gen() }) {
+ SCOPED_TRACE(fmt("tensor type: %s, num_cells: %zu", input.type().c_str(), input.cells().size()));
+ for (Aggr aggr: {Aggr::SUM, Aggr::AVG, Aggr::MIN, Aggr::MAX}) {
+ SCOPED_TRACE(fmt("aggregator: %s", AggrNames::name_of(aggr)->c_str()));
+ auto t = layout.type();
+ for (const auto & dim: t.dimensions()) {
+ auto expect = ReferenceOperations::reduce(input, aggr, {dim.name}).normalize();
+ auto actual = perform_generic_reduce(input, aggr, {dim.name}, factory);
+ EXPECT_EQ(actual, expect);
+ }
+ auto expect = ReferenceOperations::reduce(input, aggr, {}).normalize();
+ auto actual = perform_generic_reduce(input, aggr, {}, factory);
EXPECT_EQ(actual, expect);
}
- auto expect = ReferenceOperations::reduce(input, aggr, {}).normalize();
- auto actual = perform_generic_reduce(input, aggr, {}, factory);
- EXPECT_EQ(actual, expect);
}
}
}
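
Editor's sketch, not part of the patch: with the old Layout/Domain types gone, the reduce test above derives its per-dimension reduce targets from the layout itself via GenSpec::type(). A minimal version of that lookup (dimension names chosen for illustration):

#include <vespa/eval/eval/test/gen_spec.h>
#include <cstdio>

using namespace vespalib::eval;
using namespace vespalib::eval::test;

int main() {
    GenSpec layout = GenSpec().idx("x", 3).map("y", {"foo", "bar"});
    auto t = layout.type(); // value type of the tensors this layout generates
    for (const auto &dim : t.dimensions()) {
        printf("reduce dimension: %s\n", dim.name.c_str());
    }
    return 0;
}
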
diff --git a/eval/src/tests/instruction/generic_rename/generic_rename_test.cpp b/eval/src/tests/instruction/generic_rename/generic_rename_test.cpp
index 20d155822b5..430e417e288 100644
--- a/eval/src/tests/instruction/generic_rename/generic_rename_test.cpp
+++ b/eval/src/tests/instruction/generic_rename/generic_rename_test.cpp
@@ -5,7 +5,7 @@
#include <vespa/eval/eval/value_codec.h>
#include <vespa/eval/instruction/generic_rename.h>
#include <vespa/eval/eval/interpreted_function.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/eval/eval/test/reference_operations.h>
#include <vespa/vespalib/util/stringfmt.h>
#include <vespa/vespalib/gtest/gtest.h>
@@ -17,18 +17,17 @@ using namespace vespalib::eval::test;
using vespalib::make_string_short::fmt;
-std::vector<Layout> rename_layouts = {
- {x(3)},
- {x(3),y(5)},
- {x(3),y(5),z(7)},
- float_cells({x(3),y(5),z(7)}),
- {x({"a","b","c"})},
- {x({"a","b","c"}),y({"foo","bar"})},
- {x({"a","b","c"}),y({"foo","bar"}),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y({"foo","bar"}),z({"i","j","k","l"})}),
- {x(3),y({"foo", "bar"}),z(7)},
- {x({"a","b","c"}),y(5),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y(5),z({"i","j","k","l"})})
+GenSpec G() { return GenSpec().cells_float(); }
+
+std::vector<GenSpec> rename_layouts = {
+ G().idx("x", 3),
+ G().idx("x", 3).idx("y", 5),
+ G().idx("x", 3).idx("y", 5).idx("z", 7),
+ G().map("x", {"a","b","c"}),
+ G().map("x", {"a","b","c"}).map("y", {"foo","bar"}),
+ G().map("x", {"a","b","c"}).map("y", {"foo","bar"}).map("z", {"i","j","k","l"}),
+ G().idx("x", 3).map("y", {"foo", "bar"}).idx("z", 7),
+ G().map("x", {"a","b","c"}).idx("y", 5).map("z", {"i","j","k","l"})
};
struct FromTo {
@@ -110,18 +109,18 @@ TensorSpec perform_generic_rename(const TensorSpec &a,
}
void test_generic_rename_with(const ValueBuilderFactory &factory) {
- for (const auto & layout : rename_layouts) {
- TensorSpec lhs = spec(layout, N());
- ValueType lhs_type = ValueType::from_spec(lhs.type());
- // printf("lhs_type: %s\n", lhs_type.to_spec().c_str());
- for (const auto & from_to : rename_from_to) {
- ValueType renamed_type = lhs_type.rename(from_to.from, from_to.to);
- if (renamed_type.is_error()) continue;
- // printf("type %s -> %s\n", lhs_type.to_spec().c_str(), renamed_type.to_spec().c_str());
- SCOPED_TRACE(fmt("\n===\nLHS: %s\n===\n", lhs.to_string().c_str()));
- auto expect = ReferenceOperations::rename(lhs, from_to.from, from_to.to);
- auto actual = perform_generic_rename(lhs, from_to, factory);
- EXPECT_EQ(actual, expect);
+ for (const auto &layout : rename_layouts) {
+ for (TensorSpec lhs : { layout.gen(), layout.cpy().cells_double().gen() }) {
+ ValueType lhs_type = ValueType::from_spec(lhs.type());
+ for (const auto & from_to : rename_from_to) {
+ ValueType renamed_type = lhs_type.rename(from_to.from, from_to.to);
+ if (renamed_type.is_error()) continue;
+ // printf("type %s -> %s\n", lhs_type.to_spec().c_str(), renamed_type.to_spec().c_str());
+ SCOPED_TRACE(fmt("\n===\nLHS: %s\n===\n", lhs.to_string().c_str()));
+ auto expect = ReferenceOperations::rename(lhs, from_to.from, from_to.to);
+ auto actual = perform_generic_rename(lhs, from_to, factory);
+ EXPECT_EQ(actual, expect);
+ }
}
}
}
diff --git a/eval/src/tests/instruction/join_with_number/join_with_number_function_test.cpp b/eval/src/tests/instruction/join_with_number/join_with_number_function_test.cpp
index a3fbb3ed529..2d943aa569e 100644
--- a/eval/src/tests/instruction/join_with_number/join_with_number_function_test.cpp
+++ b/eval/src/tests/instruction/join_with_number/join_with_number_function_test.cpp
@@ -4,8 +4,7 @@
#include <vespa/eval/eval/fast_value.h>
#include <vespa/eval/eval/tensor_function.h>
#include <vespa/eval/eval/test/eval_fixture.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
-#include <vespa/eval/eval/test/param_variants.h>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/eval/instruction/join_with_number_function.h>
#include <vespa/vespalib/util/stringfmt.h>
@@ -34,15 +33,16 @@ std::ostream &operator<<(std::ostream &os, Primary primary)
const ValueBuilderFactory &prod_factory = FastValueBuilderFactory::get();
+TensorSpec spec(double v) { return TensorSpec("double").add({}, v); }
+
EvalFixture::ParamRepo make_params() {
auto repo = EvalFixture::ParamRepo()
.add("a", spec(1.5))
.add("number", spec(2.5))
- .add("dense", spec({y(5)}, N()))
- .add_matrix("x", 3, "y", 5);
-
- add_variants(repo, "mixed", {x({"a"}),y(5),z({"d","e"})}, N());
- add_variants(repo, "sparse", {x({"a","b","c"}),z({"d","e","f"})}, N());
+ .add("dense", GenSpec().idx("y", 5).gen())
+ .add_variants("x3y5", GenSpec().idx("x", 3).idx("y", 5))
+ .add_variants("mixed", GenSpec().map("x", {"a"}).idx("y", 5).map("z", {"d","e"}))
+ .add_variants("sparse", GenSpec().map("x", {"a","b","c"}).map("z", {"d","e","f"}));
return repo;
}
@@ -81,22 +81,22 @@ void verify_not_optimized(const vespalib::string &expr) {
TEST("require that dense number join can be optimized") {
TEST_DO(verify_optimized("x3y5+a", Primary::LHS, false));
TEST_DO(verify_optimized("a+x3y5", Primary::RHS, false));
- TEST_DO(verify_optimized("x3y5f*a", Primary::LHS, false));
- TEST_DO(verify_optimized("a*x3y5f", Primary::RHS, false));
+ TEST_DO(verify_optimized("x3y5_f*a", Primary::LHS, false));
+ TEST_DO(verify_optimized("a*x3y5_f", Primary::RHS, false));
}
TEST("require that dense number join can be inplace") {
TEST_DO(verify_optimized("@x3y5*a", Primary::LHS, true));
TEST_DO(verify_optimized("a*@x3y5", Primary::RHS, true));
- TEST_DO(verify_optimized("@x3y5f+a", Primary::LHS, true));
- TEST_DO(verify_optimized("a+@x3y5f", Primary::RHS, true));
+ TEST_DO(verify_optimized("@x3y5_f+a", Primary::LHS, true));
+ TEST_DO(verify_optimized("a+@x3y5_f", Primary::RHS, true));
}
TEST("require that asymmetric operations work") {
TEST_DO(verify_optimized("x3y5/a", Primary::LHS, false));
TEST_DO(verify_optimized("a/x3y5", Primary::RHS, false));
- TEST_DO(verify_optimized("x3y5f-a", Primary::LHS, false));
- TEST_DO(verify_optimized("a-x3y5f", Primary::RHS, false));
+ TEST_DO(verify_optimized("x3y5_f-a", Primary::LHS, false));
+ TEST_DO(verify_optimized("a-x3y5_f", Primary::RHS, false));
}
TEST("require that sparse number join can be optimized") {
diff --git a/eval/src/tests/instruction/mixed_inner_product_function/mixed_inner_product_function_test.cpp b/eval/src/tests/instruction/mixed_inner_product_function/mixed_inner_product_function_test.cpp
index fbe71f3ed63..6b549b4d4d4 100644
--- a/eval/src/tests/instruction/mixed_inner_product_function/mixed_inner_product_function_test.cpp
+++ b/eval/src/tests/instruction/mixed_inner_product_function/mixed_inner_product_function_test.cpp
@@ -3,7 +3,7 @@
#include <vespa/eval/eval/fast_value.h>
#include <vespa/eval/eval/tensor_function.h>
#include <vespa/eval/eval/test/eval_fixture.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/eval/instruction/dense_dot_product_function.h>
#include <vespa/eval/instruction/dense_matmul_function.h>
#include <vespa/eval/instruction/dense_multi_matmul_function.h>
@@ -22,34 +22,25 @@ using namespace vespalib::eval::test;
const ValueBuilderFactory &prod_factory = FastValueBuilderFactory::get();
-struct MyVecSeq : Sequence {
- double bias;
- double operator[](size_t i) const override { return (i + bias); }
- MyVecSeq(double cellBias) : bias(cellBias) {}
-};
-
-std::function<double(size_t)> my_vec_gen(double cellBias) {
- return [=] (size_t i) noexcept { return i + cellBias; };
-}
//-----------------------------------------------------------------------------
EvalFixture::ParamRepo make_params() {
return EvalFixture::ParamRepo()
- .add_vector("x", 3, my_vec_gen(2.0))
- .add_vector("x", 3, my_vec_gen(13.25))
- .add_vector("y", 3, my_vec_gen(4.0))
- .add_vector("z", 3, my_vec_gen(0.25))
- .add_matrix("x", 3, "y", 1, my_vec_gen(5.0))
- .add_matrix("x", 1, "y", 3, my_vec_gen(6.0))
- .add_matrix("x", 3, "y", 3, my_vec_gen(1.5))
- .add_matrix("x", 3, "z", 3, my_vec_gen(2.5))
- .add_cube("x", 3, "y", 3, "z", 3, my_vec_gen(-4.0))
- .add("mix_x3zm", spec({x(3),z({"c","d"})}, MyVecSeq(0.5)))
- .add("mix_y3zm", spec({y(3),z({"c","d"})}, MyVecSeq(3.5)))
- .add("mix_x3zm_f", spec(float_cells({x(3),z({"c","d"})}), MyVecSeq(0.5)))
- .add("mix_y3zm_f", spec(float_cells({y(3),z({"c","d"})}), MyVecSeq(3.5)))
- .add("mix_x3y3zm", spec({x(3),y(3),z({"c","d"})}, MyVecSeq(0.0)))
+ .add_variants("x3", GenSpec().idx("x", 3).seq_bias(2.0))
+ .add_variants("x3$2", GenSpec().idx("x", 3).seq_bias(13.25))
+ .add_variants("y3", GenSpec().idx("y", 3).seq_bias(4.0))
+ .add_variants("z3", GenSpec().idx("z", 3).seq_bias(0.25))
+ .add_variants("x3y3", GenSpec().idx("x", 3).idx("y", 3).seq_bias(5.0))
+ .add_variants("x1y3", GenSpec().idx("x", 1).idx("y", 3).seq_bias(6.0))
+ .add_variants("x3y1", GenSpec().idx("x", 3).idx("y", 1).seq_bias(1.5))
+ .add_variants("x3z3", GenSpec().idx("x", 3).idx("z", 3).seq_bias(2.5))
+ .add_variants("x3y3z3", GenSpec().idx("x", 3).idx("y", 3).idx("z", 3).seq_bias(-4.0))
+ .add("mix_x3zm", GenSpec().idx("x", 3).map("z", {"c","d"}).seq_bias(0.5).gen())
+ .add("mix_y3zm", GenSpec().idx("y", 3).map("z", {"c","d"}).seq_bias(3.5).gen())
+ .add("mix_x3zm_f", GenSpec().idx("x", 3).map("z", {"c","d"}).cells_float().seq_bias(0.5).gen())
+ .add("mix_y3zm_f", GenSpec().idx("y", 3).map("z", {"c","d"}).cells_float().seq_bias(3.5).gen())
+ .add("mix_x3y3zm", GenSpec().idx("x", 3).idx("y", 3).map("z", {"c","d"}).seq_bias(0.0).gen())
;
}
@@ -101,35 +92,35 @@ TEST(MixedInnerProduct, use_dense_optimizers_when_possible) {
TEST(MixedInnerProduct, trigger_optimizer_when_possible) {
assert_mixed_optimized("reduce(x3 * mix_x3zm,sum,x)");
- assert_mixed_optimized("reduce(x3f * mix_x3zm,sum,x)");
+ assert_mixed_optimized("reduce(x3_f * mix_x3zm,sum,x)");
assert_mixed_optimized("reduce(x3 * mix_x3zm_f,sum,x)");
- assert_mixed_optimized("reduce(x3f * mix_x3zm_f,sum,x)");
+ assert_mixed_optimized("reduce(x3_f * mix_x3zm_f,sum,x)");
assert_mixed_optimized("reduce(x3$2 * mix_x3zm,sum,x)");
- assert_mixed_optimized("reduce(x3f$2 * mix_x3zm,sum,x)");
+ assert_mixed_optimized("reduce(x3$2_f * mix_x3zm,sum,x)");
assert_mixed_optimized("reduce(y3 * mix_y3zm,sum,y)");
- assert_mixed_optimized("reduce(y3f * mix_y3zm,sum,y)");
+ assert_mixed_optimized("reduce(y3_f * mix_y3zm,sum,y)");
assert_mixed_optimized("reduce(y3 * mix_y3zm_f,sum,y)");
- assert_mixed_optimized("reduce(y3f * mix_y3zm_f,sum,y)");
+ assert_mixed_optimized("reduce(y3_f * mix_y3zm_f,sum,y)");
assert_mixed_optimized("reduce(x3y1 * mix_x3zm,sum,x)");
- assert_mixed_optimized("reduce(x3y1f * mix_x3zm,sum,x)");
+ assert_mixed_optimized("reduce(x3y1_f * mix_x3zm,sum,x)");
assert_mixed_optimized("reduce(x3y1 * mix_x3zm,sum,x,y)");
- assert_mixed_optimized("reduce(x3y1f * mix_x3zm,sum,x,y)");
+ assert_mixed_optimized("reduce(x3y1_f * mix_x3zm,sum,x,y)");
assert_mixed_optimized("reduce(x1y3 * mix_y3zm,sum,y)");
- assert_mixed_optimized("reduce(x1y3f * mix_y3zm,sum,y)");
+ assert_mixed_optimized("reduce(x1y3_f * mix_y3zm,sum,y)");
assert_mixed_optimized("reduce(x1y3 * x1y3,sum,y)");
- assert_mixed_optimized("reduce(x1y3 * x1y3f,sum,y)");
- assert_mixed_optimized("reduce(x1y3f * x1y3,sum,y)");
- assert_mixed_optimized("reduce(x1y3f * x1y3f,sum,y)");
+ assert_mixed_optimized("reduce(x1y3 * x1y3_f,sum,y)");
+ assert_mixed_optimized("reduce(x1y3_f * x1y3,sum,y)");
+ assert_mixed_optimized("reduce(x1y3_f * x1y3_f,sum,y)");
assert_mixed_optimized("reduce(x1y3 * mix_y3zm,sum,y)");
- assert_mixed_optimized("reduce(x1y3f * mix_y3zm,sum,y)");
+ assert_mixed_optimized("reduce(x1y3_f * mix_y3zm,sum,y)");
assert_mixed_optimized("reduce(mix_x3zm * x3,sum,x)");
- assert_mixed_optimized("reduce(mix_x3zm * x3f,sum,x)");
+ assert_mixed_optimized("reduce(mix_x3zm * x3_f,sum,x)");
assert_mixed_optimized("reduce(mix_x3zm * x3y1,sum,x)");
- assert_mixed_optimized("reduce(mix_x3zm * x3y1f,sum,x)");
+ assert_mixed_optimized("reduce(mix_x3zm * x3y1_f,sum,x)");
assert_mixed_optimized("reduce(mix_y3zm * y3,sum,y)");
- assert_mixed_optimized("reduce(mix_y3zm * y3f,sum,y)");
+ assert_mixed_optimized("reduce(mix_y3zm * y3_f,sum,y)");
assert_mixed_optimized("reduce(mix_y3zm * x1y3,sum,y)");
- assert_mixed_optimized("reduce(mix_y3zm * x1y3f,sum,y)");
+ assert_mixed_optimized("reduce(mix_y3zm * x1y3_f,sum,y)");
}
TEST(MixedInnerProduct, should_not_trigger_optimizer_for_other_cases) {
diff --git a/eval/src/tests/instruction/mixed_map_function/mixed_map_function_test.cpp b/eval/src/tests/instruction/mixed_map_function/mixed_map_function_test.cpp
index 3caebea7298..45e885fac33 100644
--- a/eval/src/tests/instruction/mixed_map_function/mixed_map_function_test.cpp
+++ b/eval/src/tests/instruction/mixed_map_function/mixed_map_function_test.cpp
@@ -3,7 +3,7 @@
#include <vespa/eval/eval/tensor_function.h>
#include <vespa/eval/instruction/mixed_map_function.h>
#include <vespa/eval/eval/test/eval_fixture.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/vespalib/gtest/gtest.h>
using namespace vespalib;
@@ -13,14 +13,18 @@ using namespace vespalib::eval::tensor_function;
const ValueBuilderFactory &prod_factory = FastValueBuilderFactory::get();
+TensorSpec spec(double v) { return TensorSpec("double").add({}, v); }
+TensorSpec sparse_spec = GenSpec().map("x", {"a"}).gen();
+TensorSpec mixed_spec = GenSpec().map("x", {"a"}).idx("y", 5).gen();
+
EvalFixture::ParamRepo make_params() {
return EvalFixture::ParamRepo()
.add("a", spec(1.5))
.add("b", spec(2.5))
- .add("sparse", spec({x({"a"})}, N()))
- .add("mixed", spec({x({"a"}),y(5)}, N()))
- .add_mutable("@sparse", spec({x({"a"})}, N()))
- .add_mutable("@mixed", spec({x({"a"}),y(5)}, N()))
+ .add("sparse", sparse_spec)
+ .add("mixed", mixed_spec)
+ .add_mutable("@sparse", sparse_spec)
+ .add_mutable("@mixed", mixed_spec)
.add_matrix("x", 5, "y", 3);
}
EvalFixture::ParamRepo param_repo = make_params();
diff --git a/eval/src/tests/instruction/mixed_simple_join_function/mixed_simple_join_function_test.cpp b/eval/src/tests/instruction/mixed_simple_join_function/mixed_simple_join_function_test.cpp
index 9c891adf179..02e13fcbef3 100644
--- a/eval/src/tests/instruction/mixed_simple_join_function/mixed_simple_join_function_test.cpp
+++ b/eval/src/tests/instruction/mixed_simple_join_function/mixed_simple_join_function_test.cpp
@@ -4,7 +4,7 @@
#include <vespa/eval/eval/tensor_function.h>
#include <vespa/eval/instruction/mixed_simple_join_function.h>
#include <vespa/eval/eval/test/eval_fixture.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/vespalib/util/stringfmt.h>
@@ -43,26 +43,28 @@ std::ostream &operator<<(std::ostream &os, Overlap overlap)
const ValueBuilderFactory &prod_factory = FastValueBuilderFactory::get();
+TensorSpec spec(double v) { return TensorSpec("double").add({}, v); }
+
EvalFixture::ParamRepo make_params() {
return EvalFixture::ParamRepo()
.add("a", spec(1.5))
.add("b", spec(2.5))
- .add("sparse", spec({x({"a", "b", "c"})}, N()))
- .add("mixed", spec({x({"a", "b", "c"}),y(5),z(3)}, N()))
- .add("empty_mixed", spec({x({}),y(5),z(3)}, N()))
- .add_mutable("@mixed", spec({x({"a", "b", "c"}),y(5),z(3)}, N()))
- .add_cube("a", 1, "b", 1, "c", 1)
- .add_cube("x", 1, "y", 1, "z", 1)
- .add_cube("x", 3, "y", 5, "z", 3)
- .add_vector("z", 3)
- .add_dense({{"c", 5}, {"d", 1}})
- .add_dense({{"b", 1}, {"c", 5}})
- .add_matrix("x", 3, "y", 5, [](size_t idx) noexcept { return double((idx * 2) + 3); })
- .add_matrix("x", 3, "y", 5, [](size_t idx) noexcept { return double((idx * 3) + 2); })
- .add_vector("y", 5, [](size_t idx) noexcept { return double((idx * 2) + 3); })
- .add_vector("y", 5, [](size_t idx) noexcept { return double((idx * 3) + 2); })
- .add_matrix("y", 5, "z", 3, [](size_t idx) noexcept { return double((idx * 2) + 3); })
- .add_matrix("y", 5, "z", 3, [](size_t idx) noexcept { return double((idx * 3) + 2); });
+ .add("sparse", GenSpec().map("x", {"a", "b", "c"}).gen())
+ .add("mixed", GenSpec().map("x", {"a", "b", "c"}).idx("y", 5).idx("z", 3).gen())
+ .add("empty_mixed", GenSpec().map("x", {}).idx("y", 5).idx("z", 3).gen())
+ .add_mutable("@mixed", GenSpec().map("x", {"a", "b", "c"}).idx("y", 5).idx("z", 3).gen())
+ .add_variants("a1b1c1", GenSpec().idx("a", 1).idx("b", 1).idx("c", 1))
+ .add_variants("x1y1z1", GenSpec().idx("x", 1).idx("y", 1).idx("z", 1))
+ .add_variants("x3y5z3", GenSpec().idx("x", 3).idx("y", 5).idx("z", 3))
+ .add_variants("z3", GenSpec().idx("z", 3))
+ .add_variants("c5d1", GenSpec().idx("c", 5).idx("d", 1))
+ .add_variants("b1c5", GenSpec().idx("b", 1).idx("c", 5))
+ .add_variants("x3y5", GenSpec().idx("x", 3).idx("y", 5).seq([](size_t idx) noexcept { return double((idx * 2) + 3); }))
+ .add_variants("x3y5$2", GenSpec().idx("x", 3).idx("y", 5).seq([](size_t idx) noexcept { return double((idx * 3) + 2); }))
+ .add_variants("y5", GenSpec().idx("y", 5).seq([](size_t idx) noexcept { return double((idx * 2) + 3); }))
+ .add_variants("y5$2", GenSpec().idx("y", 5).seq([](size_t idx) noexcept { return double((idx * 3) + 2); }))
+ .add_variants("y5z3", GenSpec().idx("y", 5).idx("z", 3).seq([](size_t idx) noexcept { return double((idx * 2) + 3); }))
+ .add_variants("y5z3$2", GenSpec().idx("y", 5).idx("z", 3).seq([](size_t idx) noexcept { return double((idx * 3) + 2); }));
}
EvalFixture::ParamRepo param_repo = make_params();
@@ -149,12 +151,12 @@ vespalib::string adjust_param(const vespalib::string &str, bool float_cells, boo
if (mut_cells) {
result = "@" + result;
}
- if (float_cells) {
- result += "f";
- }
if (is_rhs) {
result += "$2";
}
+ if (float_cells) {
+ result += "_f";
+ }
return result;
}
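
Editor's sketch, not part of the patch: after this change the fixture parameter names follow one scheme; a trailing "_f" marks the float-cell variant (previously a bare "f"), "@" still marks a mutable parameter, and "$2" marks a second instance, now placed before "_f" (x3y5$2_f rather than x3y5f$2). A minimal example of registering such variants with add_variants; the sizes and bias are illustrative, and the exact set of names add_variants registers is inferred from the expressions used in these tests:

#include <vespa/eval/eval/test/eval_fixture.h>
#include <vespa/eval/eval/test/gen_spec.h>

using namespace vespalib::eval;
using namespace vespalib::eval::test;

EvalFixture::ParamRepo make_repo() {
    return EvalFixture::ParamRepo()
        .add_variants("x3y5", GenSpec().idx("x", 3).idx("y", 5))                  // referenced above as x3y5, x3y5_f, @x3y5, ...
        .add_variants("x3y5$2", GenSpec().idx("x", 3).idx("y", 5).seq_bias(7.0)); // second instance: x3y5$2, x3y5$2_f
}

int main() {
    auto repo = make_repo();
    (void) repo;
    return 0;
}
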
diff --git a/eval/src/tests/instruction/pow_as_map_optimizer/pow_as_map_optimizer_test.cpp b/eval/src/tests/instruction/pow_as_map_optimizer/pow_as_map_optimizer_test.cpp
index b4bf9ec5ef6..cceb18bfea6 100644
--- a/eval/src/tests/instruction/pow_as_map_optimizer/pow_as_map_optimizer_test.cpp
+++ b/eval/src/tests/instruction/pow_as_map_optimizer/pow_as_map_optimizer_test.cpp
@@ -3,7 +3,7 @@
#include <vespa/eval/eval/fast_value.h>
#include <vespa/eval/eval/tensor_function.h>
#include <vespa/eval/eval/test/eval_fixture.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/vespalib/gtest/gtest.h>
using namespace vespalib::eval::operation;
@@ -14,13 +14,15 @@ using namespace vespalib::eval;
const ValueBuilderFactory &prod_factory = FastValueBuilderFactory::get();
+TensorSpec spec(double v) { return TensorSpec("double").add({}, v); }
+
EvalFixture::ParamRepo make_params() {
return EvalFixture::ParamRepo()
.add("a", spec(1.5))
.add("b", spec(2.5))
- .add("sparse", spec({x({"a","b"})}, N()))
- .add("mixed", spec({x({"a"}),y(5)}, N()))
- .add_matrix("x", 5, "y", 3);
+ .add("sparse", GenSpec().map("x", {"a","b"}).gen())
+ .add("mixed", GenSpec().map("x", {"a"}).idx("y", 5).gen())
+ .add_variants("x5y3", GenSpec().idx("x", 5).idx("y", 3));
}
EvalFixture::ParamRepo param_repo = make_params();
@@ -55,9 +57,9 @@ TEST(PowAsMapTest, squared_dense_tensor_is_optimized) {
verify_optimized("pow(x5y3,2.0)", Square::f);
verify_optimized("join(x5y3,2.0,f(x,y)(x^y))", Square::f);
verify_optimized("join(x5y3,2.0,f(x,y)(pow(x,y)))", Square::f);
- verify_optimized("join(x5y3f,2.0,f(x,y)(pow(x,y)))", Square::f);
+ verify_optimized("join(x5y3_f,2.0,f(x,y)(pow(x,y)))", Square::f);
verify_optimized("join(@x5y3,2.0,f(x,y)(pow(x,y)))", Square::f, true);
- verify_optimized("join(@x5y3f,2.0,f(x,y)(pow(x,y)))", Square::f, true);
+ verify_optimized("join(@x5y3_f,2.0,f(x,y)(pow(x,y)))", Square::f, true);
}
TEST(PowAsMapTest, cubed_dense_tensor_is_optimized) {
@@ -65,9 +67,9 @@ TEST(PowAsMapTest, cubed_dense_tensor_is_optimized) {
verify_optimized("pow(x5y3,3.0)", Cube::f);
verify_optimized("join(x5y3,3.0,f(x,y)(x^y))", Cube::f);
verify_optimized("join(x5y3,3.0,f(x,y)(pow(x,y)))", Cube::f);
- verify_optimized("join(x5y3f,3.0,f(x,y)(pow(x,y)))", Cube::f);
+ verify_optimized("join(x5y3_f,3.0,f(x,y)(pow(x,y)))", Cube::f);
verify_optimized("join(@x5y3,3.0,f(x,y)(pow(x,y)))", Cube::f, true);
- verify_optimized("join(@x5y3f,3.0,f(x,y)(pow(x,y)))", Cube::f, true);
+ verify_optimized("join(@x5y3_f,3.0,f(x,y)(pow(x,y)))", Cube::f, true);
}
TEST(PowAsMapTest, hypercubed_dense_tensor_is_not_optimized) {
diff --git a/eval/src/tests/instruction/remove_trivial_dimension_optimizer/remove_trivial_dimension_optimizer_test.cpp b/eval/src/tests/instruction/remove_trivial_dimension_optimizer/remove_trivial_dimension_optimizer_test.cpp
index 4de7e85074d..794725a8257 100644
--- a/eval/src/tests/instruction/remove_trivial_dimension_optimizer/remove_trivial_dimension_optimizer_test.cpp
+++ b/eval/src/tests/instruction/remove_trivial_dimension_optimizer/remove_trivial_dimension_optimizer_test.cpp
@@ -4,7 +4,7 @@
#include <vespa/eval/eval/tensor_function.h>
#include <vespa/eval/instruction/replace_type_function.h>
#include <vespa/eval/instruction/fast_rename_optimizer.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/eval/eval/test/eval_fixture.h>
#include <vespa/vespalib/util/stringfmt.h>
@@ -19,10 +19,10 @@ const ValueBuilderFactory &prod_factory = FastValueBuilderFactory::get();
EvalFixture::ParamRepo make_params() {
return EvalFixture::ParamRepo()
- .add("x1y5z1", spec({x(1),y(5),z(1)}, N()))
- .add("x1y5z1f", spec(float_cells({x(1),y(5),z(1)}), N()))
- .add("x1y1z1", spec({x(1),y(1),z(1)}, N()))
- .add("x1y5z_m", spec({x(1),y(5),z({"a"})}, N()));
+ .add("x1y5z1", GenSpec().idx("x", 1).idx("y", 5).idx("z", 1).gen())
+ .add("x1y5z1f", GenSpec().idx("x", 1).idx("y", 5).idx("z", 1).cells_float().gen())
+ .add("x1y1z1", GenSpec().idx("x", 1).idx("y", 1).idx("z", 1).gen())
+ .add("x1y5z_m", GenSpec().idx("x", 1).idx("y", 5).map("z", {"a"}).gen());
}
EvalFixture::ParamRepo param_repo = make_params();
diff --git a/eval/src/tests/instruction/sparse_dot_product_function/CMakeLists.txt b/eval/src/tests/instruction/sparse_dot_product_function/CMakeLists.txt
new file mode 100644
index 00000000000..076f1d79796
--- /dev/null
+++ b/eval/src/tests/instruction/sparse_dot_product_function/CMakeLists.txt
@@ -0,0 +1,9 @@
+# Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+vespa_add_executable(eval_sparse_dot_product_function_test_app TEST
+ SOURCES
+ sparse_dot_product_function_test.cpp
+ DEPENDS
+ vespaeval
+ GTest::GTest
+)
+vespa_add_test(NAME eval_sparse_dot_product_function_test_app COMMAND eval_sparse_dot_product_function_test_app)
diff --git a/eval/src/tests/instruction/sparse_dot_product_function/sparse_dot_product_function_test.cpp b/eval/src/tests/instruction/sparse_dot_product_function/sparse_dot_product_function_test.cpp
new file mode 100644
index 00000000000..65eab2778aa
--- /dev/null
+++ b/eval/src/tests/instruction/sparse_dot_product_function/sparse_dot_product_function_test.cpp
@@ -0,0 +1,85 @@
+// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+
+#include <vespa/eval/eval/fast_value.h>
+#include <vespa/eval/eval/simple_value.h>
+#include <vespa/eval/instruction/sparse_dot_product_function.h>
+#include <vespa/eval/eval/test/eval_fixture.h>
+#include <vespa/eval/eval/test/gen_spec.h>
+#include <vespa/vespalib/gtest/gtest.h>
+
+using namespace vespalib::eval;
+using namespace vespalib::eval::test;
+
+const ValueBuilderFactory &prod_factory = FastValueBuilderFactory::get();
+const ValueBuilderFactory &test_factory = SimpleValueBuilderFactory::get();
+
+//-----------------------------------------------------------------------------
+
+EvalFixture::ParamRepo make_params() {
+ return EvalFixture::ParamRepo()
+ .add("v1_x", GenSpec().map("x", 32, 1).seq_bias(3.0).gen())
+ .add("v1_x_f", GenSpec().map("x", 32, 1).seq_bias(3.0).cells_float().gen())
+ .add("v2_x", GenSpec().map("x", 16, 2).seq_bias(7.0).gen())
+ .add("v2_x_f", GenSpec().map("x", 16, 2).seq_bias(7.0).cells_float().gen())
+ .add("v3_y", GenSpec().map("y", 10, 1).gen())
+ .add("v4_xd", GenSpec().idx("x", 10).gen())
+ .add("m1_xy", GenSpec().map("x", 32, 1).map("y", 16, 2).seq_bias(3.0).gen())
+ .add("m2_xy", GenSpec().map("x", 16, 2).map("y", 32, 1).seq_bias(7.0).gen())
+ .add("m3_xym", GenSpec().map("x", 8, 1).idx("y", 5).gen());
+}
+EvalFixture::ParamRepo param_repo = make_params();
+
+void assert_optimized(const vespalib::string &expr) {
+ EvalFixture fast_fixture(prod_factory, expr, param_repo, true);
+ EvalFixture test_fixture(test_factory, expr, param_repo, true);
+ EvalFixture slow_fixture(prod_factory, expr, param_repo, false);
+ EXPECT_EQ(fast_fixture.result(), EvalFixture::ref(expr, param_repo));
+ EXPECT_EQ(test_fixture.result(), EvalFixture::ref(expr, param_repo));
+ EXPECT_EQ(slow_fixture.result(), EvalFixture::ref(expr, param_repo));
+ EXPECT_EQ(fast_fixture.find_all<SparseDotProductFunction>().size(), 1u);
+ EXPECT_EQ(test_fixture.find_all<SparseDotProductFunction>().size(), 1u);
+ EXPECT_EQ(slow_fixture.find_all<SparseDotProductFunction>().size(), 0u);
+}
+
+void assert_not_optimized(const vespalib::string &expr) {
+ EvalFixture fast_fixture(prod_factory, expr, param_repo, true);
+ EXPECT_EQ(fast_fixture.result(), EvalFixture::ref(expr, param_repo));
+ EXPECT_EQ(fast_fixture.find_all<SparseDotProductFunction>().size(), 0u);
+}
+
+//-----------------------------------------------------------------------------
+
+TEST(SparseDotProduct, expression_can_be_optimized)
+{
+ assert_optimized("reduce(v1_x*v2_x,sum,x)");
+ assert_optimized("reduce(v2_x*v1_x,sum)");
+ assert_optimized("reduce(v1_x*v2_x_f,sum)");
+ assert_optimized("reduce(v1_x_f*v2_x,sum)");
+ assert_optimized("reduce(v1_x_f*v2_x_f,sum)");
+}
+
+TEST(SparseDotProduct, multi_dimensional_expression_can_be_optimized)
+{
+ assert_optimized("reduce(m1_xy*m2_xy,sum,x,y)");
+ assert_optimized("reduce(m1_xy*m2_xy,sum)");
+}
+
+TEST(SparseDotProduct, embedded_dot_product_is_not_optimized)
+{
+ assert_not_optimized("reduce(m1_xy*v1_x,sum,x)");
+ assert_not_optimized("reduce(v1_x*m1_xy,sum,x)");
+}
+
+TEST(SparseDotProduct, similar_expressions_are_not_optimized)
+{
+ assert_not_optimized("reduce(m1_xy*v1_x,sum)");
+ assert_not_optimized("reduce(v1_x*v3_y,sum)");
+ assert_not_optimized("reduce(v2_x*v1_x,max)");
+ assert_not_optimized("reduce(v2_x+v1_x,sum)");
+ assert_not_optimized("reduce(v4_xd*v4_xd,sum)");
+ assert_not_optimized("reduce(m3_xym*m3_xym,sum)");
+}
+
+//-----------------------------------------------------------------------------
+
+GTEST_MAIN_RUN_ALL_TESTS()
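
Editor's sketch, not part of the patch: the new test uses the map(name, size, stride) overload, which generates numeric string labels in the same way as the D::map helper removed from instruction_benchmark.cpp further down; with stride 1 versus stride 2 the two x-vectors only partially overlap, giving the dot-product cases non-trivial sparse overlap. A minimal example (label semantics inferred from the removed helper):

#include <vespa/eval/eval/test/gen_spec.h>
#include <cstdio>

using namespace vespalib::eval;
using namespace vespalib::eval::test;

int main() {
    TensorSpec v1 = GenSpec().map("x", 32, 1).seq_bias(3.0).gen(); // labels "0","1",...,"31"
    TensorSpec v2 = GenSpec().map("x", 16, 2).seq_bias(7.0).gen(); // labels "0","2",...,"30"
    printf("v1: %s\nv2: %s\n", v1.to_string().c_str(), v2.to_string().c_str());
    return 0;
}
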
diff --git a/eval/src/tests/instruction/sum_max_dot_product_function/sum_max_dot_product_function_test.cpp b/eval/src/tests/instruction/sum_max_dot_product_function/sum_max_dot_product_function_test.cpp
index 4b89f30d879..616649e914b 100644
--- a/eval/src/tests/instruction/sum_max_dot_product_function/sum_max_dot_product_function_test.cpp
+++ b/eval/src/tests/instruction/sum_max_dot_product_function/sum_max_dot_product_function_test.cpp
@@ -3,7 +3,7 @@
#include <vespa/eval/eval/fast_value.h>
#include <vespa/eval/eval/tensor_function.h>
#include <vespa/eval/eval/test/eval_fixture.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/eval/instruction/sum_max_dot_product_function.h>
#include <vespa/vespalib/gtest/gtest.h>
@@ -13,12 +13,6 @@ using namespace vespalib::eval::test;
const ValueBuilderFactory &prod_factory = FastValueBuilderFactory::get();
-struct MyVecSeq : Sequence {
- double bias;
- double operator[](size_t i) const override { return (i + bias); }
- MyVecSeq(double cellBias) : bias(cellBias) {}
-};
-
//-----------------------------------------------------------------------------
vespalib::string main_expr = "reduce(reduce(reduce(a*b,sum,z),max,y),sum,x)";
@@ -34,7 +28,7 @@ void assert_optimized(const TensorSpec &a, const TensorSpec &b, size_t dp_size)
auto info = fast_fixture.find_all<SumMaxDotProductFunction>();
ASSERT_EQ(info.size(), 1u);
EXPECT_TRUE(info[0]->result_is_mutable());
- EXPECT_EQUAL(info[0]->dp_size(), dp_size);
+ EXPECT_EQ(info[0]->dp_size(), dp_size);
}
void assert_not_optimized(const TensorSpec &a, const TensorSpec &b, const vespalib::string &expr = main_expr) {
@@ -51,10 +45,23 @@ void assert_not_optimized(const TensorSpec &a, const TensorSpec &b, const vespal
//-----------------------------------------------------------------------------
-auto query = spec(float_cells({x({"0", "1", "2"}),z(5)}), MyVecSeq(0.5));
-auto document = spec(float_cells({y({"0", "1", "2", "3", "4", "5"}),z(5)}), MyVecSeq(2.5));
-auto empty_query = spec(float_cells({x({}),z(5)}), MyVecSeq(0.5));
-auto empty_document = spec(float_cells({y({}),z(5)}), MyVecSeq(2.5));
+GenSpec QueGen(size_t x_size, size_t z_size) { return GenSpec().cells_float().map("x", x_size).idx("z", z_size).seq_bias(0.5); }
+
+GenSpec DocGen(size_t y_size, size_t z_size) { return GenSpec().cells_float().map("y", y_size).idx("z", z_size).seq_bias(2.5); }
+
+GenSpec Que() { return QueGen(3, 5); }
+GenSpec Doc() { return DocGen(6, 5); }
+
+GenSpec QueX0() { return QueGen(0, 5); }
+GenSpec DocX0() { return DocGen(0, 5); }
+
+GenSpec QueZ1() { return QueGen(3, 1); }
+GenSpec DocZ1() { return DocGen(6, 1); }
+
+auto query = Que().gen();
+auto document = Doc().gen();
+auto empty_query = QueX0().gen();
+auto empty_document = DocX0().gen();
TEST(SumMaxDotProduct, expressions_can_be_optimized)
{
@@ -66,24 +73,24 @@ TEST(SumMaxDotProduct, expressions_can_be_optimized)
}
TEST(SumMaxDotProduct, double_cells_are_not_optimized) {
- auto double_query = spec({x({"0", "1", "2"}),z(5)}, MyVecSeq(0.5));
- auto double_document = spec({y({"0", "1", "2", "3", "4", "5"}),z(5)}, MyVecSeq(2.5));
+ auto double_query = Que().cells_double().gen();
+ auto double_document = Doc().cells_double().gen();
assert_not_optimized(query, double_document);
assert_not_optimized(double_query, document);
assert_not_optimized(double_query, double_document);
}
TEST(SumMaxDotProduct, trivial_dot_product_is_not_optimized) {
- auto trivial_query = spec(float_cells({x({"0", "1", "2"}),z(1)}), MyVecSeq(0.5));
- auto trivial_document = spec(float_cells({y({"0", "1", "2", "3", "4", "5"}),z(1)}), MyVecSeq(2.5));
+ auto trivial_query = QueZ1().gen();
+ auto trivial_document = DocZ1().gen();
assert_not_optimized(trivial_query, trivial_document);
}
TEST(SumMaxDotProduct, additional_dimensions_are_not_optimized) {
- auto extra_sparse_query = spec(float_cells({Domain("a", {"0"}),x({"0", "1", "2"}),z(5)}), MyVecSeq(0.5));
- auto extra_dense_query = spec(float_cells({Domain("a", 1),x({"0", "1", "2"}),z(5)}), MyVecSeq(0.5));
- auto extra_sparse_document = spec(float_cells({Domain("a", {"0"}),y({"0", "1", "2", "3", "4", "5"}),z(5)}), MyVecSeq(2.5));
- auto extra_dense_document = spec(float_cells({Domain("a", 1),y({"0", "1", "2", "3", "4", "5"}),z(5)}), MyVecSeq(2.5));
+ auto extra_sparse_query = Que().map("a", 1).gen();
+ auto extra_dense_query = Que().idx("a", 1).gen();
+ auto extra_sparse_document = Doc().map("a", 1).gen();
+ auto extra_dense_document = Doc().idx("a", 1).gen();
vespalib::string extra_sum_expr = "reduce(reduce(reduce(a*b,sum,z),max,y),sum,a,x)";
vespalib::string extra_max_expr = "reduce(reduce(reduce(a*b,sum,z),max,a,y),sum,x)";
assert_not_optimized(extra_sparse_query, document);
@@ -97,8 +104,8 @@ TEST(SumMaxDotProduct, additional_dimensions_are_not_optimized) {
}
TEST(SumMaxDotProduct, more_dense_variants_are_not_optimized) {
- auto dense_query = spec(float_cells({x(3),z(5)}), MyVecSeq(0.5));
- auto dense_document = spec(float_cells({y(5),z(5)}), MyVecSeq(2.5));
+ auto dense_query = GenSpec().cells_float().idx("x", 3).idx("z", 5).seq_bias(0.5).gen();
+ auto dense_document = GenSpec().cells_float().idx("y", 5).idx("z", 5).seq_bias(2.5).gen();
assert_not_optimized(dense_query, document);
assert_not_optimized(query, dense_document);
assert_not_optimized(dense_query, dense_document);
diff --git a/eval/src/tests/streamed/value/streamed_value_test.cpp b/eval/src/tests/streamed/value/streamed_value_test.cpp
index 2f91c3b9390..d1b0e0a8d56 100644
--- a/eval/src/tests/streamed/value/streamed_value_test.cpp
+++ b/eval/src/tests/streamed/value/streamed_value_test.cpp
@@ -5,7 +5,7 @@
#include <vespa/eval/eval/value_codec.h>
#include <vespa/eval/instruction/generic_join.h>
#include <vespa/eval/eval/interpreted_function.h>
-#include <vespa/eval/eval/test/tensor_model.hpp>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <vespa/vespalib/util/stringfmt.h>
#include <vespa/vespalib/gtest/gtest.h>
@@ -23,45 +23,36 @@ using Handle = SharedStringRepo::Handle;
vespalib::string as_str(string_id label) { return Handle::string_from_id(label); }
-std::vector<Layout> layouts = {
- {},
- {x(3)},
- {x(3),y(5)},
- {x(3),y(5),z(7)},
- float_cells({x(3),y(5),z(7)}),
- {x({"a","b","c"})},
- {x({"a","b","c"}),y({"foo","bar"})},
- {x({"a","b","c"}),y({"foo","bar"}),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y({"foo","bar"}),z({"i","j","k","l"})}),
- {x(3),y({"foo", "bar"}),z(7)},
- {x({"a","b","c"}),y(5),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y(5),z({"i","j","k","l"})})
+GenSpec G() { return GenSpec().cells_float(); }
+
+std::vector<GenSpec> layouts = {
+ G(),
+ G().idx("x", 3),
+ G().idx("x", 3).idx("y", 5),
+ G().idx("x", 3).idx("y", 5).idx("z", 7),
+ G().map("x", {"a","b","c"}),
+ G().map("x", {"a","b","c"}).map("y", {"foo","bar"}),
+ G().map("x", {"a","b","c"}).map("y", {"foo","bar"}).map("z", {"i","j","k","l"}),
+ G().idx("x", 3).map("y", {"foo", "bar"}).idx("z", 7),
+ G().map("x", {"a","b","c"}).idx("y", 5).map("z", {"i","j","k","l"})
};
-std::vector<Layout> join_layouts = {
- {}, {},
- {x(5)}, {x(5)},
- {x(5)}, {y(5)},
- {x(5)}, {x(5),y(5)},
- {y(3)}, {x(2),z(3)},
- {x(3),y(5)}, {y(5),z(7)},
- float_cells({x(3),y(5)}), {y(5),z(7)},
- {x(3),y(5)}, float_cells({y(5),z(7)}),
- float_cells({x(3),y(5)}), float_cells({y(5),z(7)}),
- {x({"a","b","c"})}, {x({"a","b","c"})},
- {x({"a","b","c"})}, {x({"a","b"})},
- {x({"a","b","c"})}, {y({"foo","bar","baz"})},
- {x({"a","b","c"})}, {x({"a","b","c"}),y({"foo","bar","baz"})},
- {x({"a","b"}),y({"foo","bar","baz"})}, {x({"a","b","c"}),y({"foo","bar"})},
- {x({"a","b"}),y({"foo","bar","baz"})}, {y({"foo","bar"}),z({"i","j","k","l"})},
- float_cells({x({"a","b"}),y({"foo","bar","baz"})}), {y({"foo","bar"}),z({"i","j","k","l"})},
- {x({"a","b"}),y({"foo","bar","baz"})}, float_cells({y({"foo","bar"}),z({"i","j","k","l"})}),
- float_cells({x({"a","b"}),y({"foo","bar","baz"})}), float_cells({y({"foo","bar"}),z({"i","j","k","l"})}),
- {x(3),y({"foo", "bar"})}, {y({"foo", "bar"}),z(7)},
- {x({"a","b","c"}),y(5)}, {y(5),z({"i","j","k","l"})},
- float_cells({x({"a","b","c"}),y(5)}), {y(5),z({"i","j","k","l"})},
- {x({"a","b","c"}),y(5)}, float_cells({y(5),z({"i","j","k","l"})}),
- float_cells({x({"a","b","c"}),y(5)}), float_cells({y(5),z({"i","j","k","l"})})
+std::vector<GenSpec> join_layouts = {
+ G(), G(),
+ G().idx("x", 5), G().idx("x", 5),
+ G().idx("x", 5), G().idx("y", 5),
+ G().idx("x", 5), G().idx("x", 5).idx("y", 5),
+ G().idx("y", 3), G().idx("x", 2).idx("z", 3),
+ G().idx("x", 3).idx("y", 5), G().idx("y", 5).idx("z", 7),
+ G().map("x", {"a","b","c"}), G().map("x", {"a","b","c"}),
+ G().map("x", {"a","b","c"}), G().map("x", {"a","b"}),
+ G().map("x", {"a","b","c"}), G().map("y", {"foo","bar","baz"}),
+ G().map("x", {"a","b","c"}), G().map("x", {"a","b","c"}).map("y", {"foo","bar","baz"}),
+ G().map("x", {"a","b"}).map("y", {"foo","bar","baz"}), G().map("x", {"a","b","c"}).map("y", {"foo","bar"}),
+ G().map("x", {"a","b"}).map("y", {"foo","bar","baz"}), G().map("y", {"foo","bar"}).map("z", {"i","j","k","l"}),
+ G().idx("x", 3).map("y", {"foo", "bar"}), G().map("y", {"foo","bar"}).idx("z", 7),
+ G().map("x", {"a","b","c"}).idx("y", 5), G().idx("y", 5).map("z", {"i","j","k","l"})
+
};
TensorSpec streamed_value_join(const TensorSpec &a, const TensorSpec &b, join_fun_t function) {
@@ -76,20 +67,22 @@ TensorSpec streamed_value_join(const TensorSpec &a, const TensorSpec &b, join_fu
TEST(StreamedValueTest, streamed_values_can_be_converted_from_and_to_tensor_spec) {
for (const auto &layout: layouts) {
- TensorSpec expect = spec(layout, N());
- std::unique_ptr<Value> value = value_from_spec(expect, StreamedValueBuilderFactory::get());
- TensorSpec actual = spec_from_value(*value);
- EXPECT_EQ(actual, expect);
+ for (TensorSpec expect : { layout.gen(), layout.cpy().cells_double().gen() }) {
+ std::unique_ptr<Value> value = value_from_spec(expect, StreamedValueBuilderFactory::get());
+ TensorSpec actual = spec_from_value(*value);
+ EXPECT_EQ(actual, expect);
+ }
}
}
TEST(StreamedValueTest, streamed_values_can_be_copied) {
for (const auto &layout: layouts) {
- TensorSpec expect = spec(layout, N());
- std::unique_ptr<Value> value = value_from_spec(expect, StreamedValueBuilderFactory::get());
- std::unique_ptr<Value> copy = StreamedValueBuilderFactory::get().copy(*value);
- TensorSpec actual = spec_from_value(*copy);
- EXPECT_EQ(actual, expect);
+ for (TensorSpec expect : { layout.gen(), layout.cpy().cells_double().gen() }) {
+ std::unique_ptr<Value> value = value_from_spec(expect, StreamedValueBuilderFactory::get());
+ std::unique_ptr<Value> copy = StreamedValueBuilderFactory::get().copy(*value);
+ TensorSpec actual = spec_from_value(*copy);
+ EXPECT_EQ(actual, expect);
+ }
}
}
@@ -126,16 +119,22 @@ TEST(StreamedValueTest, streamed_value_can_be_built_and_inspected) {
EXPECT_EQ(result["bb"], 3);
}
+GenSpec::seq_t N_16ths = [] (size_t i) noexcept { return (i + 1.0) / 16.0; };
+
TEST(StreamedValueTest, new_generic_join_works_for_streamed_values) {
ASSERT_TRUE((join_layouts.size() % 2) == 0);
for (size_t i = 0; i < join_layouts.size(); i += 2) {
- TensorSpec lhs = spec(join_layouts[i], Div16(N()));
- TensorSpec rhs = spec(join_layouts[i + 1], Div16(N()));
- for (auto fun: {operation::Add::f, operation::Sub::f, operation::Mul::f, operation::Max::f}) {
- SCOPED_TRACE(fmt("\n===\nLHS: %s\nRHS: %s\n===\n", lhs.to_string().c_str(), rhs.to_string().c_str()));
- auto expect = ReferenceOperations::join(lhs, rhs, fun);
- auto actual = streamed_value_join(lhs, rhs, fun);
- EXPECT_EQ(actual, expect);
+ const auto l = join_layouts[i].seq(N_16ths);
+ const auto r = join_layouts[i + 1].seq(N_16ths);
+ for (TensorSpec lhs : { l.gen(), l.cpy().cells_double().gen() }) {
+ for (TensorSpec rhs : { r.gen(), r.cpy().cells_double().gen() }) {
+ for (auto fun: {operation::Add::f, operation::Sub::f, operation::Mul::f, operation::Max::f}) {
+ SCOPED_TRACE(fmt("\n===\nLHS: %s\nRHS: %s\n===\n", lhs.to_string().c_str(), rhs.to_string().c_str()));
+ auto expect = ReferenceOperations::join(lhs, rhs, fun);
+ auto actual = streamed_value_join(lhs, rhs, fun);
+ EXPECT_EQ(actual, expect);
+ }
+ }
}
}
}
diff --git a/eval/src/tests/tensor/instruction_benchmark/instruction_benchmark.cpp b/eval/src/tests/tensor/instruction_benchmark/instruction_benchmark.cpp
index 3345d7dc8ee..000794aca7d 100644
--- a/eval/src/tests/tensor/instruction_benchmark/instruction_benchmark.cpp
+++ b/eval/src/tests/tensor/instruction_benchmark/instruction_benchmark.cpp
@@ -43,6 +43,7 @@
#include <vespa/vespalib/io/fileutil.h>
#include <vespa/vespalib/data/slime/slime.h>
#include <vespa/vespalib/data/smart_buffer.h>
+#include <vespa/eval/eval/test/gen_spec.h>
#include <optional>
#include <algorithm>
@@ -60,51 +61,7 @@ template <typename T> using CREF = std::reference_wrapper<const T>;
//-----------------------------------------------------------------------------
-struct D {
- vespalib::string name;
- bool mapped;
- size_t size;
- size_t stride;
- static D map(const vespalib::string &name_in, size_t size_in, size_t stride_in) { return D{name_in, true, size_in, stride_in}; }
- static D idx(const vespalib::string &name_in, size_t size_in) { return D{name_in, false, size_in, 1}; }
- operator ValueType::Dimension() const {
- if (mapped) {
- return ValueType::Dimension(name);
- } else {
- return ValueType::Dimension(name, size);
- }
- }
- TensorSpec::Label operator()(size_t idx) const {
- if (mapped) {
- // need plain number as string for dynamic sparse peek
- return TensorSpec::Label(fmt("%zu", idx));
- } else {
- return TensorSpec::Label(idx);
- }
- }
-};
-
-void add_cells(TensorSpec &spec, double &seq, TensorSpec::Address addr) {
- spec.add(addr, seq);
- seq += 1.0;
-}
-
-template <typename ...Ds> void add_cells(TensorSpec &spec, double &seq, TensorSpec::Address addr, const D &d, const Ds &...ds) {
- for (size_t i = 0, idx = 0; i < d.size; ++i, idx += d.stride) {
- addr.insert_or_assign(d.name, d(idx));
- add_cells(spec, seq, addr, ds...);
- }
-}
-
-template <typename ...Ds> TensorSpec make_spec(double seq, const Ds &...ds) {
- TensorSpec spec(ValueType::tensor_type({ds...}, CellType::FLOAT).to_spec());
- add_cells(spec, seq, TensorSpec::Address(), ds...);
- return spec;
-}
-
-TensorSpec make_vector(const D &d1, double seq) { return make_spec(seq, d1); }
-TensorSpec make_matrix(const D &d1, const D &d2, double seq) { return make_spec(seq, d1, d2); }
-TensorSpec make_cube(const D &d1, const D &d2, const D &d3, double seq) { return make_spec(seq, d1, d2, d3); }
+test::GenSpec GS(double bias) { return test::GenSpec().cells_float().seq_bias(bias); }
//-----------------------------------------------------------------------------
@@ -609,7 +566,7 @@ void benchmark_tensor_create(const vespalib::string &desc, const TensorSpec &pro
ASSERT_FALSE(proto_type.is_error());
std::vector<CREF<TensorSpec>> stack_spec;
for (const auto &cell: proto.cells()) {
- stack_spec.emplace_back(stash.create<TensorSpec>(make_spec(cell.second)));
+ stack_spec.emplace_back(stash.create<TensorSpec>(GS(cell.second).gen()));
}
std::vector<EvalOp::UP> list;
for (const Impl &impl: impl_list) {
@@ -645,7 +602,7 @@ void benchmark_tensor_peek(const vespalib::string &desc, const TensorSpec &lhs,
stack_spec.emplace_back(lhs);
if (peek_spec.is_dynamic) {
for (const auto &entry: peek_spec.spec) {
- stack_spec.emplace_back(stash.create<TensorSpec>(make_spec(double(entry.second))));
+ stack_spec.emplace_back(stash.create<TensorSpec>(GS(double(entry.second)).gen()));
}
}
std::vector<EvalOp::UP> list;
@@ -660,10 +617,10 @@ void benchmark_tensor_peek(const vespalib::string &desc, const TensorSpec &lhs,
//-----------------------------------------------------------------------------
TEST(MakeInputTest, print_some_test_input) {
- auto number = make_spec(5.0);
- auto sparse = make_vector(D::map("x", 5, 3), 1.0);
- auto dense = make_vector(D::idx("x", 5), 10.0);
- auto mixed = make_cube(D::map("x", 3, 7), D::idx("y", 2), D::idx("z", 2), 100.0);
+ auto number = GS(5.0).gen();
+ auto sparse = GS(1.0).map("x", 5, 3).gen();
+ auto dense = GS(10.0).idx("x", 5).gen();
+ auto mixed = GS(100.0).map("x", 3, 7).idx("y", 2).idx("z", 2).gen();
fprintf(stderr, "--------------------------------------------------------\n");
fprintf(stderr, "simple number: %s\n", number.to_string().c_str());
fprintf(stderr, "sparse vector: %s\n", sparse.to_string().c_str());
@@ -728,197 +685,197 @@ void benchmark_encode_decode(const vespalib::string &desc, const TensorSpec &pro
// relevant for the overall performance of the tensor implementation.
TEST(EncodeDecodeBench, encode_decode_dense) {
- auto proto = make_matrix(D::idx("a", 64), D::idx("b", 64), 1.0);
+ auto proto = GS(1.0).idx("a", 64).idx("b", 64).gen();
benchmark_encode_decode("dense tensor", proto);
}
TEST(EncodeDecodeBench, encode_decode_sparse) {
- auto proto = make_matrix(D::map("a", 64, 1), D::map("b", 64, 1), 1.0);
+ auto proto = GS(1.0).map("a", 64, 1).map("b", 64, 1).gen();
benchmark_encode_decode("sparse tensor", proto);
}
TEST(EncodeDecodeBench, encode_decode_mixed) {
- auto proto = make_matrix(D::map("a", 64, 1), D::idx("b", 64), 1.0);
+ auto proto = GS(1.0).map("a", 64, 1).idx("b", 64).gen();
benchmark_encode_decode("mixed tensor", proto);
}
//-----------------------------------------------------------------------------
TEST(DenseConcat, small_vectors) {
- auto lhs = make_vector(D::idx("x", 10), 1.0);
- auto rhs = make_vector(D::idx("x", 10), 2.0);
+ auto lhs = GS(1.0).idx("x", 10).gen();
+ auto rhs = GS(2.0).idx("x", 10).gen();
benchmark_concat("small dense vector append concat", lhs, rhs, "x");
}
TEST(DenseConcat, cross_vectors) {
- auto lhs = make_vector(D::idx("x", 10), 1.0);
- auto rhs = make_vector(D::idx("x", 10), 2.0);
+ auto lhs = GS(1.0).idx("x", 10).gen();
+ auto rhs = GS(2.0).idx("x", 10).gen();
benchmark_concat("small dense vector cross concat", lhs, rhs, "y");
}
TEST(DenseConcat, cube_and_vector) {
- auto lhs = make_cube(D::idx("a", 16), D::idx("b", 16), D::idx("c", 16), 1.0);
- auto rhs = make_vector(D::idx("a", 16), 42.0);
+ auto lhs = GS(1.0).idx("a", 16).idx("b", 16).idx("c", 16).gen();
+ auto rhs = GS(42.0).idx("a", 16).gen();
benchmark_concat("cube vs vector concat", lhs, rhs, "a");
}
TEST(SparseConcat, small_vectors) {
- auto lhs = make_vector(D::map("x", 10, 1), 1.0);
- auto rhs = make_vector(D::map("x", 10, 2), 2.0);
+ auto lhs = GS(1.0).map("x", 10, 1).gen();
+ auto rhs = GS(2.0).map("x", 10, 2).gen();
benchmark_concat("small sparse concat", lhs, rhs, "y");
}
TEST(MixedConcat, mixed_vs_dense) {
- auto lhs = make_cube(D::idx("a", 16), D::idx("b", 16), D::map("c", 16, 1), 1.0);
- auto rhs = make_matrix(D::idx("a", 16), D::idx("b", 16), 2.0);
+ auto lhs = GS(1.0).idx("a", 16).idx("b", 16).map("c", 16, 1).gen();
+ auto rhs = GS(2.0).idx("a", 16).idx("b", 16).gen();
benchmark_concat("mixed dense concat a", lhs, rhs, "a");
}
TEST(MixedConcat, large_mixed_a) {
- auto lhs = make_cube(D::idx("a", 16), D::idx("b", 16), D::map("c", 16, 1), 1.0);
- auto rhs = make_cube(D::idx("a", 16), D::idx("b", 16), D::map("c", 16, 2), 2.0);
+ auto lhs = GS(1.0).idx("a", 16).idx("b", 16).map("c", 16, 1).gen();
+ auto rhs = GS(2.0).idx("a", 16).idx("b", 16).map("c", 16, 2).gen();
benchmark_concat("mixed append concat a", lhs, rhs, "a");
}
TEST(MixedConcat, large_mixed_b) {
- auto lhs = make_cube(D::idx("a", 16), D::idx("b", 16), D::map("c", 16, 1), 1.0);
- auto rhs = make_cube(D::idx("a", 16), D::idx("b", 16), D::map("c", 16, 2), 2.0);
+ auto lhs = GS(1.0).idx("a", 16).idx("b", 16).map("c", 16, 1).gen();
+ auto rhs = GS(2.0).idx("a", 16).idx("b", 16).map("c", 16, 2).gen();
benchmark_concat("mixed append concat b", lhs, rhs, "b");
}
//-----------------------------------------------------------------------------
TEST(NumberJoin, plain_op2) {
- auto lhs = make_spec(2.0);
- auto rhs = make_spec(3.0);
+ auto lhs = GS(2.0).gen();
+ auto rhs = GS(3.0).gen();
benchmark_join("simple numbers multiply", lhs, rhs, operation::Mul::f);
}
//-----------------------------------------------------------------------------
TEST(DenseJoin, small_vectors) {
- auto lhs = make_vector(D::idx("x", 10), 1.0);
- auto rhs = make_vector(D::idx("x", 10), 2.0);
+ auto lhs = GS(1.0).idx("x", 10).gen();
+ auto rhs = GS(2.0).idx("x", 10).gen();
benchmark_join("small dense vector multiply", lhs, rhs, operation::Mul::f);
}
TEST(DenseJoin, full_overlap) {
- auto lhs = make_cube(D::idx("a", 16), D::idx("b", 16), D::idx("c", 16), 1.0);
- auto rhs = make_cube(D::idx("a", 16), D::idx("b", 16), D::idx("c", 16), 2.0);
+ auto lhs = GS(1.0).idx("a", 16).idx("b", 16).idx("c", 16).gen();
+ auto rhs = GS(2.0).idx("a", 16).idx("b", 16).idx("c", 16).gen();
benchmark_join("dense full overlap multiply", lhs, rhs, operation::Mul::f);
}
TEST(DenseJoin, partial_overlap) {
- auto lhs = make_cube(D::idx("a", 8), D::idx("c", 8), D::idx("d", 8), 1.0);
- auto rhs = make_cube(D::idx("b", 8), D::idx("c", 8), D::idx("d", 8), 2.0);
+ auto lhs = GS(1.0).idx("a", 8).idx("c", 8).idx("d", 8).gen();
+ auto rhs = GS(2.0).idx("b", 8).idx("c", 8).idx("d", 8).gen();
benchmark_join("dense partial overlap multiply", lhs, rhs, operation::Mul::f);
}
TEST(DenseJoin, subset_overlap) {
- auto lhs = make_cube(D::idx("a", 16), D::idx("b", 16), D::idx("c", 16), 1.0);
- auto rhs_inner = make_matrix(D::idx("b", 16), D::idx("c", 16), 2.0);
- auto rhs_outer = make_matrix(D::idx("a", 16), D::idx("b", 16), 3.0);
+ auto lhs = GS(1.0).idx("a", 16).idx("b", 16).idx("c", 16).gen();
+ auto rhs_inner = GS(2.0).idx("b", 16).idx("c", 16).gen();
+ auto rhs_outer = GS(3.0).idx("a", 16).idx("b", 16).gen();
benchmark_join("dense subset overlap inner multiply", lhs, rhs_inner, operation::Mul::f);
benchmark_join("dense subset overlap outer multiply", lhs, rhs_outer, operation::Mul::f);
}
TEST(DenseJoin, no_overlap) {
- auto lhs = make_cube(D::idx("a", 4), D::idx("e", 4), D::idx("f", 4), 1.0);
- auto rhs = make_cube(D::idx("b", 4), D::idx("c", 4), D::idx("d", 4), 2.0);
+ auto lhs = GS(1.0).idx("a", 4).idx("e", 4).idx("f", 4).gen();
+ auto rhs = GS(2.0).idx("b", 4).idx("c", 4).idx("d", 4).gen();
benchmark_join("dense no overlap multiply", lhs, rhs, operation::Mul::f);
}
TEST(DenseJoin, simple_expand) {
- auto lhs = make_cube(D::idx("a", 5), D::idx("b", 4), D::idx("c", 4), 1.0);
- auto rhs = make_cube(D::idx("d", 4), D::idx("e", 4), D::idx("f", 5), 2.0);
+ auto lhs = GS(1.0).idx("a", 5).idx("b", 4).idx("c", 4).gen();
+ auto rhs = GS(2.0).idx("d", 4).idx("e", 4).idx("f", 5).gen();
benchmark_join("dense simple expand multiply", lhs, rhs, operation::Mul::f);
}
TEST(DenseJoin, multiply_by_number) {
- auto lhs = make_spec(3.0);
- auto rhs = make_cube(D::idx("a", 16), D::idx("b", 16), D::idx("c", 16), 2.0);
+ auto lhs = GS(3.0).gen();
+ auto rhs = GS(2.0).idx("a", 16).idx("b", 16).idx("c", 16).gen();
benchmark_join("dense cube multiply by number", lhs, rhs, operation::Mul::f);
}
//-----------------------------------------------------------------------------
TEST(SparseJoin, small_vectors) {
- auto lhs = make_vector(D::map("x", 10, 1), 1.0);
- auto rhs = make_vector(D::map("x", 10, 2), 2.0);
+ auto lhs = GS(1.0).map("x", 10, 1).gen();
+ auto rhs = GS(2.0).map("x", 10, 2).gen();
benchmark_join("small sparse vector multiply", lhs, rhs, operation::Mul::f);
}
TEST(SparseJoin, large_vectors) {
- auto lhs = make_vector(D::map("x", 1800, 1), 1.0);
- auto rhs = make_vector(D::map("x", 1000, 2), 2.0);
+ auto lhs = GS(1.0).map("x", 1800, 1).gen();
+ auto rhs = GS(2.0).map("x", 1000, 2).gen();
benchmark_join("large sparse vector multiply", lhs, rhs, operation::Mul::f);
}
TEST(SparseJoin, full_overlap) {
- auto lhs = make_cube(D::map("a", 16, 1), D::map("b", 16, 1), D::map("c", 16, 1), 1.0);
- auto rhs = make_cube(D::map("a", 16, 2), D::map("b", 16, 2), D::map("c", 16, 2), 2.0);
+ auto lhs = GS(1.0).map("a", 16, 1).map("b", 16, 1).map("c", 16, 1).gen();
+ auto rhs = GS(2.0).map("a", 16, 2).map("b", 16, 2).map("c", 16, 2).gen();
benchmark_join("sparse full overlap multiply", lhs, rhs, operation::Mul::f);
}
TEST(SparseJoin, full_overlap_big_vs_small) {
- auto lhs = make_cube(D::map("a", 16, 1), D::map("b", 16, 1), D::map("c", 16, 1), 1.0);
- auto rhs = make_cube(D::map("a", 2, 1), D::map("b", 2, 1), D::map("c", 2, 1), 2.0);
+ auto lhs = GS(1.0).map("a", 16, 1).map("b", 16, 1).map("c", 16, 1).gen();
+ auto rhs = GS(2.0).map("a", 2, 1).map("b", 2, 1).map("c", 2, 1).gen();
benchmark_join("sparse full overlap big vs small multiply", lhs, rhs, operation::Mul::f);
}
TEST(SparseJoin, partial_overlap) {
- auto lhs = make_cube(D::map("a", 8, 1), D::map("c", 8, 1), D::map("d", 8, 1), 1.0);
- auto rhs = make_cube(D::map("b", 8, 2), D::map("c", 8, 2), D::map("d", 8, 2), 2.0);
+ auto lhs = GS(1.0).map("a", 8, 1).map("c", 8, 1).map("d", 8, 1).gen();
+ auto rhs = GS(2.0).map("b", 8, 2).map("c", 8, 2).map("d", 8, 2).gen();
benchmark_join("sparse partial overlap multiply", lhs, rhs, operation::Mul::f);
}
TEST(SparseJoin, no_overlap) {
- auto lhs = make_cube(D::map("a", 4, 1), D::map("e", 4, 1), D::map("f", 4, 1), 1.0);
- auto rhs = make_cube(D::map("b", 4, 1), D::map("c", 4, 1), D::map("d", 4, 1), 2.0);
+ auto lhs = GS(1.0).map("a", 4, 1).map("e", 4, 1).map("f", 4, 1).gen();
+ auto rhs = GS(2.0).map("b", 4, 1).map("c", 4, 1).map("d", 4, 1).gen();
benchmark_join("sparse no overlap multiply", lhs, rhs, operation::Mul::f);
}
TEST(SparseJoin, multiply_by_number) {
- auto lhs = make_spec(3.0);
- auto rhs = make_cube(D::map("a", 16, 2), D::map("b", 16, 2), D::map("c", 16, 2), 2.0);
+ auto lhs = GS(3.0).gen();
+ auto rhs = GS(2.0).map("a", 16, 2).map("b", 16, 2).map("c", 16, 2).gen();
benchmark_join("sparse multiply by number", lhs, rhs, operation::Mul::f);
}
//-----------------------------------------------------------------------------
TEST(MixedJoin, full_overlap) {
- auto lhs = make_cube(D::map("a", 16, 1), D::map("b", 16, 1), D::idx("c", 16), 1.0);
- auto rhs = make_cube(D::map("a", 16, 2), D::map("b", 16, 2), D::idx("c", 16), 2.0);
+ auto lhs = GS(1.0).map("a", 16, 1).map("b", 16, 1).idx("c", 16).gen();
+ auto rhs = GS(2.0).map("a", 16, 2).map("b", 16, 2).idx("c", 16).gen();
benchmark_join("mixed full overlap multiply", lhs, rhs, operation::Mul::f);
}
TEST(MixedJoin, partial_sparse_overlap) {
- auto lhs = make_cube(D::map("a", 8, 1), D::map("c", 8, 1), D::idx("d", 8), 1.0);
- auto rhs = make_cube(D::map("b", 8, 2), D::map("c", 8, 2), D::idx("d", 8), 2.0);
+ auto lhs = GS(1.0).map("a", 8, 1).map("c", 8, 1).idx("d", 8).gen();
+ auto rhs = GS(2.0).map("b", 8, 2).map("c", 8, 2).idx("d", 8).gen();
benchmark_join("mixed partial sparse overlap multiply", lhs, rhs, operation::Mul::f);
}
TEST(MixedJoin, no_overlap) {
- auto lhs = make_cube(D::map("a", 4, 1), D::map("e", 4, 1), D::idx("f", 4), 1.0);
- auto rhs = make_cube(D::map("b", 4, 1), D::map("c", 4, 1), D::idx("d", 4), 2.0);
+ auto lhs = GS(1.0).map("a", 4, 1).map("e", 4, 1).idx("f", 4).gen();
+ auto rhs = GS(2.0).map("b", 4, 1).map("c", 4, 1).idx("d", 4).gen();
benchmark_join("mixed no overlap multiply", lhs, rhs, operation::Mul::f);
}
TEST(MixedJoin, multiply_by_number) {
- auto lhs = make_spec(3.0);
- auto rhs = make_cube(D::map("a", 16, 2), D::map("b", 16, 2), D::idx("c", 16), 2.0);
+ auto lhs = GS(3.0).gen();
+ auto rhs = GS(2.0).map("a", 16, 2).map("b", 16, 2).idx("c", 16).gen();
benchmark_join("mixed multiply by number", lhs, rhs, operation::Mul::f);
}
//-----------------------------------------------------------------------------
TEST(ReduceBench, number_reduce) {
- auto lhs = make_spec(1.0);
+ auto lhs = GS(1.0).gen();
benchmark_reduce("number reduce", lhs, Aggr::SUM, {});
}
TEST(ReduceBench, dense_reduce) {
- auto lhs = make_cube(D::idx("a", 16), D::idx("b", 16), D::idx("c", 16), 1.0);
+ auto lhs = GS(1.0).idx("a", 16).idx("b", 16).idx("c", 16).gen();
benchmark_reduce("dense reduce inner", lhs, Aggr::SUM, {"c"});
benchmark_reduce("dense reduce middle", lhs, Aggr::SUM, {"b"});
benchmark_reduce("dense reduce outer", lhs, Aggr::SUM, {"a"});
@@ -929,7 +886,7 @@ TEST(ReduceBench, dense_reduce) {
}
TEST(ReduceBench, sparse_reduce) {
- auto lhs = make_cube(D::map("a", 16, 1), D::map("b", 16, 1), D::map("c", 16, 1), 1.0);
+ auto lhs = GS(1.0).map("a", 16, 1).map("b", 16, 1).map("c", 16, 1).gen();
benchmark_reduce("sparse reduce inner", lhs, Aggr::SUM, {"c"});
benchmark_reduce("sparse reduce middle", lhs, Aggr::SUM, {"b"});
benchmark_reduce("sparse reduce outer", lhs, Aggr::SUM, {"a"});
@@ -940,8 +897,8 @@ TEST(ReduceBench, sparse_reduce) {
}
TEST(ReduceBench, mixed_reduce) {
- auto lhs = make_spec(1.0, D::map("a", 4, 1), D::map("b", 4, 1), D::map("c", 4, 1),
- D::idx("d", 4), D::idx("e", 4), D::idx("f", 4));
+ auto lhs = GS(1.0).map("a", 4, 1).map("b", 4, 1).map("c", 4, 1)
+ .idx("d", 4).idx("e", 4).idx("f", 4).gen();
benchmark_reduce("mixed reduce middle dense", lhs, Aggr::SUM, {"e"});
benchmark_reduce("mixed reduce middle sparse", lhs, Aggr::SUM, {"b"});
benchmark_reduce("mixed reduce middle sparse/dense", lhs, Aggr::SUM, {"b", "e"});
@@ -953,87 +910,87 @@ TEST(ReduceBench, mixed_reduce) {
//-----------------------------------------------------------------------------
TEST(RenameBench, dense_rename) {
- auto lhs = make_matrix(D::idx("a", 64), D::idx("b", 64), 1.0);
+ auto lhs = GS(1.0).idx("a", 64).idx("b", 64).gen();
benchmark_rename("dense transpose", lhs, {"a", "b"}, {"b", "a"});
}
TEST(RenameBench, sparse_rename) {
- auto lhs = make_matrix(D::map("a", 64, 1), D::map("b", 64, 1), 1.0);
+ auto lhs = GS(1.0).map("a", 64, 1).map("b", 64, 1).gen();
benchmark_rename("sparse transpose", lhs, {"a", "b"}, {"b", "a"});
}
TEST(RenameBench, mixed_rename) {
- auto lhs = make_spec(1.0, D::map("a", 8, 1), D::map("b", 8, 1), D::idx("c", 8), D::idx("d", 8));
+ auto lhs = GS(1.0).map("a", 8, 1).map("b", 8, 1).idx("c", 8).idx("d", 8).gen();
benchmark_rename("mixed multi-transpose", lhs, {"a", "b", "c", "d"}, {"b", "a", "d", "c"});
}
//-----------------------------------------------------------------------------
TEST(MergeBench, dense_merge) {
- auto lhs = make_matrix(D::idx("a", 64), D::idx("b", 64), 1.0);
- auto rhs = make_matrix(D::idx("a", 64), D::idx("b", 64), 2.0);
+ auto lhs = GS(1.0).idx("a", 64).idx("b", 64).gen();
+ auto rhs = GS(2.0).idx("a", 64).idx("b", 64).gen();
benchmark_merge("dense merge", lhs, rhs, operation::Max::f);
}
TEST(MergeBench, sparse_merge_big_small) {
- auto lhs = make_matrix(D::map("a", 64, 1), D::map("b", 64, 1), 1.0);
- auto rhs = make_matrix(D::map("a", 8, 1), D::map("b", 8, 1), 2.0);
+ auto lhs = GS(1.0).map("a", 64, 1).map("b", 64, 1).gen();
+ auto rhs = GS(2.0).map("a", 8, 1).map("b", 8, 1).gen();
benchmark_merge("sparse merge big vs small", lhs, rhs, operation::Max::f);
}
TEST(MergeBench, sparse_merge_minimal_overlap) {
- auto lhs = make_matrix(D::map("a", 64, 11), D::map("b", 32, 11), 1.0);
- auto rhs = make_matrix(D::map("a", 32, 13), D::map("b", 64, 13), 2.0);
+ auto lhs = GS(1.0).map("a", 64, 11).map("b", 32, 11).gen();
+ auto rhs = GS(2.0).map("a", 32, 13).map("b", 64, 13).gen();
benchmark_merge("sparse merge minimal overlap", lhs, rhs, operation::Max::f);
}
TEST(MergeBench, mixed_merge) {
- auto lhs = make_matrix(D::map("a", 64, 1), D::idx("b", 64), 1.0);
- auto rhs = make_matrix(D::map("a", 64, 2), D::idx("b", 64), 2.0);
+ auto lhs = GS(1.0).map("a", 64, 1).idx("b", 64).gen();
+ auto rhs = GS(2.0).map("a", 64, 2).idx("b", 64).gen();
benchmark_merge("mixed merge", lhs, rhs, operation::Max::f);
}
//-----------------------------------------------------------------------------
TEST(MapBench, number_map) {
- auto lhs = make_spec(1.75);
+ auto lhs = GS(1.75).gen();
benchmark_map("number map", lhs, operation::Floor::f);
}
TEST(MapBench, dense_map) {
- auto lhs = make_matrix(D::idx("a", 64), D::idx("b", 64), 1.75);
+ auto lhs = GS(1.75).idx("a", 64).idx("b", 64).gen();
benchmark_map("dense map", lhs, operation::Floor::f);
}
TEST(MapBench, sparse_map_small) {
- auto lhs = make_matrix(D::map("a", 4, 1), D::map("b", 4, 1), 1.75);
+ auto lhs = GS(1.75).map("a", 4, 1).map("b", 4, 1).gen();
benchmark_map("sparse map small", lhs, operation::Floor::f);
}
TEST(MapBench, sparse_map_big) {
- auto lhs = make_matrix(D::map("a", 64, 1), D::map("b", 64, 1), 1.75);
+ auto lhs = GS(1.75).map("a", 64, 1).map("b", 64, 1).gen();
benchmark_map("sparse map big", lhs, operation::Floor::f);
}
TEST(MapBench, mixed_map) {
- auto lhs = make_matrix(D::map("a", 64, 1), D::idx("b", 64), 1.75);
+ auto lhs = GS(1.75).map("a", 64, 1).idx("b", 64).gen();
benchmark_map("mixed map", lhs, operation::Floor::f);
}
//-----------------------------------------------------------------------------
TEST(TensorCreateBench, create_dense) {
- auto proto = make_matrix(D::idx("a", 32), D::idx("b", 32), 1.0);
+ auto proto = GS(1.0).idx("a", 32).idx("b", 32).gen();
benchmark_tensor_create("dense tensor create", proto);
}
TEST(TensorCreateBench, create_sparse) {
- auto proto = make_matrix(D::map("a", 32, 1), D::map("b", 32, 1), 1.0);
+ auto proto = GS(1.0).map("a", 32, 1).map("b", 32, 1).gen();
benchmark_tensor_create("sparse tensor create", proto);
}
TEST(TensorCreateBench, create_mixed) {
- auto proto = make_matrix(D::map("a", 32, 1), D::idx("b", 32), 1.0);
+ auto proto = GS(1.0).map("a", 32, 1).idx("b", 32).gen();
benchmark_tensor_create("mixed tensor create", proto);
}
@@ -1041,7 +998,7 @@ TEST(TensorCreateBench, create_mixed) {
TEST(TensorLambdaBench, simple_lambda) {
auto type = ValueType::from_spec("tensor<float>(a[64],b[64])");
- auto p0 = make_spec(3.5);
+ auto p0 = GS(3.5).gen();
auto function = Function::parse({"a", "b", "p0"}, "(a*64+b)*p0");
ASSERT_FALSE(function->has_error());
benchmark_tensor_lambda("simple tensor lambda", type, p0, *function);
@@ -1049,7 +1006,7 @@ TEST(TensorLambdaBench, simple_lambda) {
TEST(TensorLambdaBench, complex_lambda) {
auto type = ValueType::from_spec("tensor<float>(a[64],b[64])");
- auto p0 = make_vector(D::idx("x", 3), 1.0);
+ auto p0 = GS(1.0).idx("x", 3).gen();
auto function = Function::parse({"a", "b", "p0"}, "(a*64+b)*reduce(p0,sum)");
ASSERT_FALSE(function->has_error());
benchmark_tensor_lambda("complex tensor lambda", type, p0, *function);
@@ -1058,7 +1015,7 @@ TEST(TensorLambdaBench, complex_lambda) {
//-----------------------------------------------------------------------------
TEST(TensorPeekBench, dense_peek) {
- auto lhs = make_matrix(D::idx("a", 64), D::idx("b", 64), 1.0);
+ auto lhs = GS(1.0).idx("a", 64).idx("b", 64).gen();
benchmark_tensor_peek("dense peek cell verbatim", lhs, verbatim_peek().add("a", 1).add("b", 2));
benchmark_tensor_peek("dense peek cell dynamic", lhs, dynamic_peek().add("a", 1).add("b", 2));
benchmark_tensor_peek("dense peek vector verbatim", lhs, verbatim_peek().add("a", 1));
@@ -1066,7 +1023,7 @@ TEST(TensorPeekBench, dense_peek) {
}
TEST(TensorPeekBench, sparse_peek) {
- auto lhs = make_matrix(D::map("a", 64, 1), D::map("b", 64, 1), 1.0);
+ auto lhs = GS(1.0).map("a", 64, 1).map("b", 64, 1).gen();
benchmark_tensor_peek("sparse peek cell verbatim", lhs, verbatim_peek().add("a", 1).add("b", 2));
benchmark_tensor_peek("sparse peek cell dynamic", lhs, dynamic_peek().add("a", 1).add("b", 2));
benchmark_tensor_peek("sparse peek vector verbatim", lhs, verbatim_peek().add("a", 1));
@@ -1074,7 +1031,7 @@ TEST(TensorPeekBench, sparse_peek) {
}
TEST(TensorPeekBench, mixed_peek) {
- auto lhs = make_spec(1.0, D::map("a", 8, 1), D::map("b", 8, 1), D::idx("c", 8), D::idx("d", 8));
+ auto lhs = GS(1.0).map("a", 8, 1).map("b", 8, 1).idx("c", 8).idx("d", 8).gen();
benchmark_tensor_peek("mixed peek cell verbatim", lhs, verbatim_peek().add("a", 1).add("b", 2).add("c", 3).add("d", 4));
benchmark_tensor_peek("mixed peek cell dynamic", lhs, dynamic_peek().add("a", 1).add("b", 2).add("c", 3).add("d", 4));
benchmark_tensor_peek("mixed peek dense verbatim", lhs, verbatim_peek().add("a", 1).add("b", 2));
diff --git a/eval/src/vespa/eval/eval/optimize_tensor_function.cpp b/eval/src/vespa/eval/eval/optimize_tensor_function.cpp
index 2e8c89f88fc..25612b8d5fd 100644
--- a/eval/src/vespa/eval/eval/optimize_tensor_function.cpp
+++ b/eval/src/vespa/eval/eval/optimize_tensor_function.cpp
@@ -5,6 +5,7 @@
#include "simple_value.h"
#include <vespa/eval/instruction/dense_dot_product_function.h>
+#include <vespa/eval/instruction/sparse_dot_product_function.h>
#include <vespa/eval/instruction/mixed_inner_product_function.h>
#include <vespa/eval/instruction/sum_max_dot_product_function.h>
#include <vespa/eval/instruction/dense_xw_product_function.h>
@@ -31,11 +32,7 @@ namespace vespalib::eval {
namespace {
-const TensorFunction &optimize_for_factory(const ValueBuilderFactory &factory, const TensorFunction &expr, Stash &stash) {
- if (&factory == &SimpleValueBuilderFactory::get()) {
- // never optimize simple value evaluation
- return expr;
- }
+const TensorFunction &optimize_for_factory(const ValueBuilderFactory &, const TensorFunction &expr, Stash &stash) {
using Child = TensorFunction::Child;
Child root(expr);
{
@@ -47,6 +44,7 @@ const TensorFunction &optimize_for_factory(const ValueBuilderFactory &factory, c
const Child &child = nodes.back().get();
child.set(SumMaxDotProductFunction::optimize(child.get(), stash));
child.set(DenseDotProductFunction::optimize(child.get(), stash));
+ child.set(SparseDotProductFunction::optimize(child.get(), stash));
child.set(DenseXWProductFunction::optimize(child.get(), stash));
child.set(DenseMatMulFunction::optimize(child.get(), stash));
child.set(DenseMultiMatMulFunction::optimize(child.get(), stash));
diff --git a/eval/src/vespa/eval/eval/test/eval_fixture.cpp b/eval/src/vespa/eval/eval/test/eval_fixture.cpp
index 58d8905baf3..966954b9026 100644
--- a/eval/src/vespa/eval/eval/test/eval_fixture.cpp
+++ b/eval/src/vespa/eval/eval/test/eval_fixture.cpp
@@ -28,7 +28,10 @@ NodeTypes get_types(const Function &function, const ParamRepo &param_repo) {
std::vector<ValueType> param_types;
for (size_t i = 0; i < function.num_params(); ++i) {
auto pos = param_repo.map.find(function.param_name(i));
- ASSERT_TRUE(pos != param_repo.map.end());
+ if (pos == param_repo.map.end()) {
+ TEST_STATE(fmt("param name: '%s'", function.param_name(i).data()).c_str());
+ ASSERT_TRUE(pos != param_repo.map.end());
+ }
param_types.push_back(ValueType::from_spec(pos->second.value.type()));
ASSERT_TRUE(!param_types.back().is_error());
}
@@ -181,6 +184,23 @@ EvalFixture::ParamRepo::add_dense(const std::vector<std::pair<vespalib::string,
return *this;
}
+// produce 4 variants: float/double * mutable/const
+EvalFixture::ParamRepo &
+EvalFixture::ParamRepo::add_variants(const vespalib::string &name_base,
+ const GenSpec &spec)
+{
+ auto name_f = name_base + "_f";
+ auto name_m = "@" + name_base;
+ auto name_m_f = "@" + name_base + "_f";
+ auto dbl_ts = spec.cpy().cells_double().gen();
+ auto flt_ts = spec.cpy().cells_float().gen();
+ add(name_base, dbl_ts);
+ add(name_f, flt_ts);
+ add_mutable(name_m, dbl_ts);
+ add_mutable(name_m_f, flt_ts);
+ return *this;
+}
+
void
EvalFixture::detect_param_tampering(const ParamRepo &param_repo, bool allow_mutable) const
{
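ParamRepo::add_variants replaces the free add_variants() helper from param_variants.h (deleted further down): one GenSpec now yields the const double, const float, mutable double and mutable float flavours of a parameter, with float variants suffixed "_f" and mutable ones prefixed "@". A hedged usage sketch; the function name and the dimension layout are illustrative only:

#include <vespa/eval/eval/test/eval_fixture.h>
#include <vespa/eval/eval/test/gen_spec.h>

using vespalib::eval::test::EvalFixture;
using vespalib::eval::test::GenSpec;

EvalFixture::ParamRepo make_params() {
    EvalFixture::ParamRepo repo;
    // registers "a", "a_f", "@a" and "@a_f" from a single spec
    repo.add_variants("a", GenSpec().seq_bias(1.0).map("x", 16, 1));
    // registers "b", "b_f", "@b" and "@b_f"
    repo.add_variants("b", GenSpec().seq_bias(2.0).map("x", 16, 2));
    return repo;
}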
diff --git a/eval/src/vespa/eval/eval/test/eval_fixture.h b/eval/src/vespa/eval/eval/test/eval_fixture.h
index dc49cf7e4dc..44adaca3298 100644
--- a/eval/src/vespa/eval/eval/test/eval_fixture.h
+++ b/eval/src/vespa/eval/eval/test/eval_fixture.h
@@ -10,6 +10,7 @@
#include <vespa/vespalib/util/stash.h>
#include <set>
#include <functional>
+#include "gen_spec.h"
namespace vespalib::eval::test {
@@ -40,6 +41,10 @@ public:
ParamRepo &add_matrix(const char *d1, size_t s1, const char *d2, size_t s2, gen_fun_t gen = gen_N);
ParamRepo &add_cube(const char *d1, size_t s1, const char *d2, size_t s2, const char *d3, size_t s3, gen_fun_t gen = gen_N);
ParamRepo &add_dense(const std::vector<std::pair<vespalib::string, size_t> > &dims, gen_fun_t gen = gen_N);
+
+ // produce 4 variants: float/double * mutable/const
+ ParamRepo &add_variants(const vespalib::string &name_base,
+ const GenSpec &spec);
~ParamRepo() {}
};
diff --git a/eval/src/vespa/eval/eval/test/gen_spec.cpp b/eval/src/vespa/eval/eval/test/gen_spec.cpp
index 9c40c65620e..c20e9005318 100644
--- a/eval/src/vespa/eval/eval/test/gen_spec.cpp
+++ b/eval/src/vespa/eval/eval/test/gen_spec.cpp
@@ -20,6 +20,11 @@ DimSpec::make_dict(size_t size, size_t stride, const vespalib::string &prefix)
return dict;
}
+GenSpec::GenSpec(GenSpec &&other) = default;
+GenSpec::GenSpec(const GenSpec &other) = default;
+GenSpec &GenSpec::operator=(GenSpec &&other) = default;
+GenSpec &GenSpec::operator=(const GenSpec &other) = default;
+
GenSpec::~GenSpec() = default;
ValueType
diff --git a/eval/src/vespa/eval/eval/test/gen_spec.h b/eval/src/vespa/eval/eval/test/gen_spec.h
index 81843156fd9..36bbd554125 100644
--- a/eval/src/vespa/eval/eval/test/gen_spec.h
+++ b/eval/src/vespa/eval/eval/test/gen_spec.h
@@ -66,10 +66,15 @@ private:
static double default_seq(size_t idx) { return (idx + 1.0); }
public:
GenSpec() : _dims(), _cells(CellType::DOUBLE), _seq(default_seq) {}
+ GenSpec(GenSpec &&other);
+ GenSpec(const GenSpec &other);
+ GenSpec &operator=(GenSpec &&other);
+ GenSpec &operator=(const GenSpec &other);
~GenSpec();
std::vector<DimSpec> dims() const { return _dims; }
CellType cells() const { return _cells; }
seq_t seq() const { return _seq; }
+ GenSpec cpy() const { return *this; }
GenSpec &idx(const vespalib::string &name, size_t size) {
_dims.emplace_back(name, size);
return *this;
@@ -94,7 +99,7 @@ public:
}
GenSpec &seq_n() { return seq(default_seq); }
GenSpec &seq_bias(double bias) {
- seq_t fun = [bias](size_t idx) { return (idx + bias); };
+ seq_t fun = [bias](size_t idx) noexcept { return (idx + bias); };
return seq(fun);
}
ValueType type() const;
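The defaulted copy/move members and cpy() support this builder style: the setters mutate and return *this, so cpy() hands out a value snapshot from which independent variants can be derived, which is exactly what the new add_variants() in eval_fixture.cpp does. A small sketch using only members visible in these hunks (cells_double()/cells_float() are taken from the eval_fixture.cpp hunk):

#include <vespa/eval/eval/test/gen_spec.h>

using vespalib::eval::test::GenSpec;

void derive_variants() {
    GenSpec base;
    base.map("a", 8, 1).idx("b", 8);                    // build the shared layout once
    auto as_double = base.cpy().cells_double().gen();   // base itself stays untouched
    auto as_float  = base.cpy().cells_float().gen();
    (void) as_double; (void) as_float;
}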
diff --git a/eval/src/vespa/eval/eval/test/param_variants.h b/eval/src/vespa/eval/eval/test/param_variants.h
deleted file mode 100644
index 41a43ebca08..00000000000
--- a/eval/src/vespa/eval/eval/test/param_variants.h
+++ /dev/null
@@ -1,23 +0,0 @@
-#include "eval_fixture.h"
-#include "tensor_model.hpp"
-
-namespace vespalib::eval::test {
-
-// for testing of optimizers / tensor functions
-// we produce the same param three times:
-// as-is, with float cells, and tagged as mutable.
-void add_variants(EvalFixture::ParamRepo &repo,
- const vespalib::string &name_base,
- const Layout &base_layout,
- const Sequence &seq)
-{
- auto name_f = name_base + "_f";
- auto name_m = "@" + name_base;
- auto name_m_f = "@" + name_base + "_f";
- repo.add(name_base, spec(base_layout, seq));
- repo.add(name_f, spec(float_cells(base_layout), seq));
- repo.add_mutable(name_m, spec(base_layout, seq));
- repo.add_mutable(name_m_f, spec(float_cells(base_layout), seq));
-}
-
-} // namespace
diff --git a/eval/src/vespa/eval/instruction/CMakeLists.txt b/eval/src/vespa/eval/instruction/CMakeLists.txt
index 58d5290f5d9..cac69d23640 100644
--- a/eval/src/vespa/eval/instruction/CMakeLists.txt
+++ b/eval/src/vespa/eval/instruction/CMakeLists.txt
@@ -32,6 +32,7 @@ vespa_add_library(eval_instruction OBJECT
pow_as_map_optimizer.cpp
remove_trivial_dimension_optimizer.cpp
replace_type_function.cpp
+ sparse_dot_product_function.cpp
sum_max_dot_product_function.cpp
vector_from_doubles_function.cpp
)
diff --git a/eval/src/vespa/eval/instruction/generic_join.cpp b/eval/src/vespa/eval/instruction/generic_join.cpp
index abe29b8228c..6d6f86b7c4d 100644
--- a/eval/src/vespa/eval/instruction/generic_join.cpp
+++ b/eval/src/vespa/eval/instruction/generic_join.cpp
@@ -308,6 +308,17 @@ SparseJoinPlan::SparseJoinPlan(const ValueType &lhs_type, const ValueType &rhs_t
[](const auto &a, const auto &b){ return (a.name < b.name); });
}
+SparseJoinPlan::SparseJoinPlan(size_t num_mapped_dims)
+ : sources(num_mapped_dims, Source::BOTH), lhs_overlap(), rhs_overlap()
+{
+ lhs_overlap.reserve(num_mapped_dims);
+ rhs_overlap.reserve(num_mapped_dims);
+ for (size_t i = 0; i < num_mapped_dims; ++i) {
+ lhs_overlap.push_back(i);
+ rhs_overlap.push_back(i);
+ }
+}
+
bool
SparseJoinPlan::should_forward_lhs_index() const
{
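The new single-argument constructor builds the degenerate full-overlap plan directly: every mapped dimension gets Source::BOTH and both overlap lists are simply 0..n-1, skipping the dimension-name matching done by the two-type constructor. A sketch of the equivalence it relies on; the type specs are illustrative:

#include <vespa/eval/eval/value_type.h>
#include <vespa/eval/instruction/generic_join.h>

using vespalib::eval::ValueType;
using vespalib::eval::instruction::SparseJoinPlan;

void full_overlap_plans_agree() {
    auto lhs = ValueType::from_spec("tensor(a{},b{})");
    auto rhs = ValueType::from_spec("tensor(a{},b{})");
    SparseJoinPlan from_types(lhs, rhs); // matches mapped dimensions by name
    SparseJoinPlan shortcut(2);          // assumes every mapped dimension is shared
    // both plans end up with sources == {BOTH, BOTH}
    // and lhs_overlap == rhs_overlap == {0, 1}
    (void) from_types; (void) shortcut;
}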
diff --git a/eval/src/vespa/eval/instruction/generic_join.h b/eval/src/vespa/eval/instruction/generic_join.h
index 1fcfcf416cc..026a2938971 100644
--- a/eval/src/vespa/eval/instruction/generic_join.h
+++ b/eval/src/vespa/eval/instruction/generic_join.h
@@ -58,6 +58,7 @@ struct SparseJoinPlan {
bool should_forward_lhs_index() const;
bool should_forward_rhs_index() const;
SparseJoinPlan(const ValueType &lhs_type, const ValueType &rhs_type);
+ SparseJoinPlan(size_t num_mapped_dims); // full overlap plan
~SparseJoinPlan();
};
@@ -70,15 +71,14 @@ struct SparseJoinState {
const Value::Index &first_index;
const Value::Index &second_index;
const std::vector<size_t> &second_view_dims;
- std::vector<string_id> full_address;
- std::vector<string_id*> first_address;
- std::vector<const string_id*> address_overlap;
- std::vector<string_id*> second_only_address;
+ std::vector<string_id> full_address;
+ std::vector<string_id*> first_address;
+ std::vector<const string_id*> address_overlap;
+ std::vector<string_id*> second_only_address;
size_t lhs_subspace;
size_t rhs_subspace;
size_t &first_subspace;
size_t &second_subspace;
-
SparseJoinState(const SparseJoinPlan &plan, const Value::Index &lhs, const Value::Index &rhs);
~SparseJoinState();
};
diff --git a/eval/src/vespa/eval/instruction/sparse_dot_product_function.cpp b/eval/src/vespa/eval/instruction/sparse_dot_product_function.cpp
new file mode 100644
index 00000000000..93ae2856372
--- /dev/null
+++ b/eval/src/vespa/eval/instruction/sparse_dot_product_function.cpp
@@ -0,0 +1,111 @@
+// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+
+#include "sparse_dot_product_function.h"
+#include "generic_join.h"
+#include "detect_type.h"
+#include <vespa/eval/eval/fast_value.hpp>
+
+namespace vespalib::eval {
+
+using namespace tensor_function;
+using namespace operation;
+using namespace instruction;
+
+namespace {
+
+template <typename SCT, typename BCT>
+double my_fast_sparse_dot_product(const FastValueIndex &small_idx, const FastValueIndex &big_idx,
+ const SCT *small_cells, const BCT *big_cells)
+{
+ double result = 0.0;
+ small_idx.map.each_map_entry([&](auto small_subspace, auto hash) {
+ auto small_addr = small_idx.map.get_addr(small_subspace);
+ auto big_subspace = big_idx.map.lookup(small_addr, hash);
+ if (big_subspace != FastAddrMap::npos()) {
+ result += (small_cells[small_subspace] * big_cells[big_subspace]);
+ }
+ });
+ return result;
+}
+
+template <typename LCT, typename RCT>
+void my_sparse_dot_product_op(InterpretedFunction::State &state, uint64_t num_mapped_dims) {
+ const auto &lhs_idx = state.peek(1).index();
+ const auto &rhs_idx = state.peek(0).index();
+ const LCT *lhs_cells = state.peek(1).cells().typify<LCT>().cbegin();
+ const RCT *rhs_cells = state.peek(0).cells().typify<RCT>().cbegin();
+ if (auto indexes = detect_type<FastValueIndex>(lhs_idx, rhs_idx)) {
+#if __has_cpp_attribute(likely)
+ [[likely]];
+#endif
+ const auto &lhs_fast = indexes.get<0>();
+ const auto &rhs_fast = indexes.get<1>();
+ double result = (rhs_fast.map.size() < lhs_fast.map.size())
+ ? my_fast_sparse_dot_product(rhs_fast, lhs_fast, rhs_cells, lhs_cells)
+ : my_fast_sparse_dot_product(lhs_fast, rhs_fast, lhs_cells, rhs_cells);
+ state.pop_pop_push(state.stash.create<ScalarValue<double>>(result));
+ } else {
+#if __has_cpp_attribute(unlikely)
+ [[unlikely]];
+#endif
+ double result = 0.0;
+ SparseJoinPlan plan(num_mapped_dims);
+ SparseJoinState sparse(plan, lhs_idx, rhs_idx);
+ auto outer = sparse.first_index.create_view({});
+ auto inner = sparse.second_index.create_view(sparse.second_view_dims);
+ outer->lookup({});
+ while (outer->next_result(sparse.first_address, sparse.first_subspace)) {
+ inner->lookup(sparse.address_overlap);
+ if (inner->next_result(sparse.second_only_address, sparse.second_subspace)) {
+ result += (lhs_cells[sparse.lhs_subspace] * rhs_cells[sparse.rhs_subspace]);
+ }
+ }
+ state.pop_pop_push(state.stash.create<ScalarValue<double>>(result));
+ }
+}
+
+struct MyGetFun {
+ template <typename LCT, typename RCT>
+ static auto invoke() { return my_sparse_dot_product_op<LCT,RCT>; }
+};
+
+} // namespace <unnamed>
+
+SparseDotProductFunction::SparseDotProductFunction(const TensorFunction &lhs_in,
+ const TensorFunction &rhs_in)
+ : tensor_function::Op2(ValueType::make_type(CellType::DOUBLE, {}), lhs_in, rhs_in)
+{
+}
+
+InterpretedFunction::Instruction
+SparseDotProductFunction::compile_self(const ValueBuilderFactory &, Stash &) const
+{
+ auto op = typify_invoke<2,TypifyCellType,MyGetFun>(lhs().result_type().cell_type(), rhs().result_type().cell_type());
+ return InterpretedFunction::Instruction(op, lhs().result_type().count_mapped_dimensions());
+}
+
+bool
+SparseDotProductFunction::compatible_types(const ValueType &res, const ValueType &lhs, const ValueType &rhs)
+{
+ return (res.is_scalar() && (res.cell_type() == CellType::DOUBLE) &&
+ lhs.is_sparse() && (rhs.dimensions() == lhs.dimensions()));
+}
+
+const TensorFunction &
+SparseDotProductFunction::optimize(const TensorFunction &expr, Stash &stash)
+{
+ auto reduce = as<Reduce>(expr);
+ if (reduce && (reduce->aggr() == Aggr::SUM)) {
+ auto join = as<Join>(reduce->child());
+ if (join && (join->function() == Mul::f)) {
+ const TensorFunction &lhs = join->lhs();
+ const TensorFunction &rhs = join->rhs();
+ if (compatible_types(expr.result_type(), lhs.result_type(), rhs.result_type())) {
+ return stash.create<SparseDotProductFunction>(lhs, rhs);
+ }
+ }
+ }
+ return expr;
+}
+
+} // namespace
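optimize() only rewrites reduce(a*b, sum) nodes, and compatible_types() further requires a double scalar result (so the sum must run over all dimensions), a sparse lhs and identical dimension sets on both sides. A small sketch of that gate; the type specs are illustrative:

#include <vespa/eval/eval/value_type.h>
#include <vespa/eval/instruction/sparse_dot_product_function.h>

using vespalib::eval::SparseDotProductFunction;
using vespalib::eval::ValueType;

void compatible_types_examples() {
    auto res       = ValueType::from_spec("double");
    auto sparse_xy = ValueType::from_spec("tensor(x{},y{})");
    auto dense_x   = ValueType::from_spec("tensor(x[10])");
    bool hit  = SparseDotProductFunction::compatible_types(res, sparse_xy, sparse_xy); // true
    bool miss = SparseDotProductFunction::compatible_types(res, dense_x, sparse_xy);   // false: lhs is not sparse
    (void) hit; (void) miss;
}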
diff --git a/eval/src/vespa/eval/instruction/sparse_dot_product_function.h b/eval/src/vespa/eval/instruction/sparse_dot_product_function.h
new file mode 100644
index 00000000000..ccc7a61f5e8
--- /dev/null
+++ b/eval/src/vespa/eval/instruction/sparse_dot_product_function.h
@@ -0,0 +1,23 @@
+// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+
+#pragma once
+
+#include <vespa/eval/eval/tensor_function.h>
+
+namespace vespalib::eval {
+
+/**
+ * Tensor function for a dot product between two sparse tensors.
+ */
+class SparseDotProductFunction : public tensor_function::Op2
+{
+public:
+ SparseDotProductFunction(const TensorFunction &lhs_in,
+ const TensorFunction &rhs_in);
+ InterpretedFunction::Instruction compile_self(const ValueBuilderFactory &factory, Stash &stash) const override;
+ bool result_is_mutable() const override { return true; }
+ static bool compatible_types(const ValueType &res, const ValueType &lhs, const ValueType &rhs);
+ static const TensorFunction &optimize(const TensorFunction &expr, Stash &stash);
+};
+
+} // namespace
diff --git a/fastos/src/vespa/fastos/ringbuffer.h b/fastos/src/vespa/fastos/ringbuffer.h
index 41c0af7385b..89b1bb84c9c 100644
--- a/fastos/src/vespa/fastos/ringbuffer.h
+++ b/fastos/src/vespa/fastos/ringbuffer.h
@@ -42,18 +42,6 @@ public:
_closed = false;
}
- FastOS_RingBufferData *GetData () { return _data; }
-
- void RepositionDataAt0 ()
- {
- uint8_t *src = &_data->_buffer[_dataIndex];
- uint8_t *dst = _data->_buffer;
-
- for(int i=0; i<_dataSize; i++)
- *dst++ = *src++;
- _dataIndex = 0;
- }
-
FastOS_RingBuffer (int bufferSize)
: _closed(false),
_data(0),
@@ -93,11 +81,6 @@ public:
_dataSize += bytes;
}
- int GetDataSize ()
- {
- return _dataSize;
- }
-
int GetWriteSpace ()
{
int spaceLeft = _bufferSize - _dataSize;
diff --git a/flags/src/main/java/com/yahoo/vespa/flags/Flags.java b/flags/src/main/java/com/yahoo/vespa/flags/Flags.java
index e80ad6b4117..28206aceee1 100644
--- a/flags/src/main/java/com/yahoo/vespa/flags/Flags.java
+++ b/flags/src/main/java/com/yahoo/vespa/flags/Flags.java
@@ -60,14 +60,14 @@ public class Flags {
public static final UnboundDoubleFlag DEFAULT_TERM_WISE_LIMIT = defineDoubleFlag(
"default-term-wise-limit", 1.0,
- List.of("baldersheim"), "2020-12-02", "2021-02-01",
+ List.of("baldersheim"), "2020-12-02", "2022-01-01",
"Default limit for when to apply termwise query evaluation",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundStringFlag FEED_SEQUENCER_TYPE = defineStringFlag(
"feed-sequencer-type", "LATENCY",
- List.of("baldersheim"), "2020-12-02", "2021-02-01",
+ List.of("baldersheim"), "2020-12-02", "2022-01-01",
"Selects type of sequenced executor used for feeding, valid values are LATENCY, ADAPTIVE, THROUGHPUT",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
@@ -81,28 +81,28 @@ public class Flags {
public static final UnboundIntFlag RESPONSE_NUM_THREADS = defineIntFlag(
"response-num-threads", 2,
- List.of("baldersheim"), "2020-12-02", "2021-02-01",
+ List.of("baldersheim"), "2020-12-02", "2022-01-01",
"Number of threads used for mbus responses, default is 2, negative number = numcores/4",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag SKIP_COMMUNICATIONMANAGER_THREAD = defineFeatureFlag(
"skip-communicatiomanager-thread", false,
- List.of("baldersheim"), "2020-12-02", "2021-02-01",
+ List.of("baldersheim"), "2020-12-02", "2022-01-01",
"Should we skip the communicationmanager thread",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag SKIP_MBUS_REQUEST_THREAD = defineFeatureFlag(
"skip-mbus-request-thread", false,
- List.of("baldersheim"), "2020-12-02", "2021-02-01",
+ List.of("baldersheim"), "2020-12-02", "2022-01-01",
"Should we skip the mbus request thread",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag SKIP_MBUS_REPLY_THREAD = defineFeatureFlag(
"skip-mbus-reply-thread", false,
- List.of("baldersheim"), "2020-12-02", "2021-02-01",
+ List.of("baldersheim"), "2020-12-02", "2022-01-01",
"Should we skip the mbus reply thread",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
@@ -130,14 +130,14 @@ public class Flags {
public static final UnboundStringFlag TLS_FOR_ZOOKEEPER_CLIENT_SERVER_COMMUNICATION = defineStringFlag(
"tls-for-zookeeper-client-server-communication", "OFF",
- List.of("hmusum"), "2020-12-02", "2021-02-01",
+ List.of("hmusum"), "2020-12-02", "2021-04-01",
"How to setup TLS for ZooKeeper client/server communication. Valid values are OFF, PORT_UNIFICATION, TLS_WITH_PORT_UNIFICATION, TLS_ONLY",
"Takes effect on restart of config server",
NODE_TYPE, HOSTNAME);
public static final UnboundBooleanFlag USE_TLS_FOR_ZOOKEEPER_CLIENT = defineFeatureFlag(
"use-tls-for-zookeeper-client", false,
- List.of("hmusum"), "2020-12-02", "2021-02-01",
+ List.of("hmusum"), "2020-12-02", "2021-04-01",
"Whether to use TLS for ZooKeeper clients",
"Takes effect on restart of process",
NODE_TYPE, HOSTNAME);
@@ -162,7 +162,7 @@ public class Flags {
public static final UnboundStringFlag YUM_DIST_HOST = defineStringFlag(
"yum-dist-host", "",
- List.of("aressem"), "2020-12-02", "2021-02-01",
+ List.of("aressem"), "2020-12-02", "2021-03-01",
"Override the default dist host for yum.",
"Takes effect on next tick or on host-admin restart (may vary where used).");
@@ -174,14 +174,14 @@ public class Flags {
public static final UnboundBooleanFlag PROVISION_APPLICATION_ROLES = defineFeatureFlag(
"provision-application-roles", false,
- List.of("tokle"), "2020-12-02", "2021-02-01",
+ List.of("tokle"), "2020-12-02", "2021-04-01",
"Whether application roles should be provisioned",
"Takes effect on next deployment (controller)",
ZONE_ID);
public static final UnboundBooleanFlag APPLICATION_IAM_ROLE = defineFeatureFlag(
"application-iam-roles", false,
- List.of("tokle"), "2020-12-02", "2021-02-01",
+ List.of("tokle"), "2020-12-02", "2021-04-01",
"Allow separate iam roles when provisioning/assigning hosts",
"Takes effect immediately on new hosts, on next redeploy for applications",
APPLICATION_ID);
@@ -226,39 +226,25 @@ public class Flags {
public static final UnboundBooleanFlag USE_ACCESS_CONTROL_CLIENT_AUTHENTICATION = defineFeatureFlag(
"use-access-control-client-authentication", false,
- List.of("tokle"), "2020-12-02", "2021-02-01",
+ List.of("tokle"), "2020-12-02", "2021-03-01",
"Whether application container should set up client authentication on default port based on access control element",
"Takes effect on next internal redeployment",
APPLICATION_ID);
public static final UnboundBooleanFlag USE_ASYNC_MESSAGE_HANDLING_ON_SCHEDULE = defineFeatureFlag(
"async-message-handling-on-schedule", false,
- List.of("baldersheim"), "2020-12-02", "2021-02-01",
+ List.of("baldersheim"), "2020-12-02", "2022-01-01",
"Optionally deliver async messages in own thread",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
- public static final UnboundIntFlag MERGE_CHUNK_SIZE = defineIntFlag(
- "merge-chunk-size", 0x2000000,
- List.of("baldersheim"), "2020-12-02", "2021-02-01",
- "Size of baldersheim buffer in service layer",
- "Takes effect at redeployment",
- ZONE_ID, APPLICATION_ID);
-
public static final UnboundDoubleFlag FEED_CONCURRENCY = defineDoubleFlag(
"feed-concurrency", 0.5,
- List.of("baldersheim"), "2020-12-02", "2021-02-01",
+ List.of("baldersheim"), "2020-12-02", "2022-01-01",
"How much concurrency should be allowed for feed",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
- public static final UnboundBooleanFlag ENABLE_AUTOMATIC_REINDEXING = defineFeatureFlag(
- "enable-automatic-reindexing", true,
- List.of("bjorncs", "jonmv"), "2020-12-02", "2021-02-01",
- "Whether to automatically trigger reindexing from config change",
- "Takes effect on next internal redeployment",
- APPLICATION_ID);
-
public static final UnboundDoubleFlag REINDEXER_WINDOW_SIZE_INCREMENT = defineDoubleFlag(
"reindexer-window-size-increment", 0.2,
List.of("jonmv"), "2020-12-09", "2021-02-07",
@@ -275,7 +261,7 @@ public class Flags {
public static final UnboundBooleanFlag USE_POWER_OF_TWO_CHOICES_LOAD_BALANCING = defineFeatureFlag(
"use-power-of-two-choices-load-balancing", false,
- List.of("tokle"), "2020-12-02", "2021-02-01",
+ List.of("tokle"), "2020-12-02", "2021-02-15",
"Whether to use Power of two load balancing algorithm for application",
"Takes effect on next internal redeployment",
APPLICATION_ID);
diff --git a/metrics-proxy/pom.xml b/metrics-proxy/pom.xml
index 8bf5a30e584..90d9f093da1 100644
--- a/metrics-proxy/pom.xml
+++ b/metrics-proxy/pom.xml
@@ -101,11 +101,6 @@
<version>${project.version}</version>
<scope>provided</scope>
</dependency>
- <dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- <scope>provided</scope>
- </dependency>
<!-- compile scope -->
diff --git a/metrics-proxy/src/main/java/ai/vespa/metricsproxy/node/NodeMetricGatherer.java b/metrics-proxy/src/main/java/ai/vespa/metricsproxy/node/NodeMetricGatherer.java
index 3cd9f526387..0e0511967a3 100644
--- a/metrics-proxy/src/main/java/ai/vespa/metricsproxy/node/NodeMetricGatherer.java
+++ b/metrics-proxy/src/main/java/ai/vespa/metricsproxy/node/NodeMetricGatherer.java
@@ -9,13 +9,11 @@ import ai.vespa.metricsproxy.metric.model.MetricsPacket;
import ai.vespa.metricsproxy.metric.model.ServiceId;
import ai.vespa.metricsproxy.service.SystemPollerProvider;
import ai.vespa.metricsproxy.service.VespaServices;
+import com.fasterxml.jackson.databind.JsonNode;
import com.google.inject.Inject;
import com.yahoo.container.jdisc.state.CoredumpGatherer;
import com.yahoo.container.jdisc.state.FileWrapper;
import com.yahoo.container.jdisc.state.HostLifeGatherer;
-import com.yahoo.yolean.Exceptions;
-import org.json.JSONException;
-import org.json.JSONObject;
import java.util.ArrayList;
import java.util.Iterator;
@@ -54,10 +52,10 @@ public class NodeMetricGatherer {
List<MetricsPacket.Builder> metricPacketBuilders = new ArrayList<>();
metricPacketBuilders.addAll(gatherServiceHealthMetrics(vespaServices));
- JSONObject coredumpPacket = CoredumpGatherer.gatherCoredumpMetrics(fileWrapper);
+ JsonNode coredumpPacket = CoredumpGatherer.gatherCoredumpMetrics(fileWrapper);
addObjectToBuilders(metricPacketBuilders, coredumpPacket);
if (SystemPollerProvider.runningOnLinux()) {
- JSONObject packet = HostLifeGatherer.getHostLifePacket(fileWrapper);
+ JsonNode packet = HostLifeGatherer.getHostLifePacket(fileWrapper);
addObjectToBuilders(metricPacketBuilders, packet);
}
@@ -69,24 +67,20 @@ public class NodeMetricGatherer {
).collect(Collectors.toList());
}
- protected static void addObjectToBuilders(List<MetricsPacket.Builder> builders, JSONObject object) {
- try {
- MetricsPacket.Builder builder = new MetricsPacket.Builder(ServiceId.toServiceId(object.getString("application")));
- builder.timestamp(object.getLong("timestamp"));
- if (object.has("status_code")) builder.statusCode(object.getInt("status_code"));
- if (object.has("status_msg")) builder.statusMessage(object.getString("status_msg"));
- if (object.has("metrics")) {
- JSONObject metrics = object.getJSONObject("metrics");
- Iterator<?> keys = metrics.keys();
- while(keys.hasNext()) {
- String key = (String) keys.next();
- builder.putMetric(MetricId.toMetricId(key), metrics.getLong(key));
- }
+ protected static void addObjectToBuilders(List<MetricsPacket.Builder> builders, JsonNode object) {
+ MetricsPacket.Builder builder = new MetricsPacket.Builder(ServiceId.toServiceId(object.get("application").textValue()));
+ builder.timestamp(object.get("timestamp").longValue());
+ if (object.has("status_code")) builder.statusCode(object.get("status_code").intValue());
+ if (object.has("status_msg")) builder.statusMessage(object.get("status_msg").textValue());
+ if (object.has("metrics")) {
+ JsonNode metrics = object.get("metrics");
+ Iterator<?> keys = metrics.fieldNames();
+ while(keys.hasNext()) {
+ String key = (String) keys.next();
+ builder.putMetric(MetricId.toMetricId(key), metrics.get(key).asLong());
}
- builders.add(builder);
- } catch (JSONException e) {
- Exceptions.toMessageString(e);
}
+ builders.add(builder);
}
}
diff --git a/metrics-proxy/src/main/java/ai/vespa/metricsproxy/service/RemoteHealthMetricFetcher.java b/metrics-proxy/src/main/java/ai/vespa/metricsproxy/service/RemoteHealthMetricFetcher.java
index 1cab1a859a9..827f513a418 100644
--- a/metrics-proxy/src/main/java/ai/vespa/metricsproxy/service/RemoteHealthMetricFetcher.java
+++ b/metrics-proxy/src/main/java/ai/vespa/metricsproxy/service/RemoteHealthMetricFetcher.java
@@ -2,9 +2,10 @@
package ai.vespa.metricsproxy.service;
import ai.vespa.metricsproxy.metric.HealthMetric;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
import java.util.logging.Level;
-import org.json.JSONException;
-import org.json.JSONObject;
import java.io.IOException;
import java.util.logging.Logger;
@@ -15,6 +16,7 @@ import java.util.logging.Logger;
* @author Jo Kristian Bergum
*/
public class RemoteHealthMetricFetcher extends HttpMetricFetcher {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
private final static Logger log = Logger.getLogger(RemoteHealthMetricFetcher.class.getPackage().getName());
private final static String HEALTH_PATH = STATE_PATH + "health";
@@ -54,16 +56,16 @@ public class RemoteHealthMetricFetcher extends HttpMetricFetcher {
return HealthMetric.getUnknown("Empty response from status page");
}
try {
- JSONObject o = new JSONObject(data);
- JSONObject status = o.getJSONObject("status");
- String code = status.getString("code");
+ JsonNode o = jsonMapper.readTree(data);
+ JsonNode status = o.get("status");
+ String code = status.get("code").asText();
String message = "";
if (status.has("message")) {
- message = status.getString("message");
+ message = status.get("message").textValue();
}
return HealthMetric.get(code, message);
- } catch (JSONException e) {
+ } catch (IOException e) {
log.log(Level.FINE, "Failed to parse json response from metrics page:" + e + ":" + data);
return HealthMetric.getUnknown("Not able to parse json from status page");
}
diff --git a/metrics-proxy/src/main/java/ai/vespa/metricsproxy/service/RemoteMetricsFetcher.java b/metrics-proxy/src/main/java/ai/vespa/metricsproxy/service/RemoteMetricsFetcher.java
index 442ebc0d38d..464f215edc4 100644
--- a/metrics-proxy/src/main/java/ai/vespa/metricsproxy/service/RemoteMetricsFetcher.java
+++ b/metrics-proxy/src/main/java/ai/vespa/metricsproxy/service/RemoteMetricsFetcher.java
@@ -4,9 +4,9 @@ package ai.vespa.metricsproxy.service;
import ai.vespa.metricsproxy.metric.Metric;
import ai.vespa.metricsproxy.metric.Metrics;
import ai.vespa.metricsproxy.metric.model.DimensionId;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
import java.io.IOException;
import java.util.Collections;
@@ -23,6 +23,8 @@ import static ai.vespa.metricsproxy.metric.model.DimensionId.toDimensionId;
*/
public class RemoteMetricsFetcher extends HttpMetricFetcher {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
final static String METRICS_PATH = STATE_PATH + "metrics";
RemoteMetricsFetcher(VespaService service, int port) {
@@ -57,21 +59,21 @@ public class RemoteMetricsFetcher extends HttpMetricFetcher {
return remoteMetrics;
}
- private Metrics parse(String data) throws JSONException {
- JSONObject o = new JSONObject(data);
+ private Metrics parse(String data) throws IOException {
+ JsonNode o = jsonMapper.readTree(data);
if (!(o.has("metrics"))) {
return new Metrics(); //empty
}
- JSONObject metrics = o.getJSONObject("metrics");
- JSONArray values;
+ JsonNode metrics = o.get("metrics");
+ ArrayNode values;
long timestamp;
try {
- JSONObject snapshot = metrics.getJSONObject("snapshot");
- timestamp = (long) snapshot.getDouble("to");
- values = metrics.getJSONArray("values");
- } catch (JSONException e) {
+ JsonNode snapshot = metrics.get("snapshot");
+ timestamp = snapshot.get("to").asLong();
+ values = (ArrayNode) metrics.get("values");
+ } catch (Exception e) {
// snapshot might not have been produced. Do not throw exception into log
return new Metrics();
}
@@ -81,29 +83,29 @@ public class RemoteMetricsFetcher extends HttpMetricFetcher {
Map<DimensionId, String> noDims = Collections.emptyMap();
Map<String, Map<DimensionId, String>> uniqueDimensions = new HashMap<>();
- for (int i = 0; i < values.length(); i++) {
- JSONObject metric = values.getJSONObject(i);
- String name = metric.getString("name");
+ for (int i = 0; i < values.size(); i++) {
+ JsonNode metric = values.get(i);
+ String name = metric.get("name").textValue();
String description = "";
if (metric.has("description")) {
- description = metric.getString("description");
+ description = metric.get("description").textValue();
}
Map<DimensionId, String> dim = noDims;
if (metric.has("dimensions")) {
- JSONObject dimensions = metric.getJSONObject("dimensions");
+ JsonNode dimensions = metric.get("dimensions");
StringBuilder sb = new StringBuilder();
- for (Iterator<?> it = dimensions.keys(); it.hasNext(); ) {
+ for (Iterator<?> it = dimensions.fieldNames(); it.hasNext(); ) {
String k = (String) it.next();
- String v = dimensions.getString(k);
+ String v = dimensions.get(k).asText();
sb.append(toDimensionId(k)).append(v);
}
if ( ! uniqueDimensions.containsKey(sb.toString())) {
dim = new HashMap<>();
- for (Iterator<?> it = dimensions.keys(); it.hasNext(); ) {
+ for (Iterator<?> it = dimensions.fieldNames(); it.hasNext(); ) {
String k = (String) it.next();
- String v = dimensions.getString(k);
+ String v = dimensions.get(k).textValue();
dim.put(toDimensionId(k), v);
}
uniqueDimensions.put(sb.toString(), Collections.unmodifiableMap(dim));
@@ -111,10 +113,17 @@ public class RemoteMetricsFetcher extends HttpMetricFetcher {
dim = uniqueDimensions.get(sb.toString());
}
- JSONObject aggregates = metric.getJSONObject("values");
- for (Iterator<?> it = aggregates.keys(); it.hasNext(); ) {
+ JsonNode aggregates = metric.get("values");
+ for (Iterator<?> it = aggregates.fieldNames(); it.hasNext(); ) {
String aggregator = (String) it.next();
- Number value = (Number) aggregates.get(aggregator);
+ JsonNode aggregatorValue = aggregates.get(aggregator);
+ if (aggregatorValue == null) {
+ throw new IllegalArgumentException("Value for aggregator '" + aggregator + "' is missing");
+ }
+ Number value = aggregatorValue.numberValue();
+ if (value == null) {
+ throw new IllegalArgumentException("Value for aggregator '" + aggregator + "' is not a number");
+ }
StringBuilder metricName = (new StringBuilder()).append(name).append(".").append(aggregator);
m.add(new Metric(metricName.toString(), value, timestamp, dim, description));
}
diff --git a/metrics-proxy/src/test/java/ai/vespa/metricsproxy/http/application/ApplicationMetricsHandlerTest.java b/metrics-proxy/src/test/java/ai/vespa/metricsproxy/http/application/ApplicationMetricsHandlerTest.java
index d7576718e8a..cf1eac3c691 100644
--- a/metrics-proxy/src/test/java/ai/vespa/metricsproxy/http/application/ApplicationMetricsHandlerTest.java
+++ b/metrics-proxy/src/test/java/ai/vespa/metricsproxy/http/application/ApplicationMetricsHandlerTest.java
@@ -8,13 +8,11 @@ import ai.vespa.metricsproxy.metric.model.json.GenericApplicationModel;
import ai.vespa.metricsproxy.metric.model.json.GenericJsonModel;
import ai.vespa.metricsproxy.metric.model.json.GenericMetrics;
import ai.vespa.metricsproxy.metric.model.json.GenericService;
+import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
import com.github.tomakehurst.wiremock.junit.WireMockRule;
import com.yahoo.container.jdisc.RequestHandlerTestDriver;
-import java.util.regex.Pattern;
-
-import org.json.JSONArray;
-import org.json.JSONObject;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
@@ -24,6 +22,7 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.Executors;
+import java.util.regex.Pattern;
import static ai.vespa.metricsproxy.TestUtil.getFileContents;
import static ai.vespa.metricsproxy.http.ValuesFetcher.defaultMetricsConsumerId;
@@ -49,6 +48,8 @@ import static org.junit.Assert.fail;
@SuppressWarnings("UnstableApiUsage")
public class ApplicationMetricsHandlerTest {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
private static final String HOST = "localhost";
private static final String URI_BASE = "http://" + HOST;
private static final String METRICS_V1_URI = URI_BASE + METRICS_V1_PATH;
@@ -102,16 +103,16 @@ public class ApplicationMetricsHandlerTest {
@Test
public void v1_response_contains_values_uri() throws Exception {
String response = testDriver.sendRequest(METRICS_V1_URI).readAll();
- JSONObject root = new JSONObject(response);
+ JsonNode root = jsonMapper.readTree(response);
assertTrue(root.has("resources"));
- JSONArray resources = root.getJSONArray("resources");
- assertEquals(2, resources.length());
+ ArrayNode resources = (ArrayNode) root.get("resources");
+ assertEquals(2, resources.size());
- JSONObject valuesUrl = resources.getJSONObject(0);
- assertEquals(METRICS_VALUES_URI, valuesUrl.getString("url"));
- JSONObject prometheusUrl = resources.getJSONObject(1);
- assertEquals(PROMETHEUS_VALUES_URI, prometheusUrl.getString("url"));
+ JsonNode valuesUrl = resources.get(0);
+ assertEquals(METRICS_VALUES_URI, valuesUrl.get("url").textValue());
+ JsonNode prometheusUrl = resources.get(1);
+ assertEquals(PROMETHEUS_VALUES_URI, prometheusUrl.get("url").textValue());
}
@Ignore
@@ -199,7 +200,7 @@ public class ApplicationMetricsHandlerTest {
@Test
public void invalid_path_yields_error_response() throws Exception {
String response = testDriver.sendRequest(METRICS_V1_URI + "/invalid").readAll();
- JSONObject root = new JSONObject(response);
+ JsonNode root = jsonMapper.readTree(response);
assertTrue(root.has("error"));
}
diff --git a/metrics-proxy/src/test/java/ai/vespa/metricsproxy/http/metrics/MetricsHandlerTestBase.java b/metrics-proxy/src/test/java/ai/vespa/metricsproxy/http/metrics/MetricsHandlerTestBase.java
index 1c5ce695155..379ef04d38d 100644
--- a/metrics-proxy/src/test/java/ai/vespa/metricsproxy/http/metrics/MetricsHandlerTestBase.java
+++ b/metrics-proxy/src/test/java/ai/vespa/metricsproxy/http/metrics/MetricsHandlerTestBase.java
@@ -6,9 +6,9 @@ import ai.vespa.metricsproxy.metric.model.json.GenericJsonModel;
import ai.vespa.metricsproxy.metric.model.json.GenericMetrics;
import ai.vespa.metricsproxy.metric.model.json.GenericService;
import ai.vespa.metricsproxy.service.DownService;
+import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
-import org.json.JSONArray;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.node.ArrayNode;
import org.junit.Ignore;
import org.junit.Test;
@@ -32,6 +32,8 @@ import static org.junit.Assert.fail;
*/
public abstract class MetricsHandlerTestBase<MODEL> extends HttpHandlerTestBase {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
static String rootUri;
static String valuesUri;
@@ -56,21 +58,21 @@ public abstract class MetricsHandlerTestBase<MODEL> extends HttpHandlerTestBase
@Test
public void invalid_path_yields_error_response() throws Exception {
String response = testDriver.sendRequest(rootUri + "/invalid").readAll();
- JSONObject root = new JSONObject(response);
+ JsonNode root = jsonMapper.readTree(response);
assertTrue(root.has("error"));
}
@Test
public void root_response_contains_values_uri() throws Exception {
String response = testDriver.sendRequest(rootUri).readAll();
- JSONObject root = new JSONObject(response);
+ JsonNode root = jsonMapper.readTree(response);
assertTrue(root.has("resources"));
- JSONArray resources = root.getJSONArray("resources");
- assertEquals(1, resources.length());
+ ArrayNode resources = (ArrayNode) root.get("resources");
+ assertEquals(1, resources.size());
- JSONObject valuesUrl = resources.getJSONObject(0);
- assertEquals(valuesUri, valuesUrl.getString("url"));
+ JsonNode valuesUrl = resources.get(0);
+ assertEquals(valuesUri, valuesUrl.get("url").textValue());
}
@Ignore
diff --git a/metrics-proxy/src/test/java/ai/vespa/metricsproxy/http/prometheus/PrometheusHandlerTest.java b/metrics-proxy/src/test/java/ai/vespa/metricsproxy/http/prometheus/PrometheusHandlerTest.java
index a224c4090b3..89186e63b93 100644
--- a/metrics-proxy/src/test/java/ai/vespa/metricsproxy/http/prometheus/PrometheusHandlerTest.java
+++ b/metrics-proxy/src/test/java/ai/vespa/metricsproxy/http/prometheus/PrometheusHandlerTest.java
@@ -3,9 +3,10 @@ package ai.vespa.metricsproxy.http.prometheus;
import ai.vespa.metricsproxy.http.HttpHandlerTestBase;
import ai.vespa.metricsproxy.service.DummyService;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
import com.yahoo.container.jdisc.RequestHandlerTestDriver;
-import org.json.JSONArray;
-import org.json.JSONObject;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
@@ -22,6 +23,8 @@ import static org.junit.Assert.assertTrue;
@SuppressWarnings("UnstableApiUsage")
public class PrometheusHandlerTest extends HttpHandlerTestBase {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
private static final String V1_URI = URI_BASE + PrometheusHandler.V1_PATH;
private static final String VALUES_URI = URI_BASE + PrometheusHandler.VALUES_PATH;
@@ -40,14 +43,14 @@ public class PrometheusHandlerTest extends HttpHandlerTestBase {
@Test
public void v1_response_contains_values_uri() throws Exception {
String response = testDriver.sendRequest(V1_URI).readAll();
- JSONObject root = new JSONObject(response);
+ JsonNode root = jsonMapper.readTree(response);
assertTrue(root.has("resources"));
- JSONArray resources = root.getJSONArray("resources");
- assertEquals(1, resources.length());
+ ArrayNode resources = (ArrayNode) root.get("resources");
+ assertEquals(1, resources.size());
- JSONObject valuesUrl = resources.getJSONObject(0);
- assertEquals(VALUES_URI, valuesUrl.getString("url"));
+ JsonNode valuesUrl = resources.get(0);
+ assertEquals(VALUES_URI, valuesUrl.get("url").textValue());
}
@Ignore
diff --git a/metrics-proxy/src/test/java/ai/vespa/metricsproxy/node/NodeMetricGathererTest.java b/metrics-proxy/src/test/java/ai/vespa/metricsproxy/node/NodeMetricGathererTest.java
index e2ad0ccd504..c2fc23a878d 100644
--- a/metrics-proxy/src/test/java/ai/vespa/metricsproxy/node/NodeMetricGathererTest.java
+++ b/metrics-proxy/src/test/java/ai/vespa/metricsproxy/node/NodeMetricGathererTest.java
@@ -3,8 +3,9 @@ package ai.vespa.metricsproxy.node;
import ai.vespa.metricsproxy.metric.model.MetricId;
import ai.vespa.metricsproxy.metric.model.MetricsPacket;
-import org.json.JSONException;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import java.util.ArrayList;
@@ -17,10 +18,12 @@ import static org.junit.Assert.assertEquals;
*/
public class NodeMetricGathererTest {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
@Test
- public void testJSONObjectIsCorrectlyConvertedToMetricsPacket() throws JSONException {
+ public void testJSONObjectIsCorrectlyConvertedToMetricsPacket() {
List<MetricsPacket.Builder> builders = new ArrayList<>();
- JSONObject hostLifePacket = generateHostLifePacket();
+ JsonNode hostLifePacket = generateHostLifePacket();
NodeMetricGatherer.addObjectToBuilders(builders, hostLifePacket);
MetricsPacket packet = builders.remove(0).build();
@@ -32,17 +35,17 @@ public class NodeMetricGathererTest {
assertEquals(1l, packet.metrics().get(MetricId.toMetricId("alive")));
}
- private JSONObject generateHostLifePacket() throws JSONException {
+ private JsonNode generateHostLifePacket() {
- JSONObject jsonObject = new JSONObject();
+ ObjectNode jsonObject = jsonMapper.createObjectNode();
jsonObject.put("status_code", 0);
jsonObject.put("status_msg", "OK");
jsonObject.put("timestamp", 123);
jsonObject.put("application", "host_life");
- JSONObject metrics = new JSONObject();
+ ObjectNode metrics = jsonMapper.createObjectNode();
metrics.put("uptime", 12);
metrics.put("alive", 1);
- jsonObject.put("metrics", metrics);
+ jsonObject.set("metrics", metrics);
return jsonObject;
}
}
diff --git a/metrics-proxy/src/test/java/ai/vespa/metricsproxy/rpc/RpcMetricsTest.java b/metrics-proxy/src/test/java/ai/vespa/metricsproxy/rpc/RpcMetricsTest.java
index 8d5bba77844..70970bfe8da 100644
--- a/metrics-proxy/src/test/java/ai/vespa/metricsproxy/rpc/RpcMetricsTest.java
+++ b/metrics-proxy/src/test/java/ai/vespa/metricsproxy/rpc/RpcMetricsTest.java
@@ -5,17 +5,18 @@ import ai.vespa.metricsproxy.metric.Metric;
import ai.vespa.metricsproxy.metric.Metrics;
import ai.vespa.metricsproxy.metric.model.ConsumerId;
import ai.vespa.metricsproxy.service.VespaService;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
import com.yahoo.jrt.Request;
import com.yahoo.jrt.Spec;
import com.yahoo.jrt.StringValue;
import com.yahoo.jrt.Supervisor;
import com.yahoo.jrt.Target;
import com.yahoo.jrt.Transport;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
import org.junit.Test;
+import java.io.IOException;
import java.util.List;
import static ai.vespa.metricsproxy.TestUtil.getFileContents;
@@ -40,6 +41,8 @@ import static org.junit.Assert.assertTrue;
*/
public class RpcMetricsTest {
+ private static final ObjectMapper jsonMapper = new ObjectMapper();
+
private static final String METRICS_RESPONSE = getFileContents("metrics-storage-simple.json").trim();
private static final String EXTRA_APP = "extra";
@@ -67,9 +70,9 @@ public class RpcMetricsTest {
String allServicesResponse = getMetricsForYamas(ALL_SERVICES, rpcClient).trim();
// Verify that application is used as serviceId, and that metric exists.
- JSONObject extraMetrics = findExtraMetricsObject(allServicesResponse);
- assertThat(extraMetrics.getJSONObject("metrics").getInt("foo.count"), is(3));
- assertThat(extraMetrics.getJSONObject("dimensions").getString("role"), is("extra-role"));
+ JsonNode extraMetrics = findExtraMetricsObject(allServicesResponse);
+ assertThat(extraMetrics.get("metrics").get("foo.count").intValue(), is(3));
+ assertThat(extraMetrics.get("dimensions").get("role").textValue(), is("extra-role"));
}
}
}
@@ -85,7 +88,7 @@ public class RpcMetricsTest {
// Verify that no extra metrics exists
String allServicesResponse = getMetricsForYamas(ALL_SERVICES, rpcClient).trim();
- JSONObject extraMetrics = findExtraMetricsObject(allServicesResponse);
+ JsonNode extraMetrics = findExtraMetricsObject(allServicesResponse);
assertEquals(extraMetrics.toString(), "{}");
}
}
@@ -130,28 +133,28 @@ public class RpcMetricsTest {
}
}
- private static void verifyMetricsFromRpcRequest(VespaService service, RpcClient client) throws JSONException {
+ private static void verifyMetricsFromRpcRequest(VespaService service, RpcClient client) throws IOException {
String jsonResponse = getMetricsForYamas(service.getMonitoringName(), client).trim();
- JSONArray metrics = new JSONObject(jsonResponse).getJSONArray("metrics");
- assertThat("Expected 3 metric messages", metrics.length(), is(3));
- for (int i = 0; i < metrics.length() - 1; i++) { // The last "metric message" contains only status code/message
- JSONObject jsonObject = metrics.getJSONObject(i);
+ ArrayNode metrics = (ArrayNode) jsonMapper.readTree(jsonResponse).get("metrics");
+ assertThat("Expected 3 metric messages", metrics.size(), is(3));
+ for (int i = 0; i < metrics.size() - 1; i++) { // The last "metric message" contains only status code/message
+ JsonNode jsonObject = metrics.get(i);
assertFalse(jsonObject.has("status_code"));
assertFalse(jsonObject.has("status_msg"));
- assertThat(jsonObject.getJSONObject("dimensions").getString("foo"), is("bar"));
- assertThat(jsonObject.getJSONObject("dimensions").getString("bar"), is("foo"));
- assertThat(jsonObject.getJSONObject("dimensions").getString("serviceDim"), is("serviceDimValue"));
- assertThat(jsonObject.getJSONObject("routing").getJSONObject("yamas").getJSONArray("namespaces").length(), is(1));
- if (jsonObject.getJSONObject("metrics").has("foo_count")) {
- assertThat(jsonObject.getJSONObject("metrics").getInt("foo_count"), is(1));
- assertThat(jsonObject.getJSONObject("routing").getJSONObject("yamas").getJSONArray("namespaces").get(0), is(vespaMetricsConsumerId.id));
+ assertThat(jsonObject.get("dimensions").get("foo").textValue(), is("bar"));
+ assertThat(jsonObject.get("dimensions").get("bar").textValue(), is("foo"));
+ assertThat(jsonObject.get("dimensions").get("serviceDim").textValue(), is("serviceDimValue"));
+ assertThat(jsonObject.get("routing").get("yamas").get("namespaces").size(), is(1));
+ if (jsonObject.get("metrics").has("foo_count")) {
+ assertThat(jsonObject.get("metrics").get("foo_count").intValue(), is(1));
+ assertThat(jsonObject.get("routing").get("yamas").get("namespaces").get(0).textValue(), is(vespaMetricsConsumerId.id));
} else {
- assertThat(jsonObject.getJSONObject("metrics").getInt("foo.count"), is(1));
- assertThat(jsonObject.getJSONObject("routing").getJSONObject("yamas").getJSONArray("namespaces").get(0), is(CUSTOM_CONSUMER_ID.id));
+ assertThat(jsonObject.get("metrics").get("foo.count").intValue(), is(1));
+ assertThat(jsonObject.get("routing").get("yamas").get("namespaces").get(0).textValue(), is(CUSTOM_CONSUMER_ID.id));
}
}
- verifyStatusMessage(metrics.getJSONObject(metrics.length() - 1));
+ verifyStatusMessage(metrics.get(metrics.size() - 1));
}
private void verfiyMetricsFromServiceObject(VespaService service) {
@@ -166,15 +169,15 @@ public class RpcMetricsTest {
assertThat("Metric foo did not contain correct dimension for key = bar", foo.getDimensions().get(toDimensionId("bar")), is("foo"));
}
- private void verifyMetricsFromRpcRequestForAllServices(RpcClient client) throws JSONException {
+ private void verifyMetricsFromRpcRequestForAllServices(RpcClient client) throws IOException {
// Verify that metrics for all services can be retrieved in one request.
String allServicesResponse = getMetricsForYamas(ALL_SERVICES, client).trim();
- JSONArray allServicesMetrics = new JSONObject(allServicesResponse).getJSONArray("metrics");
- assertThat(allServicesMetrics.length(), is(5));
+ ArrayNode allServicesMetrics = (ArrayNode) jsonMapper.readTree(allServicesResponse).get("metrics");
+ assertThat(allServicesMetrics.size(), is(5));
}
@Test
- public void testGetAllMetricNames() throws Exception {
+ public void testGetAllMetricNames() {
try (IntegrationTester tester = new IntegrationTester()) {
tester.httpServer().setResponse(METRICS_RESPONSE);
@@ -205,14 +208,14 @@ public class RpcMetricsTest {
invoke(req, rpcClient, false);
}
- private JSONObject findExtraMetricsObject(String jsonResponse) throws JSONException {
- JSONArray metrics = new JSONObject(jsonResponse).getJSONArray("metrics");
- for (int i = 0; i < metrics.length(); i++) {
- JSONObject jsonObject = metrics.getJSONObject(i);
+ private JsonNode findExtraMetricsObject(String jsonResponse) throws IOException {
+ ArrayNode metrics = (ArrayNode) jsonMapper.readTree(jsonResponse).get("metrics");
+ for (int i = 0; i < metrics.size(); i++) {
+ JsonNode jsonObject = metrics.get(i);
assertTrue(jsonObject.has("application"));
- if (jsonObject.getString("application").equals(EXTRA_APP)) return jsonObject;
+ if (jsonObject.get("application").textValue().equals(EXTRA_APP)) return jsonObject;
}
- return new JSONObject();
+ return jsonMapper.createObjectNode();
}
private static String getMetricsForYamas(String service, RpcClient client) {
@@ -250,12 +253,12 @@ public class RpcMetricsTest {
return returnValue;
}
- private static void verifyStatusMessage(JSONObject jsonObject) throws JSONException {
- assertThat(jsonObject.getInt("status_code"), is(0));
- assertThat(jsonObject.getString("status_msg"), notNullValue());
- assertThat(jsonObject.getString("application"), notNullValue());
- assertThat(jsonObject.getString("routing"), notNullValue());
- assertThat(jsonObject.length(), is(4));
+ private static void verifyStatusMessage(JsonNode jsonObject) {
+ assertThat(jsonObject.get("status_code").intValue(), is(0));
+ assertThat(jsonObject.get("status_msg").textValue(), notNullValue());
+ assertThat(jsonObject.get("application").textValue(), notNullValue());
+ assertThat(jsonObject.get("routing"), notNullValue());
+ assertThat(jsonObject.size(), is(4));
}
}
diff --git a/metrics-proxy/src/test/java/ai/vespa/metricsproxy/service/ContainerServiceTest.java b/metrics-proxy/src/test/java/ai/vespa/metricsproxy/service/ContainerServiceTest.java
index 0d53f988ac7..7ff179e5528 100644
--- a/metrics-proxy/src/test/java/ai/vespa/metricsproxy/service/ContainerServiceTest.java
+++ b/metrics-proxy/src/test/java/ai/vespa/metricsproxy/service/ContainerServiceTest.java
@@ -2,7 +2,6 @@
package ai.vespa.metricsproxy.service;
import ai.vespa.metricsproxy.metric.Metric;
-import org.json.JSONException;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
@@ -37,7 +36,7 @@ public class ContainerServiceTest {
}
@Test
- public void testMultipleQueryDimensions() throws JSONException {
+ public void testMultipleQueryDimensions() {
int count = 0;
VespaService service = VespaService.create("service1", "id", httpServer.port());
for (Metric m : service.getMetrics().getMetrics()) {
diff --git a/node-repository/src/main/java/com/yahoo/vespa/hosted/provision/maintenance/CapacityChecker.java b/node-repository/src/main/java/com/yahoo/vespa/hosted/provision/maintenance/CapacityChecker.java
index 6094b497fff..46ffe14d4e2 100644
--- a/node-repository/src/main/java/com/yahoo/vespa/hosted/provision/maintenance/CapacityChecker.java
+++ b/node-repository/src/main/java/com/yahoo/vespa/hosted/provision/maintenance/CapacityChecker.java
@@ -161,7 +161,7 @@ public class CapacityChecker {
int timesHostCanBeRemoved = 0;
Optional<Node> unallocatedNode;
- while (timesHostCanBeRemoved < 1000) { // Arbitrary upper bound
+ while (timesHostCanBeRemoved < 100) { // Arbitrary upper bound
unallocatedNode = tryAllocateNodes(nodeChildren.get(host), hosts, resourceMap, containedAllocations);
if (unallocatedNode.isEmpty()) {
timesHostCanBeRemoved++;
diff --git a/node-repository/src/main/java/com/yahoo/vespa/hosted/provision/provisioning/GroupPreparer.java b/node-repository/src/main/java/com/yahoo/vespa/hosted/provision/provisioning/GroupPreparer.java
index 3d61dd39b31..53ba73e4d82 100644
--- a/node-repository/src/main/java/com/yahoo/vespa/hosted/provision/provisioning/GroupPreparer.java
+++ b/node-repository/src/main/java/com/yahoo/vespa/hosted/provision/provisioning/GroupPreparer.java
@@ -61,12 +61,17 @@ public class GroupPreparer {
public List<Node> prepare(ApplicationId application, ClusterSpec cluster, NodeSpec requestedNodes,
List<Node> surplusActiveNodes, MutableInteger highestIndex, int wantedGroups) {
+ String allocateOsRequirement = allocateOsRequirementFlag
+ .with(FetchVector.Dimension.APPLICATION_ID, application.serializedForm())
+ .value();
+
// Try preparing in memory without global unallocated lock. Most of the time there should be no changes and we
// can return nodes previously allocated.
{
MutableInteger probePrepareHighestIndex = new MutableInteger(highestIndex.get());
NodeAllocation probeAllocation = prepareAllocation(application, cluster, requestedNodes, surplusActiveNodes,
- probePrepareHighestIndex, wantedGroups, PROBE_LOCK);
+ probePrepareHighestIndex, wantedGroups, PROBE_LOCK,
+ allocateOsRequirement);
if (probeAllocation.fulfilledAndNoChanges()) {
List<Node> acceptedNodes = probeAllocation.finalNodes();
surplusActiveNodes.removeAll(acceptedNodes);
@@ -80,10 +85,16 @@ public class GroupPreparer {
Mutex allocationLock = nodeRepository.lockUnallocated()) {
NodeAllocation allocation = prepareAllocation(application, cluster, requestedNodes, surplusActiveNodes,
- highestIndex, wantedGroups, allocationLock);
+ highestIndex, wantedGroups, allocationLock, allocateOsRequirement);
if (nodeRepository.zone().getCloud().dynamicProvisioning()) {
- Version osVersion = nodeRepository.osVersions().targetFor(NodeType.host).orElse(Version.emptyVersion);
+ final Version osVersion;
+ if (allocateOsRequirement.equals("rhel8")) {
+ osVersion = new Version(8);
+ } else {
+ osVersion = nodeRepository.osVersions().targetFor(NodeType.host).orElse(Version.emptyVersion);
+ }
+
List<ProvisionedHost> provisionedHosts = allocation.getFulfilledDockerDeficit()
.map(deficit -> hostProvisioner.get().provisionHosts(nodeRepository.database().getProvisionIndexes(deficit.getCount()),
deficit.getFlavor(),
@@ -122,13 +133,10 @@ public class GroupPreparer {
private NodeAllocation prepareAllocation(ApplicationId application, ClusterSpec cluster, NodeSpec requestedNodes,
List<Node> surplusActiveNodes, MutableInteger highestIndex, int wantedGroups,
- Mutex allocationLock) {
+ Mutex allocationLock, String allocateOsRequirement) {
LockedNodeList allNodes = nodeRepository.list(allocationLock);
NodeAllocation allocation = new NodeAllocation(allNodes, application, cluster, requestedNodes,
highestIndex, nodeRepository);
- String allocateOsRequirement = allocateOsRequirementFlag
- .with(FetchVector.Dimension.APPLICATION_ID, application.serializedForm())
- .value();
NodePrioritizer prioritizer = new NodePrioritizer(
allNodes, application, cluster, requestedNodes, wantedGroups,
nodeRepository.zone().getCloud().dynamicProvisioning(), nodeRepository.nameResolver(),
diff --git a/persistence/src/vespa/persistence/spi/bucket.h b/persistence/src/vespa/persistence/spi/bucket.h
index 30393109cc9..507cc80ad76 100644
--- a/persistence/src/vespa/persistence/spi/bucket.h
+++ b/persistence/src/vespa/persistence/spi/bucket.h
@@ -21,12 +21,12 @@ public:
explicit Bucket(const document::Bucket& b) noexcept
: _bucket(b) {}
- const document::Bucket &getBucket() const { return _bucket; }
- document::BucketId getBucketId() const { return _bucket.getBucketId(); }
- document::BucketSpace getBucketSpace() const { return _bucket.getBucketSpace(); }
+ const document::Bucket &getBucket() const noexcept { return _bucket; }
+ document::BucketId getBucketId() const noexcept { return _bucket.getBucketId(); }
+ document::BucketSpace getBucketSpace() const noexcept { return _bucket.getBucketSpace(); }
/** Convert easily to a document bucket id to make class easy to use. */
- operator document::BucketId() const { return _bucket.getBucketId(); }
+ operator document::BucketId() const noexcept { return _bucket.getBucketId(); }
bool operator==(const Bucket& o) const noexcept {
return (_bucket == o._bucket);
diff --git a/searchcommon/src/vespa/searchcommon/common/CMakeLists.txt b/searchcommon/src/vespa/searchcommon/common/CMakeLists.txt
index 23e6e8dd394..bf5686280e6 100644
--- a/searchcommon/src/vespa/searchcommon/common/CMakeLists.txt
+++ b/searchcommon/src/vespa/searchcommon/common/CMakeLists.txt
@@ -1,7 +1,9 @@
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
vespa_add_library(searchcommon_searchcommon_common OBJECT
SOURCES
+ compaction_strategy.cpp
datatype.cpp
+ growstrategy.cpp
schema.cpp
schemaconfigurer.cpp
DEPENDS
diff --git a/searchcommon/src/vespa/searchcommon/common/compaction_strategy.cpp b/searchcommon/src/vespa/searchcommon/common/compaction_strategy.cpp
new file mode 100644
index 00000000000..c3377ed5857
--- /dev/null
+++ b/searchcommon/src/vespa/searchcommon/common/compaction_strategy.cpp
@@ -0,0 +1,15 @@
+// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+
+#include "compaction_strategy.h"
+#include <iostream>
+namespace search {
+
+std::ostream& operator<<(std::ostream& os, const CompactionStrategy& compaction_strategy)
+{
+ os << "{maxDeadBytesRatio=" << compaction_strategy.getMaxDeadBytesRatio() <<
+ ", maxDeadAddressSpaceRatio=" << compaction_strategy.getMaxDeadAddressSpaceRatio() <<
+ "}";
+ return os;
+}
+
+}
diff --git a/searchcommon/src/vespa/searchcommon/common/compaction_strategy.h b/searchcommon/src/vespa/searchcommon/common/compaction_strategy.h
index 0b3caf44481..ae354a4c4d2 100644
--- a/searchcommon/src/vespa/searchcommon/common/compaction_strategy.h
+++ b/searchcommon/src/vespa/searchcommon/common/compaction_strategy.h
@@ -3,6 +3,7 @@
#pragma once
#include <stdint.h>
+#include <iosfwd>
namespace search {
@@ -34,4 +35,6 @@ public:
bool operator!=(const CompactionStrategy & rhs) const { return !(operator==(rhs)); }
};
+std::ostream& operator<<(std::ostream& os, const CompactionStrategy& compaction_strategy);
+
} // namespace search
diff --git a/searchcommon/src/vespa/searchcommon/common/growstrategy.cpp b/searchcommon/src/vespa/searchcommon/common/growstrategy.cpp
new file mode 100644
index 00000000000..534be3060a7
--- /dev/null
+++ b/searchcommon/src/vespa/searchcommon/common/growstrategy.cpp
@@ -0,0 +1,18 @@
+// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+
+#include "growstrategy.h"
+#include <iostream>
+
+namespace search {
+
+std::ostream& operator<<(std::ostream& os, const GrowStrategy& grow_strategy)
+{
+ os << "{docsInitialCapacity=" << grow_strategy.getDocsInitialCapacity() <<
+ ", docsGrowFactor=" << grow_strategy.getDocsGrowFactor() <<
+ ", docsGrowDelta=" << grow_strategy.getDocsGrowDelta() <<
+ ", multiValueAllocGrowFactor=" << grow_strategy.getMultiValueAllocGrowFactor() <<
+ "}";
+ return os;
+}
+
+}
diff --git a/searchcommon/src/vespa/searchcommon/common/growstrategy.h b/searchcommon/src/vespa/searchcommon/common/growstrategy.h
index 4194541b8e5..f7c5c030d95 100644
--- a/searchcommon/src/vespa/searchcommon/common/growstrategy.h
+++ b/searchcommon/src/vespa/searchcommon/common/growstrategy.h
@@ -4,6 +4,7 @@
#include <vespa/vespalib/util/growstrategy.h>
#include <cstdint>
+#include <iosfwd>
namespace search {
@@ -18,10 +19,10 @@ public:
GrowStrategy()
: GrowStrategy(1024, 0.5, 0, 0.2)
{}
- GrowStrategy(uint32_t docsInitialCapacity, float docsGrowPercent,
+ GrowStrategy(uint32_t docsInitialCapacity, float docsGrowFactor,
uint32_t docsGrowDelta, float multiValueAllocGrowFactor)
: _docsInitialCapacity(docsInitialCapacity),
- _docsGrowFactor(docsGrowPercent),
+ _docsGrowFactor(docsGrowFactor),
_docsGrowDelta(docsGrowDelta),
_multiValueAllocGrowFactor(multiValueAllocGrowFactor)
{
@@ -54,5 +55,7 @@ public:
}
};
+std::ostream& operator<<(std::ostream& os, const GrowStrategy& grow_strategy);
+
}
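
The new operator<< overloads for CompactionStrategy and GrowStrategy above exist so that these types print readably when streamed, for example in test-failure output or log messages. A minimal sketch of exercising them (not part of the diff; it assumes the searchcommon headers are on the include path, and reuses constructor arguments that appear elsewhere in this change):

#include <vespa/searchcommon/common/compaction_strategy.h>
#include <vespa/searchcommon/common/growstrategy.h>
#include <iostream>

int main() {
    // Arguments mirror the growstrategy.h defaults and alloc_config_test.cpp below.
    search::GrowStrategy grow(1024, 0.5, 0, 0.2);      // initial docs, grow factor, grow delta, multi-value grow factor
    search::CompactionStrategy compaction(0.2, 0.25);  // max dead bytes ratio, max dead address space ratio
    std::cout << grow << '\n' << compaction << '\n';   // prints the "{...}" forms defined in the new .cpp files
    return 0;
}

With these in place, an equality-assertion failure on a type that embeds them (such as AllocStrategy later in this diff) can show the actual field values instead of an opaque object.
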
diff --git a/searchcore/CMakeLists.txt b/searchcore/CMakeLists.txt
index 04cface5d40..d77bc69ba62 100644
--- a/searchcore/CMakeLists.txt
+++ b/searchcore/CMakeLists.txt
@@ -69,6 +69,7 @@ vespa_define_module(
src/tests/proton/attribute/imported_attributes_repo
src/tests/proton/bucketdb/bucketdb
src/tests/proton/common
+ src/tests/proton/common/alloc_config
src/tests/proton/common/attribute_updater
src/tests/proton/common/document_type_inspector
src/tests/proton/common/hw_info_sampler
diff --git a/searchcore/src/apps/tests/persistenceconformance_test.cpp b/searchcore/src/apps/tests/persistenceconformance_test.cpp
index 4715ff80d03..b47a1954c6f 100644
--- a/searchcore/src/apps/tests/persistenceconformance_test.cpp
+++ b/searchcore/src/apps/tests/persistenceconformance_test.cpp
@@ -13,6 +13,7 @@
#include <vespa/document/repo/documenttyperepo.h>
#include <vespa/document/test/make_bucket_space.h>
#include <vespa/searchcommon/common/schemaconfigurer.h>
+#include <vespa/searchcore/proton/common/alloc_config.h>
#include <vespa/searchcore/proton/common/hw_info.h>
#include <vespa/searchcore/proton/matching/querylimiter.h>
#include <vespa/searchcore/proton/metrics/metricswireservice.h>
@@ -145,6 +146,7 @@ public:
std::make_shared<DocumentDBMaintenanceConfig>(),
search::LogDocumentStore::Config(),
std::make_shared<const ThreadingServiceConfig>(ThreadingServiceConfig::make(1)),
+ std::make_shared<const AllocConfig>(),
"client",
docTypeName.getName());
}
diff --git a/searchcore/src/apps/vespa-feed-bm/vespa_feed_bm.cpp b/searchcore/src/apps/vespa-feed-bm/vespa_feed_bm.cpp
index 90942e9aef4..6cca2e4bd48 100644
--- a/searchcore/src/apps/vespa-feed-bm/vespa_feed_bm.cpp
+++ b/searchcore/src/apps/vespa-feed-bm/vespa_feed_bm.cpp
@@ -38,6 +38,7 @@
#include <vespa/messagebus/testlib/slobrok.h>
#include <vespa/metrics/config-metricsmanager.h>
#include <vespa/searchcommon/common/schemaconfigurer.h>
+#include <vespa/searchcore/proton/common/alloc_config.h>
#include <vespa/searchcore/proton/common/hw_info.h>
#include <vespa/searchcore/proton/matching/querylimiter.h>
#include <vespa/searchcore/proton/metrics/metricswireservice.h>
@@ -198,6 +199,7 @@ std::shared_ptr<DocumentDBConfig> make_document_db_config(std::shared_ptr<Docume
std::make_shared<DocumentDBMaintenanceConfig>(),
search::LogDocumentStore::Config(),
std::make_shared<const ThreadingServiceConfig>(ThreadingServiceConfig::make(1)),
+ std::make_shared<const AllocConfig>(),
"client",
doc_type_name.getName());
}
diff --git a/searchcore/src/tests/proton/attribute/attribute_manager/attribute_manager_test.cpp b/searchcore/src/tests/proton/attribute/attribute_manager/attribute_manager_test.cpp
index d25a234c6f8..3987a8685ea 100644
--- a/searchcore/src/tests/proton/attribute/attribute_manager/attribute_manager_test.cpp
+++ b/searchcore/src/tests/proton/attribute/attribute_manager/attribute_manager_test.cpp
@@ -232,8 +232,7 @@ struct ParallelAttributeManager
InitializerTask::SP documentMetaStoreInitTask;
BucketDBOwner::SP bucketDbOwner;
DocumentMetaStore::SP documentMetaStore;
- search::GrowStrategy attributeGrow;
- size_t attributeGrowNumDocs;
+ AllocStrategy alloc_strategy;
bool fastAccessAttributesOnly;
std::shared_ptr<AttributeManager::SP> mgr;
vespalib::ThreadStackExecutor masterExecutor;
@@ -250,15 +249,14 @@ ParallelAttributeManager::ParallelAttributeManager(search::SerialNum configSeria
: documentMetaStoreInitTask(std::make_shared<DummyInitializerTask>()),
bucketDbOwner(std::make_shared<BucketDBOwner>()),
documentMetaStore(std::make_shared<DocumentMetaStore>(bucketDbOwner)),
- attributeGrow(),
- attributeGrowNumDocs(1),
+ alloc_strategy(),
fastAccessAttributesOnly(false),
mgr(std::make_shared<AttributeManager::SP>()),
masterExecutor(1, 128 * 1024),
master(masterExecutor),
initializer(std::make_shared<AttributeManagerInitializer>(configSerialNum, documentMetaStoreInitTask,
documentMetaStore, baseAttrMgr, attrCfg,
- attributeGrow, attributeGrowNumDocs,
+ alloc_strategy,
fastAccessAttributesOnly, master, mgr))
{
documentMetaStore->setCommittedDocIdLimit(docIdLimit);
diff --git a/searchcore/src/tests/proton/attribute/attribute_test.cpp b/searchcore/src/tests/proton/attribute/attribute_test.cpp
index b67eaec1a8d..ebd3e27aeca 100644
--- a/searchcore/src/tests/proton/attribute/attribute_test.cpp
+++ b/searchcore/src/tests/proton/attribute/attribute_test.cpp
@@ -535,7 +535,7 @@ public:
AttributeCollectionSpecFactory _factory;
AttributeCollectionSpecTest(bool fastAccessOnly)
: _builder(),
- _factory(search::GrowStrategy(), 100, fastAccessOnly)
+ _factory(AllocStrategy(search::GrowStrategy(), search::CompactionStrategy(), 100), fastAccessOnly)
{
addAttribute("a1", false);
addAttribute("a2", true);
diff --git a/searchcore/src/tests/proton/common/alloc_config/CMakeLists.txt b/searchcore/src/tests/proton/common/alloc_config/CMakeLists.txt
new file mode 100644
index 00000000000..26a2dc72cc2
--- /dev/null
+++ b/searchcore/src/tests/proton/common/alloc_config/CMakeLists.txt
@@ -0,0 +1,9 @@
+# Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+vespa_add_executable(searchcore_alloc_config_test_app TEST
+ SOURCES
+ alloc_config_test.cpp
+ DEPENDS
+ searchcore_pcommon
+ GTest::GTest
+)
+vespa_add_test(NAME searchcore_alloc_config_test_app COMMAND searchcore_alloc_config_test_app)
diff --git a/searchcore/src/tests/proton/common/alloc_config/alloc_config_test.cpp b/searchcore/src/tests/proton/common/alloc_config/alloc_config_test.cpp
new file mode 100644
index 00000000000..18a0ee47a47
--- /dev/null
+++ b/searchcore/src/tests/proton/common/alloc_config/alloc_config_test.cpp
@@ -0,0 +1,35 @@
+// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+
+#include <vespa/searchcore/proton/common/alloc_config.h>
+#include <vespa/searchcore/proton/common/subdbtype.h>
+#include <vespa/vespalib/gtest/gtest.h>
+
+using proton::AllocConfig;
+using proton::AllocStrategy;
+using proton::SubDbType;
+using search::CompactionStrategy;
+using search::GrowStrategy;
+
+namespace {
+
+CompactionStrategy baseline_compaction_strategy(0.2, 0.25);
+
+GrowStrategy make_grow_strategy(uint32_t initial_docs) {
+ return GrowStrategy(initial_docs, 0.1, 1, 0.15);
+}
+
+AllocStrategy make_alloc_strategy(uint32_t initial_docs) {
+ return AllocStrategy(make_grow_strategy(initial_docs), baseline_compaction_strategy, 10000);
+}
+
+};
+
+TEST(AllocConfigTest, can_make_allocation_strategy_for_sub_dbs)
+{
+ AllocConfig config(make_alloc_strategy(10000000), 5, 2);
+ EXPECT_EQ(make_alloc_strategy(20000000), config.make_alloc_strategy(SubDbType::READY));
+ EXPECT_EQ(make_alloc_strategy(100000), config.make_alloc_strategy(SubDbType::REMOVED));
+ EXPECT_EQ(make_alloc_strategy(30000000), config.make_alloc_strategy(SubDbType::NOTREADY));
+}
+
+GTEST_MAIN_RUN_ALL_TESTS()
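
The expected capacities in the test above follow from the scaling rule implemented by AllocConfig::make_alloc_strategy() later in this diff: the READY sub-DB multiplies the baseline initial capacity by the number of searchable copies, NOTREADY by (redundancy minus searchable copies), and REMOVED is cut to one hundredth with a floor of 1024. A worked sketch of the arithmetic, using the test's numbers (not part of the diff):

#include <algorithm>
#include <cstdint>
#include <iostream>

int main() {
    // Baseline initial docs = 10'000'000, redundancy = 5, searchable copies = 2.
    uint32_t baseline = 10000000;
    uint32_t ready    = baseline * 2;                              // x searchable copies          -> 20000000
    uint32_t notready = baseline * (5 - 2);                        // x (redundancy - copies)      -> 30000000
    uint32_t removed  = std::max<uint32_t>(1024, baseline / 100);  // one hundredth, at least 1024 -> 100000
    std::cout << ready << ' ' << notready << ' ' << removed << '\n';
    return 0;
}
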
diff --git a/searchcore/src/tests/proton/documentdb/configurer/configurer_test.cpp b/searchcore/src/tests/proton/documentdb/configurer/configurer_test.cpp
index d231040aeda..8f6b2f0bc10 100644
--- a/searchcore/src/tests/proton/documentdb/configurer/configurer_test.cpp
+++ b/searchcore/src/tests/proton/documentdb/configurer/configurer_test.cpp
@@ -613,6 +613,12 @@ TEST("require that attribute manager (imported attributes) should change when vi
EXPECT_TRUE(params.shouldAttributeManagerChange());
}
+TEST("require that attribute manager should change when alloc config has changed")
+{
+ ReconfigParams params(CCR().set_alloc_config_changed(true));
+ EXPECT_TRUE(params.shouldAttributeManagerChange());
+}
+
void
assertMaintenanceControllerShouldNotChange(DocumentDBConfig::ComparisonResult result)
{
@@ -684,6 +690,7 @@ TEST("require that subdbs should change if relevant config changed")
TEST_DO(assertSubDbsShouldChange(CCR().setRankingConstantsChanged(true)));
TEST_DO(assertSubDbsShouldChange(CCR().setOnnxModelsChanged(true)));
TEST_DO(assertSubDbsShouldChange(CCR().setSchemaChanged(true)));
+ TEST_DO(assertSubDbsShouldChange(CCR().set_alloc_config_changed(true)));
}
TEST_MAIN()
diff --git a/searchcore/src/tests/proton/documentdb/document_subdbs/document_subdbs_test.cpp b/searchcore/src/tests/proton/documentdb/document_subdbs/document_subdbs_test.cpp
index 9cdd984b152..40a9656ae41 100644
--- a/searchcore/src/tests/proton/documentdb/document_subdbs/document_subdbs_test.cpp
+++ b/searchcore/src/tests/proton/documentdb/document_subdbs/document_subdbs_test.cpp
@@ -123,8 +123,7 @@ struct MyStoreOnlyConfig
: _cfg(DocTypeName(DOCTYPE_NAME),
SUB_NAME,
BASE_DIR,
- search::GrowStrategy(),
- 0, 0, SubDbType::READY)
+ 0, SubDbType::READY)
{
}
};
diff --git a/searchcore/src/tests/proton/documentdb/documentbucketmover/documentbucketmover_test.cpp b/searchcore/src/tests/proton/documentdb/documentbucketmover/documentbucketmover_test.cpp
index cabcd33b2dd..a952efdecdc 100644
--- a/searchcore/src/tests/proton/documentdb/documentbucketmover/documentbucketmover_test.cpp
+++ b/searchcore/src/tests/proton/documentdb/documentbucketmover/documentbucketmover_test.cpp
@@ -44,14 +44,8 @@ using BucketIdSet = std::set<BucketId>;
using BucketIdVector = BucketId::List;
using DocumentVector = std::vector<Document::SP>;
using MoveOperationVector = std::vector<MoveOperation>;
-using ScanItr = BucketMoveJob::ScanIterator;
-
-namespace {
-
-const uint32_t FIRST_SCAN_PASS = 1;
-const uint32_t SECOND_SCAN_PASS = 2;
-
-}
+using ScanItr = bucketdb::ScanIterator;
+using ScanPass = ScanItr::Pass;
struct MyMoveOperationLimiter : public IMoveOperationLimiter {
uint32_t beginOpCount;
@@ -346,9 +340,8 @@ struct ScanFixtureBase
return ScanItr(_bucketDB->takeGuard(), BucketId());
}
- ScanItr getItr(BucketId bucket, BucketId endBucket = BucketId(), uint32_t pass = FIRST_SCAN_PASS) {
- return ScanItr(_bucketDB->takeGuard(), pass,
- bucket, endBucket);
+ ScanItr getItr(BucketId bucket, BucketId endBucket = BucketId(), ScanPass pass = ScanPass::FIRST) {
+ return ScanItr(_bucketDB->takeGuard(), pass, bucket, endBucket);
}
};
@@ -451,12 +444,12 @@ TEST_F("require that we can iterate from the middle of not ready buckets", ScanF
{
BucketId bucket = f._notReady.bucket(2);
{
- ScanItr itr = f.getItr(bucket, bucket, FIRST_SCAN_PASS);
+ ScanItr itr = f.getItr(bucket, bucket, ScanPass::FIRST);
assertEquals(BucketVector().
add(f._notReady.bucket(4)), itr, SubDbType::NOTREADY);
}
{
- ScanItr itr = f.getItr(BucketId(), bucket, SECOND_SCAN_PASS);
+ ScanItr itr = f.getItr(BucketId(), bucket, ScanPass::SECOND);
assertEquals(BucketVector().
add(f._notReady.bucket(2)), itr, SubDbType::NOTREADY);
}
@@ -478,12 +471,12 @@ TEST_F("require that we can iterate from the middle of ready buckets", ScanFixtu
add(f._notReady.bucket(4)), itr, SubDbType::NOTREADY);
}
{
- ScanItr itr = f.getItr(bucket, bucket, FIRST_SCAN_PASS);
+ ScanItr itr = f.getItr(bucket, bucket, ScanPass::FIRST);
assertEquals(BucketVector().
add(f._ready.bucket(8)), itr, SubDbType::READY);
}
{
- ScanItr itr = f.getItr(BucketId(), bucket, SECOND_SCAN_PASS);
+ ScanItr itr = f.getItr(BucketId(), bucket, ScanPass::SECOND);
assertEquals(BucketVector().
add(f._ready.bucket(6)), itr, SubDbType::READY);
}
diff --git a/searchcore/src/tests/proton/flushengine/flushengine_test.cpp b/searchcore/src/tests/proton/flushengine/flushengine_test.cpp
index b1c188b2f9f..d5823a8e055 100644
--- a/searchcore/src/tests/proton/flushengine/flushengine_test.cpp
+++ b/searchcore/src/tests/proton/flushengine/flushengine_test.cpp
@@ -42,8 +42,7 @@ public:
public:
SimpleExecutor()
: _done()
- {
- }
+ { }
Task::UP
execute(Task::UP task) override
@@ -83,8 +82,7 @@ public:
SimpleHandler &handler)
: _task(std::move(task)),
_handler(handler)
- {
- }
+ { }
search::SerialNum getFlushSerial() const override {
return _task->getFlushSerial();
@@ -95,19 +93,15 @@ class WrappedFlushTarget : public FlushTargetProxy
{
SimpleHandler &_handler;
public:
- WrappedFlushTarget(const IFlushTarget::SP &target,
- SimpleHandler &handler)
+ WrappedFlushTarget(const IFlushTarget::SP &target, SimpleHandler &handler)
: FlushTargetProxy(target),
_handler(handler)
- {
- }
+ { }
- Task::UP initFlush(SerialNum currentSerial, std::shared_ptr<search::IFlushToken> flush_token) override
- {
+ Task::UP initFlush(SerialNum currentSerial, std::shared_ptr<search::IFlushToken> flush_token) override {
Task::UP task(_target->initFlush(currentSerial, std::move(flush_token)));
if (task) {
- return std::make_unique<WrappedFlushTask>(std::move(task),
- _handler);
+ return std::make_unique<WrappedFlushTask>(std::move(task), _handler);
}
return task;
}
@@ -140,33 +134,25 @@ public:
_lock(),
_done(targets.size()),
_flushDoneHistory()
- {
- }
+ { }
- search::SerialNum
- getCurrentSerialNumber() const override
- {
- LOG(info, "SimpleHandler(%s)::getCurrentSerialNumber()",
- getName().c_str());
+ search::SerialNum getCurrentSerialNumber() const override {
+ LOG(info, "SimpleHandler(%s)::getCurrentSerialNumber()", getName().c_str());
return _currentSerial;
}
std::vector<IFlushTarget::SP>
- getFlushTargets() override
- {
- LOG(info, "SimpleHandler(%s)::getFlushTargets()",
- getName().c_str());
+ getFlushTargets() override {
+ LOG(info, "SimpleHandler(%s)::getFlushTargets()", getName().c_str());
std::vector<IFlushTarget::SP> wrappedTargets;
for (const auto &target : _targets) {
- wrappedTargets.push_back(std::make_shared<WrappedFlushTarget>
- (target, *this));
+ wrappedTargets.push_back(std::make_shared<WrappedFlushTarget>(target, *this));
}
return wrappedTargets;
}
// Called once by flush engine thread for each task done
- void taskDone()
- {
+ void taskDone() {
std::lock_guard<std::mutex> guard(_lock);
++_pendingDone;
}
@@ -174,12 +160,9 @@ public:
// Called by flush engine master thread after flush handler is
// added to flush engine and when one or more flush tasks related
// to flush handler have completed.
- void
- flushDone(search::SerialNum oldestSerial) override
- {
+ void flushDone(search::SerialNum oldestSerial) override {
std::lock_guard<std::mutex> guard(_lock);
- LOG(info, "SimpleHandler(%s)::flushDone(%" PRIu64 ")",
- getName().c_str(), oldestSerial);
+ LOG(info, "SimpleHandler(%s)::flushDone(%" PRIu64 ")", getName().c_str(), oldestSerial);
_oldestSerial = std::max(_oldestSerial, oldestSerial);
_flushDoneHistory.push_back(oldestSerial);
while (_pendingDone > 0) {
@@ -188,8 +171,7 @@ public:
}
}
- FlushDoneHistory getFlushDoneHistory()
- {
+ FlushDoneHistory getFlushDoneHistory() {
std::lock_guard<std::mutex> guard(_lock);
return _flushDoneHistory;
}
@@ -217,12 +199,11 @@ public:
search::SerialNum &currentSerial)
: _flushedSerial(flushedSerial), _currentSerial(currentSerial),
_start(start), _done(done), _proceed(proceed)
- {
- }
+ { }
void run() override {
_start.countDown();
- if (_proceed != NULL) {
+ if (_proceed != nullptr) {
_proceed->await();
}
_flushedSerial = _currentSerial;
@@ -270,8 +251,7 @@ public:
_taskStart(),
_taskDone(),
_task(std::move(task))
- {
- }
+ { }
SimpleTarget(search::SerialNum flushedSerial = 0, bool proceedImmediately = true)
: SimpleTarget("anon", flushedSerial, proceedImmediately)
@@ -316,8 +296,7 @@ public:
: SimpleTarget("anon"),
_mgain(false),
_serial(false)
- {
- }
+ { }
MemoryGain getApproxMemoryGain() const override {
LOG_ASSERT(_mgain == false);
@@ -389,8 +368,7 @@ public:
class NoFlushStrategy : public SimpleStrategy
{
- FlushContext::List getFlushTargets(const FlushContext::List &,
- const flushengine::TlsStatsMap &) const override {
+ FlushContext::List getFlushTargets(const FlushContext::List &, const flushengine::TlsStatsMap &) const override {
return FlushContext::List();
}
};
@@ -430,13 +408,11 @@ struct Fixture
: tlsStatsFactory(std::make_shared<SimpleTlsStatsFactory>()),
strategy(strategy_),
engine(tlsStatsFactory, strategy, numThreads, idleInterval)
- {
- }
+ { }
Fixture(uint32_t numThreads, vespalib::duration idleInterval)
: Fixture(numThreads, idleInterval, std::make_shared<SimpleStrategy>())
- {
- }
+ { }
void putFlushHandler(const vespalib::string &docTypeName, IFlushHandler::SP handler) {
engine.putFlushHandler(DocTypeName(docTypeName), handler);
@@ -446,17 +422,14 @@ struct Fixture
strategy->_targets.push_back(std::move(target));
}
- std::shared_ptr<SimpleHandler>
- addSimpleHandler(Targets targets)
- {
+ std::shared_ptr<SimpleHandler> addSimpleHandler(Targets targets) {
auto handler = std::make_shared<SimpleHandler>(targets, "handler", 20);
engine.putFlushHandler(DocTypeName("handler"), handler);
engine.start();
return handler;
}
- void assertOldestSerial(SimpleHandler &handler, search::SerialNum expOldestSerial)
- {
+ void assertOldestSerial(SimpleHandler &handler, search::SerialNum expOldestSerial) {
using namespace std::chrono_literals;
for (int pass = 0; pass < 600; ++pass) {
std::this_thread::sleep_for(100ms);
@@ -593,8 +566,7 @@ TEST_F("require that target can refuse flush", Fixture(2, IINTERVAL))
EXPECT_TRUE(!handler->_done.await(SHORT_TIMEOUT));
}
-TEST_F("require that targets are flushed when nothing new to flush",
- Fixture(2, IINTERVAL))
+TEST_F("require that targets are flushed when nothing new to flush", Fixture(2, IINTERVAL))
{
auto target = std::make_shared<SimpleTarget>("anon", 5); // oldest unflushed serial num = 5
auto handler = std::make_shared<SimpleHandler>(Targets({target}), "anon", 4); // current serial num = 4
@@ -640,7 +612,7 @@ TEST("require that threaded target works")
auto target = std::make_shared<ThreadedFlushTarget>(executor, getSerialNum, std::make_shared<SimpleTarget>());
EXPECT_FALSE(executor._done.await(SHORT_TIMEOUT));
- EXPECT_TRUE(target->initFlush(0, std::make_shared<search::FlushToken>()).get() != NULL);
+ EXPECT_TRUE(target->initFlush(0, std::make_shared<search::FlushToken>()));
EXPECT_TRUE(executor._done.await(LONG_TIMEOUT));
}
@@ -713,6 +685,30 @@ TEST_F("require that concurrency works", Fixture(2, 1ms))
target2->_proceed.countDown();
}
+TEST_F("require that concurrency works with triggerFlush", Fixture(2, 1ms))
+{
+ auto target1 = std::make_shared<SimpleTarget>("target1", 1, false);
+ auto target2 = std::make_shared<SimpleTarget>("target2", 2, false);
+ auto target3 = std::make_shared<SimpleTarget>("target3", 3, false);
+ auto handler = std::make_shared<SimpleHandler>(Targets({target1, target2, target3}), "handler", 9);
+ f.putFlushHandler("handler", handler);
+ std::thread thread([this]() { f.engine.triggerFlush(); });
+ std::this_thread::sleep_for(1s);
+ f.engine.start();
+
+ EXPECT_TRUE(target1->_initDone.await(LONG_TIMEOUT));
+ EXPECT_TRUE(target2->_initDone.await(LONG_TIMEOUT));
+ EXPECT_TRUE(!target3->_initDone.await(SHORT_TIMEOUT));
+ assertThatHandlersInCurrentSet(f.engine, {"handler.target1", "handler.target2"});
+ EXPECT_TRUE(!target3->_initDone.await(SHORT_TIMEOUT));
+ target1->_proceed.countDown();
+ EXPECT_TRUE(target1->_taskDone.await(LONG_TIMEOUT));
+ assertThatHandlersInCurrentSet(f.engine, {"handler.target2", "handler.target3"});
+ target3->_proceed.countDown();
+ target2->_proceed.countDown();
+ thread.join();
+}
+
TEST_F("require that state explorer can list flush targets", Fixture(1, 1ms))
{
auto target = std::make_shared<SimpleTarget>("target1", 100, false);
diff --git a/searchcore/src/tests/proton/proton_config_fetcher/proton_config_fetcher_test.cpp b/searchcore/src/tests/proton/proton_config_fetcher/proton_config_fetcher_test.cpp
index 93fe2f0ae24..2e53b97a878 100644
--- a/searchcore/src/tests/proton/proton_config_fetcher/proton_config_fetcher_test.cpp
+++ b/searchcore/src/tests/proton/proton_config_fetcher/proton_config_fetcher_test.cpp
@@ -6,7 +6,9 @@
#include <vespa/searchcore/proton/server/proton_config_fetcher.h>
#include <vespa/searchcore/proton/server/proton_config_snapshot.h>
#include <vespa/searchcore/proton/server/i_proton_configurer.h>
+#include <vespa/searchcore/proton/common/alloc_config.h>
#include <vespa/searchcore/proton/common/hw_info.h>
+#include <vespa/searchcore/proton/common/subdbtype.h>
#include <vespa/searchcore/config/config-ranking-constants.h>
#include <vespa/searchcore/config/config-onnx-models.h>
#include <vespa/searchsummary/config/config-juniperrc.h>
@@ -41,6 +43,8 @@ using document::DocumenttypesConfigBuilder;
using search::TuneFileDocumentDB;
using std::map;
using vespalib::VarHolder;
+using search::GrowStrategy;
+using search::CompactionStrategy;
struct DoctypeFixture {
using UP = std::unique_ptr<DoctypeFixture>;
@@ -389,6 +393,32 @@ TEST_FF("require that docstore config computes cachesize automatically if unset"
EXPECT_EQUAL(500000ul, config->getStoreConfig().getMaxCacheBytes());
}
+TEST_FF("require that allocation config is propagated",
+ ConfigTestFixture("test"),
+ DocumentDBConfigManager(f1.configId + "/test", "test"))
+{
+ f1.protonBuilder.distribution.redundancy = 5;
+ f1.protonBuilder.distribution.searchablecopies = 2;
+ f1.addDocType("test");
+ {
+ auto& allocation = f1.protonBuilder.documentdb.back().allocation;
+ allocation.initialnumdocs = 10000000;
+ allocation.growfactor = 0.1;
+ allocation.growbias = 1;
+ allocation.amortizecount = 10000;
+ allocation.multivaluegrowfactor = 0.15;
+ allocation.maxDeadBytesRatio = 0.25;
+ allocation.maxDeadAddressSpaceRatio = 0.3;
+ }
+ auto config = getDocumentDBConfig(f1, f2);
+ {
+ auto& alloc_config = config->get_alloc_config();
+ EXPECT_EQUAL(AllocStrategy(GrowStrategy(20000000, 0.1, 1, 0.15), CompactionStrategy(0.25, 0.3), 10000), alloc_config.make_alloc_strategy(SubDbType::READY));
+ EXPECT_EQUAL(AllocStrategy(GrowStrategy(100000, 0.1, 1, 0.15), CompactionStrategy(0.25, 0.3), 10000), alloc_config.make_alloc_strategy(SubDbType::REMOVED));
+ EXPECT_EQUAL(AllocStrategy(GrowStrategy(30000000, 0.1, 1, 0.15), CompactionStrategy(0.25, 0.3), 10000), alloc_config.make_alloc_strategy(SubDbType::NOTREADY));
+ }
+}
+
TEST("test HwInfo equality") {
EXPECT_TRUE(HwInfo::Cpu(1) == HwInfo::Cpu(1));
EXPECT_FALSE(HwInfo::Cpu(1) == HwInfo::Cpu(2));
diff --git a/searchcore/src/tests/proton/proton_configurer/proton_configurer_test.cpp b/searchcore/src/tests/proton/proton_configurer/proton_configurer_test.cpp
index 5a0cfc18c78..4f8e8e8aa8c 100644
--- a/searchcore/src/tests/proton/proton_configurer/proton_configurer_test.cpp
+++ b/searchcore/src/tests/proton/proton_configurer/proton_configurer_test.cpp
@@ -9,6 +9,7 @@
#include <vespa/config-summarymap.h>
#include <vespa/document/repo/documenttyperepo.h>
#include <vespa/fileacquirer/config-filedistributorrpc.h>
+#include <vespa/searchcore/proton/common/alloc_config.h>
#include <vespa/searchcore/proton/server/bootstrapconfig.h>
#include <vespa/searchcore/proton/server/bootstrapconfigmanager.h>
#include <vespa/searchcore/proton/server/documentdbconfigmanager.h>
@@ -103,6 +104,7 @@ struct DBConfigFixture {
std::make_shared<DocumentDBMaintenanceConfig>(),
search::LogDocumentStore::Config(),
std::make_shared<const ThreadingServiceConfig>(ThreadingServiceConfig::make(1)),
+ std::make_shared<const AllocConfig>(),
configId,
docTypeName);
}
diff --git a/searchcore/src/vespa/searchcore/config/proton.def b/searchcore/src/vespa/searchcore/config/proton.def
index 17d0ef8ad37..fb845982d74 100644
--- a/searchcore/src/vespa/searchcore/config/proton.def
+++ b/searchcore/src/vespa/searchcore/config/proton.def
@@ -306,6 +306,12 @@ documentdb[].allocation.amortizecount int default=10000
## used in multi-value attribute vectors to store underlying values.
documentdb[].allocation.multivaluegrowfactor double default=0.2
+## The ratio of used bytes that can be dead before attempting to perform compaction.
+documentdb[].allocation.max_dead_bytes_ratio double default=0.2
+
+## The ratio of used address space that can be dead before attempting to perform compaction.
+documentdb[].allocation.max_dead_address_space_ratio double default=0.2
+
## The interval of when periodic tasks should be run
periodic.interval double default=3600.0
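
The two new allocation settings end up in the per-document-type CompactionStrategy, as exercised by the proton_config_fetcher test earlier in this diff. A rough sketch of the mapping (not part of the diff; AllocationCfg is a stand-in for the generated documentdb[].allocation config struct, and the real wiring is done by DocumentDBConfigManager):

#include <vespa/searchcommon/common/compaction_strategy.h>
#include <iostream>

// Stand-in for the generated documentdb[].allocation config struct.
struct AllocationCfg {
    double maxDeadBytesRatio = 0.2;        // max_dead_bytes_ratio
    double maxDeadAddressSpaceRatio = 0.2; // max_dead_address_space_ratio
};

int main() {
    AllocationCfg allocation;
    search::CompactionStrategy compaction(allocation.maxDeadBytesRatio,
                                          allocation.maxDeadAddressSpaceRatio);
    std::cout << compaction << '\n'; // uses the operator<< added earlier in this diff
    return 0;
}
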
diff --git a/searchcore/src/vespa/searchcore/proton/attribute/attribute_collection_spec_factory.cpp b/searchcore/src/vespa/searchcore/proton/attribute/attribute_collection_spec_factory.cpp
index 23701bfdd5d..4189688ea81 100644
--- a/searchcore/src/vespa/searchcore/proton/attribute/attribute_collection_spec_factory.cpp
+++ b/searchcore/src/vespa/searchcore/proton/attribute/attribute_collection_spec_factory.cpp
@@ -9,11 +9,9 @@ using search::GrowStrategy;
namespace proton {
AttributeCollectionSpecFactory::AttributeCollectionSpecFactory(
- const search::GrowStrategy &growStrategy,
- size_t growNumDocs,
+ const AllocStrategy &alloc_strategy,
bool fastAccessOnly)
- : _growStrategy(growStrategy),
- _growNumDocs(growNumDocs),
+ : _alloc_strategy(alloc_strategy),
_fastAccessOnly(fastAccessOnly)
{
}
@@ -25,8 +23,8 @@ AttributeCollectionSpecFactory::create(const AttributesConfig &attrCfg,
{
AttributeCollectionSpec::AttributeList attrs;
// Amortize memory spike cost over N docs
- const size_t skew = _growNumDocs/(attrCfg.attribute.size()+1);
- GrowStrategy grow = _growStrategy;
+ const size_t skew = _alloc_strategy.get_amortize_count()/(attrCfg.attribute.size()+1);
+ GrowStrategy grow = _alloc_strategy.get_grow_strategy();
grow.setDocsInitialCapacity(std::max(grow.getDocsInitialCapacity(),docIdLimit));
for (const auto &attr : attrCfg.attribute) {
search::attribute::Config cfg = ConfigConverter::convert(attr);
@@ -35,6 +33,7 @@ AttributeCollectionSpecFactory::create(const AttributesConfig &attrCfg,
}
grow.setDocsGrowDelta(grow.getDocsGrowDelta() + skew);
cfg.setGrowStrategy(grow);
+ cfg.setCompactionStrategy(_alloc_strategy.get_compaction_strategy());
attrs.push_back(AttributeSpec(attr.name, cfg));
}
return std::make_unique<AttributeCollectionSpec>(attrs, docIdLimit, serialNum);
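
The skew computed above staggers attribute growth so that the attribute vectors do not all resize at the same document count (the "amortize memory spike cost" comment): each successive attribute gets a grow delta one skew step larger than the previous one. An illustrative calculation (not part of the diff), assuming the default amortize count of 10000 and four attributes:

#include <cstdio>

int main() {
    const unsigned amortize_count = 10000, num_attributes = 4, base_delta = 0;
    const unsigned skew = amortize_count / (num_attributes + 1);  // 2000
    unsigned delta = base_delta;
    for (unsigned i = 0; i < num_attributes; ++i) {
        delta += skew;  // mirrors grow.setDocsGrowDelta(grow.getDocsGrowDelta() + skew)
        std::printf("attribute %u: docsGrowDelta = %u\n", i, delta);  // 2000, 4000, 6000, 8000
    }
    return 0;
}
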
diff --git a/searchcore/src/vespa/searchcore/proton/attribute/attribute_collection_spec_factory.h b/searchcore/src/vespa/searchcore/proton/attribute/attribute_collection_spec_factory.h
index bfa6681d6f2..074c56448f3 100644
--- a/searchcore/src/vespa/searchcore/proton/attribute/attribute_collection_spec_factory.h
+++ b/searchcore/src/vespa/searchcore/proton/attribute/attribute_collection_spec_factory.h
@@ -4,7 +4,7 @@
#include "attribute_collection_spec.h"
#include <vespa/searchcommon/attribute/config.h>
-#include <vespa/searchcommon/common/growstrategy.h>
+#include <vespa/searchcore/proton/common/alloc_strategy.h>
#include <vespa/searchlib/common/serialnum.h>
#include <vespa/config-attributes.h>
@@ -19,13 +19,11 @@ class AttributeCollectionSpecFactory
private:
typedef vespa::config::search::AttributesConfig AttributesConfig;
- const search::GrowStrategy _growStrategy;
- const size_t _growNumDocs;
+ const AllocStrategy _alloc_strategy;
const bool _fastAccessOnly;
public:
- AttributeCollectionSpecFactory(const search::GrowStrategy &growStrategy,
- size_t growNumDocs,
+ AttributeCollectionSpecFactory(const AllocStrategy& alloc_strategy,
bool fastAccessOnly);
AttributeCollectionSpec::UP create(const AttributesConfig &attrCfg,
diff --git a/searchcore/src/vespa/searchcore/proton/attribute/attribute_manager_initializer.cpp b/searchcore/src/vespa/searchcore/proton/attribute/attribute_manager_initializer.cpp
index 9198cfdafab..109a4bb2192 100644
--- a/searchcore/src/vespa/searchcore/proton/attribute/attribute_manager_initializer.cpp
+++ b/searchcore/src/vespa/searchcore/proton/attribute/attribute_manager_initializer.cpp
@@ -7,6 +7,7 @@
#include <future>
using search::AttributeVector;
+using search::CompactionStrategy;
using search::GrowStrategy;
using search::SerialNum;
using vespa::config::search::AttributesConfig;
@@ -135,7 +136,7 @@ AttributeCollectionSpec::UP
AttributeManagerInitializer::createAttributeSpec() const
{
uint32_t docIdLimit = 1; // The real docIdLimit is used after attributes are loaded to pad them
- AttributeCollectionSpecFactory factory(_attributeGrow, _attributeGrowNumDocs, _fastAccessAttributesOnly);
+ AttributeCollectionSpecFactory factory(_alloc_strategy, _fastAccessAttributesOnly);
return factory.create(_attrCfg, docIdLimit, _configSerialNum);
}
@@ -144,8 +145,7 @@ AttributeManagerInitializer::AttributeManagerInitializer(SerialNum configSerialN
DocumentMetaStore::SP documentMetaStore,
AttributeManager::SP baseAttrMgr,
const AttributesConfig &attrCfg,
- const GrowStrategy &attributeGrow,
- size_t attributeGrowNumDocs,
+ const AllocStrategy& alloc_strategy,
bool fastAccessAttributesOnly,
searchcorespi::index::IThreadService &master,
std::shared_ptr<AttributeManager::SP> attrMgrResult)
@@ -153,8 +153,7 @@ AttributeManagerInitializer::AttributeManagerInitializer(SerialNum configSerialN
_documentMetaStore(documentMetaStore),
_attrMgr(),
_attrCfg(attrCfg),
- _attributeGrow(attributeGrow),
- _attributeGrowNumDocs(attributeGrowNumDocs),
+ _alloc_strategy(alloc_strategy),
_fastAccessAttributesOnly(fastAccessAttributesOnly),
_master(master),
_attributesResult(),
diff --git a/searchcore/src/vespa/searchcore/proton/attribute/attribute_manager_initializer.h b/searchcore/src/vespa/searchcore/proton/attribute/attribute_manager_initializer.h
index d56f4c25ace..f74a0f1519d 100644
--- a/searchcore/src/vespa/searchcore/proton/attribute/attribute_manager_initializer.h
+++ b/searchcore/src/vespa/searchcore/proton/attribute/attribute_manager_initializer.h
@@ -4,9 +4,9 @@
#include "attributemanager.h"
#include "initialized_attributes_result.h"
-#include <vespa/searchcommon/common/growstrategy.h>
#include <vespa/searchcore/proton/documentmetastore/documentmetastore.h>
#include <vespa/searchcore/proton/initializer/initializer_task.h>
+#include <vespa/searchcore/proton/common/alloc_strategy.h>
#include <vespa/searchlib/common/serialnum.h>
#include <vespa/config-attributes.h>
@@ -24,8 +24,7 @@ private:
DocumentMetaStore::SP _documentMetaStore;
AttributeManager::SP _attrMgr;
vespa::config::search::AttributesConfig _attrCfg;
- search::GrowStrategy _attributeGrow;
- size_t _attributeGrowNumDocs;
+ AllocStrategy _alloc_strategy;
bool _fastAccessAttributesOnly;
searchcorespi::index::IThreadService &_master;
InitializedAttributesResult _attributesResult;
@@ -39,8 +38,7 @@ public:
DocumentMetaStore::SP documentMetaStore,
AttributeManager::SP baseAttrMgr,
const vespa::config::search::AttributesConfig &attrCfg,
- const search::GrowStrategy &attributeGrow,
- size_t attributeGrowNumDocs,
+ const AllocStrategy& alloc_strategy,
bool fastAccessAttributesOnly,
searchcorespi::index::IThreadService &master,
std::shared_ptr<AttributeManager::SP> attrMgrResult);
diff --git a/searchcore/src/vespa/searchcore/proton/attribute/attributemanager.cpp b/searchcore/src/vespa/searchcore/proton/attribute/attributemanager.cpp
index 0b00e274522..f635ee34a04 100644
--- a/searchcore/src/vespa/searchcore/proton/attribute/attributemanager.cpp
+++ b/searchcore/src/vespa/searchcore/proton/attribute/attributemanager.cpp
@@ -177,10 +177,14 @@ AttributeManager::transferExistingAttributes(const AttributeManager &currMgr,
auto shrinker = wrap->getShrinker();
assert(shrinker);
addAttribute(AttributeWrap::normalAttribute(av), shrinker);
+ auto id = _attributeFieldWriter.getExecutorIdFromName(av->getNamePrefix());
+ auto cfg = aspec.getConfig();
+ _attributeFieldWriter.execute(id, [av, cfg]() { av->update_config(cfg); });
} else {
toBeAdded.push_back(aspec);
}
}
+ _attributeFieldWriter.sync();
}
void
diff --git a/searchcore/src/vespa/searchcore/proton/bucketdb/CMakeLists.txt b/searchcore/src/vespa/searchcore/proton/bucketdb/CMakeLists.txt
index 6619f9e419d..66a58ad66a3 100644
--- a/searchcore/src/vespa/searchcore/proton/bucketdb/CMakeLists.txt
+++ b/searchcore/src/vespa/searchcore/proton/bucketdb/CMakeLists.txt
@@ -6,6 +6,7 @@ vespa_add_library(searchcore_bucketdb STATIC
bucket_db_owner.cpp
bucketdb.cpp
bucketdbhandler.cpp
+ bucketscaniterator.cpp
bucketsessionbase.cpp
bucketstate.cpp
checksumaggregators.cpp
diff --git a/searchcore/src/vespa/searchcore/proton/bucketdb/bucketdb.h b/searchcore/src/vespa/searchcore/proton/bucketdb/bucketdb.h
index 05388931e20..e6848d095df 100644
--- a/searchcore/src/vespa/searchcore/proton/bucketdb/bucketdb.h
+++ b/searchcore/src/vespa/searchcore/proton/bucketdb/bucketdb.h
@@ -31,7 +31,7 @@ private:
void checkEmpty() const;
public:
BucketDB();
- virtual ~BucketDB();
+ ~BucketDB();
const BucketState & add(const GlobalId &gid,
const BucketId &bucketId, const Timestamp &timestamp, uint32_t docSize,
diff --git a/searchcore/src/vespa/searchcore/proton/bucketdb/bucketscaniterator.cpp b/searchcore/src/vespa/searchcore/proton/bucketdb/bucketscaniterator.cpp
new file mode 100644
index 00000000000..46cbb4fc37f
--- /dev/null
+++ b/searchcore/src/vespa/searchcore/proton/bucketdb/bucketscaniterator.cpp
@@ -0,0 +1,29 @@
+// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+
+#include "bucketscaniterator.h"
+
+using document::BucketId;
+using storage::spi::BucketInfo;
+
+namespace proton::bucketdb {
+
+ScanIterator::ScanIterator(BucketDBOwner::Guard db, Pass pass, BucketId lastBucket, BucketId endBucket)
+ : _db(std::move(db)),
+ _itr(lastBucket.isSet() ? _db->upperBound(lastBucket) : _db->begin()),
+ _end(pass == Pass::SECOND && endBucket.isSet() ?
+ _db->upperBound(endBucket) : _db->end())
+{ }
+
+ScanIterator::ScanIterator(BucketDBOwner::Guard db, BucketId bucket)
+ : _db(std::move(db)),
+ _itr(_db->lowerBound(bucket)),
+ _end(_db->end())
+{ }
+
+ScanIterator::ScanIterator(ScanIterator &&rhs)
+ : _db(std::move(rhs._db)),
+ _itr(rhs._itr),
+ _end(rhs._end)
+{ }
+
+} // namespace proton::bucketdb

diff --git a/searchcore/src/vespa/searchcore/proton/bucketdb/bucketscaniterator.h b/searchcore/src/vespa/searchcore/proton/bucketdb/bucketscaniterator.h
new file mode 100644
index 00000000000..a437230ed0f
--- /dev/null
+++ b/searchcore/src/vespa/searchcore/proton/bucketdb/bucketscaniterator.h
@@ -0,0 +1,49 @@
+// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+
+#pragma once
+
+#include "bucket_db_owner.h"
+#include "bucketdb.h"
+
+namespace proton::bucketdb {
+
+struct ScanPosition {
+ document::BucketId _lastBucket;
+
+ ScanPosition() : _lastBucket() { }
+ bool validBucket() const { return _lastBucket.isSet(); }
+};
+
+
+class ScanIterator {
+private:
+ using BucketId = document::BucketId;
+ using BucketIterator = BucketDB::ConstMapIterator;
+ BucketDBOwner::Guard _db;
+ BucketIterator _itr;
+ BucketIterator _end;
+
+public:
+ enum class Pass {FIRST, SECOND};
+ ScanIterator(BucketDBOwner::Guard db, Pass pass, BucketId lastBucket, BucketId endBucket);
+
+ ScanIterator(BucketDBOwner::Guard db, BucketId bucket);
+
+ ScanIterator(const ScanIterator &) = delete;
+ ScanIterator(ScanIterator &&rhs);
+ ScanIterator &operator=(const ScanIterator &) = delete;
+ ScanIterator &operator=(ScanIterator &&rhs) = delete;
+
+ bool valid() const { return _itr != _end; }
+ bool isActive() const { return _itr->second.isActive(); }
+ BucketId getBucket() const { return _itr->first; }
+ bool hasReadyBucketDocs() const { return _itr->second.getReadyCount() != 0; }
+ bool hasNotReadyBucketDocs() const { return _itr->second.getNotReadyCount() != 0; }
+
+ ScanIterator & operator++() {
+ ++_itr;
+ return *this;
+ }
+};
+
+}
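
The Pass enum replaces the old numeric FIRST_SCAN_PASS/SECOND_SCAN_PASS constants and supports a wrap-around scan: pass FIRST iterates from just after the last visited bucket to the end of the bucket DB, pass SECOND from the beginning up to and including that bucket, as the documentbucketmover test above exercises. A rough usage sketch (not part of the diff; the surrounding ownership, qualification of BucketDBOwner, and the process() callback are assumptions):

#include "bucketscaniterator.h"
#include <functional>

using document::BucketId;
using proton::bucketdb::ScanIterator;

// Wrap-around scan resuming after `last`: Pass::FIRST covers (last, end],
// Pass::SECOND covers [begin, last]. Each iterator holds the bucket DB guard
// only for the duration of its own loop.
void scan_all(proton::BucketDBOwner &bucketDB, BucketId last,
              const std::function<void(BucketId)> &process) {
    for (ScanIterator itr(bucketDB.takeGuard(), ScanIterator::Pass::FIRST, last, BucketId());
         itr.valid(); ++itr) {
        process(itr.getBucket());
    }
    for (ScanIterator itr(bucketDB.takeGuard(), ScanIterator::Pass::SECOND, BucketId(), last);
         itr.valid(); ++itr) {
        process(itr.getBucket());
    }
}
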
diff --git a/searchcore/src/vespa/searchcore/proton/common/CMakeLists.txt b/searchcore/src/vespa/searchcore/proton/common/CMakeLists.txt
index d59a4075dce..a91c35f0485 100644
--- a/searchcore/src/vespa/searchcore/proton/common/CMakeLists.txt
+++ b/searchcore/src/vespa/searchcore/proton/common/CMakeLists.txt
@@ -1,6 +1,8 @@
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
vespa_add_library(searchcore_pcommon STATIC
SOURCES
+ alloc_config.cpp
+ alloc_strategy.cpp
attribute_updater.cpp
attributefieldvaluenode.cpp
cachedselect.cpp
diff --git a/searchcore/src/vespa/searchcore/proton/common/alloc_config.cpp b/searchcore/src/vespa/searchcore/proton/common/alloc_config.cpp
new file mode 100644
index 00000000000..1611d00fb0f
--- /dev/null
+++ b/searchcore/src/vespa/searchcore/proton/common/alloc_config.cpp
@@ -0,0 +1,56 @@
+// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+
+#include "alloc_config.h"
+#include <vespa/searchcore/proton/common/subdbtype.h>
+#include <algorithm>
+
+using search::CompactionStrategy;
+using search::GrowStrategy;
+
+namespace proton {
+
+AllocConfig::AllocConfig(const AllocStrategy& alloc_strategy,
+ uint32_t redundancy, uint32_t searchable_copies)
+ : _alloc_strategy(alloc_strategy),
+ _redundancy(redundancy),
+ _searchable_copies(searchable_copies)
+{
+}
+
+AllocConfig::AllocConfig()
+ : AllocConfig(AllocStrategy(), 1, 1)
+{
+}
+
+AllocConfig::~AllocConfig() = default;
+
+bool
+AllocConfig::operator==(const AllocConfig &rhs) const noexcept
+{
+ return ((_alloc_strategy == rhs._alloc_strategy) &&
+ (_redundancy == rhs._redundancy) &&
+ (_searchable_copies == rhs._searchable_copies));
+}
+
+AllocStrategy
+AllocConfig::make_alloc_strategy(SubDbType sub_db_type) const
+{
+ auto &baseline_grow_strategy = _alloc_strategy.get_grow_strategy();
+ size_t initial_capacity = baseline_grow_strategy.getDocsInitialCapacity();
+ switch (sub_db_type) {
+ case SubDbType::READY:
+ initial_capacity *= _searchable_copies;
+ break;
+ case SubDbType::NOTREADY:
+ initial_capacity *= (_redundancy - _searchable_copies);
+ break;
+ case SubDbType::REMOVED:
+ default:
+ initial_capacity = std::max(1024ul, initial_capacity / 100);
+ break;
+ }
+ GrowStrategy grow_strategy(initial_capacity, baseline_grow_strategy.getDocsGrowFactor(), baseline_grow_strategy.getDocsGrowDelta(), baseline_grow_strategy.getMultiValueAllocGrowFactor());
+ return AllocStrategy(grow_strategy, _alloc_strategy.get_compaction_strategy(), _alloc_strategy.get_amortize_count());
+}
+
+}
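As a worked example of the scaling in make_alloc_strategy above (numbers chosen only for illustration): with a baseline initial capacity of 100000 documents, redundancy 3 and 2 searchable copies, the per-sub-db initial capacities become:

// READY    : 100000 * 2                     = 200000 docs
// NOTREADY : 100000 * (3 - 2)               = 100000 docs
// REMOVED  : std::max(1024ul, 100000 / 100) =   1024 docs
// Grow factor, grow delta and multi-value grow factor are copied unchanged from
// the baseline GrowStrategy; the compaction strategy and amortize count are
// passed through as-is.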
diff --git a/searchcore/src/vespa/searchcore/proton/common/alloc_config.h b/searchcore/src/vespa/searchcore/proton/common/alloc_config.h
new file mode 100644
index 00000000000..25b953b9871
--- /dev/null
+++ b/searchcore/src/vespa/searchcore/proton/common/alloc_config.h
@@ -0,0 +1,34 @@
+// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+
+#pragma once
+
+#include "alloc_strategy.h"
+
+namespace proton {
+
+enum class SubDbType;
+
+/*
+ * Class representing allocation config for proton which can be used
+ * to make an allocation strategy for large data structures owned by a
+ * document sub db (e.g. attribute vectors, document meta store).
+ */
+class AllocConfig
+{
+ AllocStrategy _alloc_strategy; // baseline before adjusting for redundancy / searchable copies
+ const uint32_t _redundancy;
+ const uint32_t _searchable_copies;
+
+public:
+ AllocConfig(const AllocStrategy& alloc_strategy, uint32_t redundancy, uint32_t searchable_copies);
+ AllocConfig();
+ ~AllocConfig();
+
+ bool operator==(const AllocConfig &rhs) const noexcept;
+ bool operator!=(const AllocConfig &rhs) const noexcept {
+ return !operator==(rhs);
+ }
+ AllocStrategy make_alloc_strategy(SubDbType sub_db_type) const;
+};
+
+}
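A short sketch of the intended call pattern (illustrative only; `strategy` stands for any baseline AllocStrategy, and SubDbType comes from subdbtype.h as included by alloc_config.cpp):

proton::AllocConfig alloc_config(strategy, /*redundancy=*/3, /*searchable_copies=*/2);
proton::AllocStrategy ready    = alloc_config.make_alloc_strategy(proton::SubDbType::READY);
proton::AllocStrategy notready = alloc_config.make_alloc_strategy(proton::SubDbType::NOTREADY);
proton::AllocStrategy removed  = alloc_config.make_alloc_strategy(proton::SubDbType::REMOVED);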
diff --git a/searchcore/src/vespa/searchcore/proton/common/alloc_strategy.cpp b/searchcore/src/vespa/searchcore/proton/common/alloc_strategy.cpp
new file mode 100644
index 00000000000..3af72757ccb
--- /dev/null
+++ b/searchcore/src/vespa/searchcore/proton/common/alloc_strategy.cpp
@@ -0,0 +1,41 @@
+// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+
+#include "alloc_strategy.h"
+#include <iostream>
+
+using search::CompactionStrategy;
+using search::GrowStrategy;
+
+namespace proton {
+
+AllocStrategy::AllocStrategy(const GrowStrategy& grow_strategy,
+ const CompactionStrategy& compaction_strategy,
+ uint32_t amortize_count)
+ : _grow_strategy(grow_strategy),
+ _compaction_strategy(compaction_strategy),
+ _amortize_count(amortize_count)
+{
+}
+
+AllocStrategy::AllocStrategy()
+ : AllocStrategy(GrowStrategy(), CompactionStrategy(), 10000)
+{
+}
+
+AllocStrategy::~AllocStrategy() = default;
+
+bool
+AllocStrategy::operator==(const AllocStrategy &rhs) const noexcept
+{
+ return ((_grow_strategy == rhs._grow_strategy) &&
+ (_compaction_strategy == rhs._compaction_strategy) &&
+ (_amortize_count == rhs._amortize_count));
+}
+
+std::ostream& operator<<(std::ostream& os, const AllocStrategy& alloc_strategy)
+{
+ os << "{ grow_strategy=" << alloc_strategy.get_grow_strategy() << ", compaction_strategy=" << alloc_strategy.get_compaction_strategy() << ", amortize_count=" << alloc_strategy.get_amortize_count() << "}";
+ return os;
+}
+
+}
diff --git a/searchcore/src/vespa/searchcore/proton/common/alloc_strategy.h b/searchcore/src/vespa/searchcore/proton/common/alloc_strategy.h
new file mode 100644
index 00000000000..74bcc1772ee
--- /dev/null
+++ b/searchcore/src/vespa/searchcore/proton/common/alloc_strategy.h
@@ -0,0 +1,41 @@
+// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+
+#pragma once
+
+#include <vespa/searchcommon/common/compaction_strategy.h>
+#include <vespa/searchcommon/common/growstrategy.h>
+#include <iosfwd>
+
+namespace proton {
+
+/*
+ * Class representing allocation strategy for large data structures
+ * owned by a document sub db (e.g. attribute vectors, document meta store).
+ */
+class AllocStrategy
+{
+protected:
+ const search::GrowStrategy _grow_strategy;
+ const search::CompactionStrategy _compaction_strategy;
+ const uint32_t _amortize_count;
+
+public:
+ AllocStrategy(const search::GrowStrategy& grow_strategy,
+ const search::CompactionStrategy& compaction_strategy,
+ uint32_t amortize_count);
+
+ AllocStrategy();
+ ~AllocStrategy();
+
+ bool operator==(const AllocStrategy &rhs) const noexcept;
+ bool operator!=(const AllocStrategy &rhs) const noexcept {
+ return !operator==(rhs);
+ }
+ const search::GrowStrategy& get_grow_strategy() const noexcept { return _grow_strategy; }
+ const search::CompactionStrategy& get_compaction_strategy() const noexcept { return _compaction_strategy; }
+ uint32_t get_amortize_count() const noexcept { return _amortize_count; }
+};
+
+std::ostream& operator<<(std::ostream& os, const AllocStrategy& alloc_strategy);
+
+}
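Constructing an AllocStrategy directly looks like the sketch below (illustrative values; the GrowStrategy argument order follows its use in alloc_config.cpp above — initial docs, grow factor, grow delta, multi-value grow factor — and CompactionStrategy takes the max dead bytes / dead address-space ratios):

search::GrowStrategy grow(100000, 0.2, 0, 0.2);
search::CompactionStrategy compaction(0.05, 0.05);
proton::AllocStrategy strategy(grow, compaction, 10000);
// strategy.get_amortize_count() == 10000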
diff --git a/searchcore/src/vespa/searchcore/proton/flushengine/flushengine.h b/searchcore/src/vespa/searchcore/proton/flushengine/flushengine.h
index b3376ece351..8aea546bd14 100644
--- a/searchcore/src/vespa/searchcore/proton/flushengine/flushengine.h
+++ b/searchcore/src/vespa/searchcore/proton/flushengine/flushengine.h
@@ -79,7 +79,7 @@ private:
uint32_t initFlush(const IFlushHandler::SP &handler, const IFlushTarget::SP &target);
void flushDone(const FlushContext &ctx, uint32_t taskId);
bool canFlushMore(const std::unique_lock<std::mutex> &guard) const;
- bool wait(vespalib::duration minimumWaitTimeIfReady, bool considerPendingPrune);
+ bool wait(vespalib::duration minimumWaitTimeIfReady, bool ignorePendingPrune);
bool isFlushing(const std::lock_guard<std::mutex> &guard, const vespalib::string & name) const;
friend class FlushTask;
diff --git a/searchcore/src/vespa/searchcore/proton/server/bucketmovejob.cpp b/searchcore/src/vespa/searchcore/proton/server/bucketmovejob.cpp
index cf6ea7f7787..1c642205c86 100644
--- a/searchcore/src/vespa/searchcore/proton/server/bucketmovejob.cpp
+++ b/searchcore/src/vespa/searchcore/proton/server/bucketmovejob.cpp
@@ -21,37 +21,10 @@ namespace proton {
namespace {
-const uint32_t FIRST_SCAN_PASS = 1;
-const uint32_t SECOND_SCAN_PASS = 2;
-
const char * bool2str(bool v) { return (v ? "T" : "F"); }
}
-BucketMoveJob::ScanIterator::
-ScanIterator(BucketDBOwner::Guard db, uint32_t pass, BucketId lastBucket, BucketId endBucket)
- : _db(std::move(db)),
- _itr(lastBucket.isSet() ? _db->upperBound(lastBucket) : _db->begin()),
- _end(pass == SECOND_SCAN_PASS && endBucket.isSet() ?
- _db->upperBound(endBucket) : _db->end())
-{
-}
-
-BucketMoveJob::ScanIterator::
-ScanIterator(BucketDBOwner::Guard db, BucketId bucket)
- : _db(std::move(db)),
- _itr(_db->lowerBound(bucket)),
- _end(_db->end())
-{
-}
-
-BucketMoveJob::ScanIterator::ScanIterator(ScanIterator &&rhs)
- : _db(std::move(rhs._db)),
- _itr(rhs._itr),
- _end(rhs._end)
-{
-}
-
void
BucketMoveJob::checkBucket(const BucketId &bucket,
ScanIterator &itr,
@@ -181,7 +154,7 @@ BucketMoveJob(const IBucketStateCalculator::SP &calc,
_mover(getLimiter()),
_doneScan(false),
_scanPos(),
- _scanPass(FIRST_SCAN_PASS),
+ _scanPass(ScanPass::FIRST),
_endPos(),
_bucketSpace(bucketSpace),
_delayedBuckets(),
@@ -279,7 +252,7 @@ BucketMoveJob::changedCalculator()
_endPos = _scanPos;
}
_doneScan = false;
- _scanPass = FIRST_SCAN_PASS;
+ _scanPass = ScanPass::FIRST;
maybeCancelMover(_mover);
maybeCancelMover(_delayedMover);
}
@@ -287,9 +260,6 @@ BucketMoveJob::changedCalculator()
bool
BucketMoveJob::scanAndMove(size_t maxBucketsToScan, size_t maxDocsToMove)
{
- if (done()) {
- return true;
- }
IFrozenBucketHandler::ExclusiveBucketGuard::UP bucketGuard;
// Look for delayed bucket to be processed now
while (!_delayedBuckets.empty() && _delayedMover.bucketDone()) {
@@ -310,10 +280,10 @@ BucketMoveJob::scanAndMove(size_t maxBucketsToScan, size_t maxDocsToMove)
ScanResult res = scanBuckets(maxBucketsToScan - bucketsScanned, bucketGuard);
bucketsScanned += res.first;
if (res.second) {
- if (_scanPass == FIRST_SCAN_PASS &&
+ if (_scanPass == ScanPass::FIRST &&
_endPos.validBucket()) {
_scanPos = ScanPosition();
- _scanPass = SECOND_SCAN_PASS;
+ _scanPass = ScanPass::SECOND;
} else {
_doneScan = true;
break;
@@ -334,7 +304,7 @@ BucketMoveJob::scanAndMove(size_t maxBucketsToScan, size_t maxDocsToMove)
bool
BucketMoveJob::run()
{
- if (isBlocked()) {
+ if (isBlocked() || done()) {
return true; // indicate work is done, since node state is bad
}
/// Returning false here will immediately post the job back on the executor. This will give a busy loop,
diff --git a/searchcore/src/vespa/searchcore/proton/server/bucketmovejob.h b/searchcore/src/vespa/searchcore/proton/server/bucketmovejob.h
index 26755eca7b1..8a84a10199c 100644
--- a/searchcore/src/vespa/searchcore/proton/server/bucketmovejob.h
+++ b/searchcore/src/vespa/searchcore/proton/server/bucketmovejob.h
@@ -9,7 +9,7 @@
#include "ibucketstatechangedhandler.h"
#include "iclusterstatechangedhandler.h"
#include "ifrozenbuckethandler.h"
-#include <vespa/searchcore/proton/bucketdb/bucket_db_owner.h>
+#include <vespa/searchcore/proton/bucketdb/bucketscaniterator.h>
#include <vespa/searchcore/proton/bucketdb/i_bucket_create_listener.h>
#include <set>
@@ -34,48 +34,10 @@ class BucketMoveJob : public BlockableMaintenanceJob,
public IBucketStateChangedHandler,
public IDiskMemUsageListener
{
-public:
- struct ScanPosition {
- document::BucketId _lastBucket;
-
- ScanPosition() : _lastBucket() { }
- bool validBucket() const { return _lastBucket.isSet(); }
- };
-
- typedef BucketDB::ConstMapIterator BucketIterator;
-
- class ScanIterator {
- private:
- BucketDBOwner::Guard _db;
- BucketIterator _itr;
- BucketIterator _end;
-
- public:
- ScanIterator(BucketDBOwner::Guard db,
- uint32_t pass,
- document::BucketId lastBucket,
- document::BucketId endBucket);
-
- ScanIterator(BucketDBOwner::Guard db, document::BucketId bucket);
-
- ScanIterator(const ScanIterator &) = delete;
- ScanIterator(ScanIterator &&rhs);
- ScanIterator &operator=(const ScanIterator &) = delete;
- ScanIterator &operator=(ScanIterator &&rhs) = delete;
-
- bool valid() const { return _itr != _end; }
- bool isActive() const { return _itr->second.isActive(); }
- document::BucketId getBucket() const { return _itr->first; }
- bool hasReadyBucketDocs() const { return _itr->second.getReadyCount() != 0; }
- bool hasNotReadyBucketDocs() const { return _itr->second.getNotReadyCount() != 0; }
-
- ScanIterator & operator++() {
- ++_itr;
- return *this;
- }
- };
-
private:
+ using ScanPosition = bucketdb::ScanPosition;
+ using ScanIterator = bucketdb::ScanIterator;
+ using ScanPass = ScanIterator::Pass;
using ScanResult = std::pair<size_t, bool>;
std::shared_ptr<IBucketStateCalculator> _calc;
IDocumentMoveHandler &_moveHandler;
@@ -85,7 +47,7 @@ private:
DocumentBucketMover _mover;
bool _doneScan;
ScanPosition _scanPos;
- uint32_t _scanPass;
+ ScanPass _scanPass;
ScanPosition _endPos;
document::BucketSpace _bucketSpace;
diff --git a/searchcore/src/vespa/searchcore/proton/server/documentdb.cpp b/searchcore/src/vespa/searchcore/proton/server/documentdb.cpp
index 60878c2b314..166d1e928bb 100644
--- a/searchcore/src/vespa/searchcore/proton/server/documentdb.cpp
+++ b/searchcore/src/vespa/searchcore/proton/server/documentdb.cpp
@@ -70,39 +70,11 @@ namespace proton {
namespace {
constexpr uint32_t indexing_thread_stack_size = 128 * 1024;
-using Allocation = ProtonConfig::Documentdb::Allocation;
-GrowStrategy
-makeGrowStrategy(uint32_t docsInitialCapacity, const Allocation &allocCfg)
-{
- return GrowStrategy(docsInitialCapacity, allocCfg.growfactor, allocCfg.growbias, allocCfg.multivaluegrowfactor);
-}
-
-DocumentSubDBCollection::Config
-makeSubDBConfig(const ProtonConfig::Distribution & distCfg, const Allocation & allocCfg, size_t numSearcherThreads) {
- size_t initialNumDocs(allocCfg.initialnumdocs);
- GrowStrategy searchableGrowth = makeGrowStrategy(initialNumDocs * distCfg.searchablecopies, allocCfg);
- GrowStrategy removedGrowth = makeGrowStrategy(std::max(1024ul, initialNumDocs/100), allocCfg);
- GrowStrategy notReadyGrowth = makeGrowStrategy(initialNumDocs * (distCfg.redundancy - distCfg.searchablecopies), allocCfg);
- return DocumentSubDBCollection::Config(searchableGrowth, notReadyGrowth, removedGrowth, allocCfg.amortizecount, numSearcherThreads);
-}
-
index::IndexConfig
makeIndexConfig(const ProtonConfig::Index & cfg) {
return index::IndexConfig(WarmupConfig(vespalib::from_s(cfg.warmup.time), cfg.warmup.unpack), cfg.maxflushed, cfg.cache.size);
}
-ProtonConfig::Documentdb _G_defaultProtonDocumentDBConfig;
-
-const ProtonConfig::Documentdb *
-findDocumentDB(const ProtonConfig::DocumentdbVector & documentDBs, const vespalib::string & docType) {
- for (const auto & dbCfg : documentDBs) {
- if (dbCfg.inputdoctypename == docType) {
- return & dbCfg;
- }
- }
- return &_G_defaultProtonDocumentDBConfig;
-}
-
class MetricsUpdateHook : public metrics::UpdateHook {
DocumentDB &_db;
public:
@@ -183,9 +155,7 @@ DocumentDB::DocumentDB(const vespalib::string &baseDir,
_feedHandler(std::make_unique<FeedHandler>(_writeService, tlsSpec, docTypeName, *this, _writeFilter, *this, tlsWriterFactory)),
_subDBs(*this, *this, *_feedHandler, _docTypeName, _writeService, warmupExecutor, fileHeaderContext,
metricsWireService, getMetrics(), queryLimiter, clock, _configMutex, _baseDir,
- makeSubDBConfig(protonCfg.distribution,
- findDocumentDB(protonCfg.documentdb, docTypeName.getName())->allocation,
- protonCfg.numsearcherthreads),
+ DocumentSubDBCollection::Config(protonCfg.numsearcherthreads),
hwInfo),
_maintenanceController(_writeService.master(), sharedExecutor, _docTypeName),
_lidSpaceCompactionHandlers(),
diff --git a/searchcore/src/vespa/searchcore/proton/server/documentdbconfig.cpp b/searchcore/src/vespa/searchcore/proton/server/documentdbconfig.cpp
index baa6c8eb450..7c487043b5b 100644
--- a/searchcore/src/vespa/searchcore/proton/server/documentdbconfig.cpp
+++ b/searchcore/src/vespa/searchcore/proton/server/documentdbconfig.cpp
@@ -14,6 +14,7 @@
#include <vespa/searchcore/config/config-ranking-constants.h>
#include <vespa/searchcore/config/config-onnx-models.h>
#include <vespa/searchcore/proton/attribute/attribute_aspect_delayer.h>
+#include <vespa/searchcore/proton/common/alloc_config.h>
#include <vespa/searchcore/proton/common/document_type_inspector.h>
#include <vespa/searchcore/proton/common/indexschema_inspector.h>
@@ -49,7 +50,8 @@ DocumentDBConfig::ComparisonResult::ComparisonResult()
storeChanged(false),
visibilityDelayChanged(false),
flushChanged(false),
- threading_service_config_changed(false)
+ threading_service_config_changed(false),
+ alloc_config_changed(false)
{ }
DocumentDBConfig::DocumentDBConfig(
@@ -70,6 +72,7 @@ DocumentDBConfig::DocumentDBConfig(
const DocumentDBMaintenanceConfig::SP &maintenance,
const search::LogDocumentStore::Config & storeConfig,
std::shared_ptr<const ThreadingServiceConfig> threading_service_config,
+ std::shared_ptr<const AllocConfig> alloc_config,
const vespalib::string &configId,
const vespalib::string &docTypeName) noexcept
: _configId(configId),
@@ -91,6 +94,7 @@ DocumentDBConfig::DocumentDBConfig(
_maintenance(maintenance),
_storeConfig(storeConfig),
_threading_service_config(std::move(threading_service_config)),
+ _alloc_config(std::move(alloc_config)),
_orig(),
_delayedAttributeAspects(false)
{ }
@@ -117,6 +121,7 @@ DocumentDBConfig(const DocumentDBConfig &cfg)
_maintenance(cfg._maintenance),
_storeConfig(cfg._storeConfig),
_threading_service_config(cfg._threading_service_config),
+ _alloc_config(cfg._alloc_config),
_orig(cfg._orig),
_delayedAttributeAspects(false)
{ }
@@ -141,7 +146,8 @@ DocumentDBConfig::operator==(const DocumentDBConfig & rhs) const
equals<Schema>(_schema.get(), rhs._schema.get()) &&
equals<DocumentDBMaintenanceConfig>(_maintenance.get(), rhs._maintenance.get()) &&
_storeConfig == rhs._storeConfig &&
- equals<ThreadingServiceConfig>(_threading_service_config.get(), rhs._threading_service_config.get());
+ equals<ThreadingServiceConfig>(_threading_service_config.get(), rhs._threading_service_config.get()) &&
+ equals<AllocConfig>(_alloc_config.get(), rhs._alloc_config.get());
}
@@ -167,6 +173,7 @@ DocumentDBConfig::compare(const DocumentDBConfig &rhs) const
retval.visibilityDelayChanged = (_maintenance->getVisibilityDelay() != rhs._maintenance->getVisibilityDelay());
retval.flushChanged = !equals<DocumentDBMaintenanceConfig>(_maintenance.get(), rhs._maintenance.get(), [](const auto &l, const auto &r) { return l.getFlushConfig() == r.getFlushConfig(); });
retval.threading_service_config_changed = !equals<ThreadingServiceConfig>(_threading_service_config.get(), rhs._threading_service_config.get());
+ retval.alloc_config_changed = !equals<AllocConfig>(_alloc_config.get(), rhs._alloc_config.get());
return retval;
}
@@ -188,7 +195,8 @@ DocumentDBConfig::valid() const
_tuneFileDocumentDB &&
_schema &&
_maintenance &&
- _threading_service_config;
+ _threading_service_config &&
+ _alloc_config;
}
namespace
@@ -232,6 +240,7 @@ DocumentDBConfig::makeReplayConfig(const SP & orig)
o._maintenance,
o._storeConfig,
o._threading_service_config,
+ o._alloc_config,
o._configId,
o._docTypeName);
ret->_orig = orig;
@@ -274,6 +283,7 @@ DocumentDBConfig::newFromAttributesConfig(const AttributesConfigSP &attributes)
_maintenance,
_storeConfig,
_threading_service_config,
+ _alloc_config,
_configId,
_docTypeName);
}
@@ -311,6 +321,7 @@ DocumentDBConfig::makeDelayedAttributeAspectConfig(const SP &newCfg, const Docum
n._maintenance,
n._storeConfig,
n._threading_service_config,
+ n._alloc_config,
n._configId,
n._docTypeName);
result->_delayedAttributeAspects = true;
diff --git a/searchcore/src/vespa/searchcore/proton/server/documentdbconfig.h b/searchcore/src/vespa/searchcore/proton/server/documentdbconfig.h
index dc163e91ade..8e24ed8e96a 100644
--- a/searchcore/src/vespa/searchcore/proton/server/documentdbconfig.h
+++ b/searchcore/src/vespa/searchcore/proton/server/documentdbconfig.h
@@ -30,6 +30,7 @@ namespace document::internal { class InternalDocumenttypesType; }
namespace proton {
class ThreadingServiceConfig;
+class AllocConfig;
class DocumentDBConfig
{
@@ -55,6 +56,7 @@ public:
bool visibilityDelayChanged;
bool flushChanged;
bool threading_service_config_changed;
+ bool alloc_config_changed;
ComparisonResult();
ComparisonResult &setRankProfilesChanged(bool val) { rankProfilesChanged = val; return *this; }
@@ -88,6 +90,7 @@ public:
return *this;
}
ComparisonResult &set_threading_service_config_changed(bool val) { threading_service_config_changed = val; return *this; }
+ ComparisonResult &set_alloc_config_changed(bool val) { alloc_config_changed = val; return *this; }
};
using SP = std::shared_ptr<DocumentDBConfig>;
@@ -131,6 +134,7 @@ private:
MaintenanceConfigSP _maintenance;
search::LogDocumentStore::Config _storeConfig;
std::shared_ptr<const ThreadingServiceConfig> _threading_service_config;
+ std::shared_ptr<const AllocConfig> _alloc_config;
SP _orig;
bool _delayedAttributeAspects;
@@ -169,6 +173,7 @@ public:
const DocumentDBMaintenanceConfig::SP &maintenance,
const search::LogDocumentStore::Config & storeConfig,
std::shared_ptr<const ThreadingServiceConfig> threading_service_config,
+ std::shared_ptr<const AllocConfig> alloc_config,
const vespalib::string &configId,
const vespalib::string &docTypeName) noexcept;
@@ -210,6 +215,8 @@ public:
bool getDelayedAttributeAspects() const { return _delayedAttributeAspects; }
const ThreadingServiceConfig& get_threading_service_config() const { return *_threading_service_config; }
const std::shared_ptr<const ThreadingServiceConfig>& get_threading_service_config_shared_ptr() const { return _threading_service_config; }
+ const AllocConfig& get_alloc_config() const { return *_alloc_config; }
+ const std::shared_ptr<const AllocConfig>& get_alloc_config_shared_ptr() const { return _alloc_config; }
bool operator==(const DocumentDBConfig &rhs) const;
diff --git a/searchcore/src/vespa/searchcore/proton/server/documentdbconfigmanager.cpp b/searchcore/src/vespa/searchcore/proton/server/documentdbconfigmanager.cpp
index 115a49fe997..f5a594d2f36 100644
--- a/searchcore/src/vespa/searchcore/proton/server/documentdbconfigmanager.cpp
+++ b/searchcore/src/vespa/searchcore/proton/server/documentdbconfigmanager.cpp
@@ -3,6 +3,7 @@
#include "documentdbconfigmanager.h"
#include "bootstrapconfig.h"
#include "threading_service_config.h"
+#include <vespa/searchcore/proton/common/alloc_config.h>
#include <vespa/searchcore/proton/common/hw_info.h>
#include <vespa/searchcore/config/config-ranking-constants.h>
#include <vespa/searchcore/config/config-onnx-models.h>
@@ -252,6 +253,19 @@ build_threading_service_config(const ProtonConfig &proton_config,
hw_info.cpu()));
}
+std::shared_ptr<const AllocConfig>
+build_alloc_config(const ProtonConfig& proton_config, const vespalib::string& doc_type_name)
+{
+ auto& document_db_config_entry = find_document_db_config_entry(proton_config.documentdb, doc_type_name);
+ auto& alloc_config = document_db_config_entry.allocation;
+ auto& distribution_config = proton_config.distribution;
+ search::GrowStrategy grow_strategy(alloc_config.initialnumdocs, alloc_config.growfactor, alloc_config.growbias, alloc_config.multivaluegrowfactor);
+ search::CompactionStrategy compaction_strategy(alloc_config.maxDeadBytesRatio, alloc_config.maxDeadAddressSpaceRatio);
+ return std::make_shared<const AllocConfig>
+ (AllocStrategy(grow_strategy, compaction_strategy, alloc_config.amortizecount),
+ distribution_config.redundancy, distribution_config.searchablecopies);
+}
+
}
void
@@ -275,6 +289,7 @@ DocumentDBConfigManager::update(const ConfigSnapshot &snapshot)
MaintenanceConfigSP oldMaintenanceConfig;
MaintenanceConfigSP newMaintenanceConfig;
std::shared_ptr<const ThreadingServiceConfig> old_threading_service_config;
+ std::shared_ptr<const AllocConfig> old_alloc_config;
if (!_ignoreForwardedConfig) {
if (!(_bootstrapConfig->getDocumenttypesConfigSP() &&
@@ -299,6 +314,7 @@ DocumentDBConfigManager::update(const ConfigSnapshot &snapshot)
newIndexschemaConfig = current->getIndexschemaConfigSP();
oldMaintenanceConfig = current->getMaintenanceConfigSP();
old_threading_service_config = current->get_threading_service_config_shared_ptr();
+ old_alloc_config = current->get_alloc_config_shared_ptr();
currentGeneration = current->getGeneration();
}
@@ -382,6 +398,10 @@ DocumentDBConfigManager::update(const ConfigSnapshot &snapshot)
(*new_threading_service_config == *old_threading_service_config)) {
new_threading_service_config = old_threading_service_config;
}
+ auto new_alloc_config = build_alloc_config(_bootstrapConfig->getProtonConfig(), _docTypeName);
+ if (new_alloc_config && old_alloc_config && (*new_alloc_config == *old_alloc_config)) {
+ new_alloc_config = old_alloc_config;
+ }
auto newSnapshot = std::make_shared<DocumentDBConfig>(generation,
newRankProfilesConfig,
newRankingConstants,
@@ -399,6 +419,7 @@ DocumentDBConfigManager::update(const ConfigSnapshot &snapshot)
newMaintenanceConfig,
storeConfig,
new_threading_service_config,
+ new_alloc_config,
_configId,
_docTypeName);
assert(newSnapshot->valid());
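For reference, build_alloc_config above maps the following ProtonConfig fields into the new per-document-type AllocConfig (a summary of the code above, not additional behaviour):

// documentdb[].allocation.initialnumdocs / growfactor / growbias / multivaluegrowfactor -> GrowStrategy
// documentdb[].allocation.maxDeadBytesRatio / maxDeadAddressSpaceRatio                  -> CompactionStrategy
// documentdb[].allocation.amortizecount                                                 -> AllocStrategy amortize count
// distribution.redundancy / searchablecopies                                            -> AllocConfig copy counts

As with the threading service config, the freshly built AllocConfig is swapped for the previously held instance when the two compare equal, so an unchanged config keeps sharing the same object.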
diff --git a/searchcore/src/vespa/searchcore/proton/server/documentsubdbcollection.cpp b/searchcore/src/vespa/searchcore/proton/server/documentsubdbcollection.cpp
index 61cf45a81d7..6ce0b896a50 100644
--- a/searchcore/src/vespa/searchcore/proton/server/documentsubdbcollection.cpp
+++ b/searchcore/src/vespa/searchcore/proton/server/documentsubdbcollection.cpp
@@ -19,13 +19,8 @@ using vespalib::makeLambdaTask;
namespace proton {
-DocumentSubDBCollection::Config::Config(GrowStrategy ready, GrowStrategy notReady, GrowStrategy removed,
- size_t fixedAttributeTotalSkew, size_t numSearchThreads)
- : _readyGrowth(ready),
- _notReadyGrowth(notReady),
- _removedGrowth(removed),
- _fixedAttributeTotalSkew(fixedAttributeTotalSkew),
- _numSearchThreads(numSearchThreads)
+DocumentSubDBCollection::Config::Config(size_t numSearchThreads)
+ : _numSearchThreads(numSearchThreads)
{ }
DocumentSubDBCollection::DocumentSubDBCollection(
@@ -66,7 +61,6 @@ DocumentSubDBCollection::DocumentSubDBCollection(
SearchableDocSubDB::Config(
FastAccessDocSubDB::Config(
StoreOnlyDocSubDB::Config(docTypeName, "0.ready", baseDir,
- cfg.getReadyGrowth(), cfg.getFixedAttributeTotalSkew(),
_readySubDbId, SubDbType::READY),
true, true, false),
cfg.getNumSearchThreads()),
@@ -76,15 +70,14 @@ DocumentSubDBCollection::DocumentSubDBCollection(
_subDBs.push_back
(new StoreOnlyDocSubDB(
- StoreOnlyDocSubDB::Config(docTypeName, "1.removed", baseDir, cfg.getRemovedGrowth(),
- cfg.getFixedAttributeTotalSkew(), _remSubDbId, SubDbType::REMOVED),
+ StoreOnlyDocSubDB::Config(docTypeName, "1.removed", baseDir,
+ _remSubDbId, SubDbType::REMOVED),
context));
_subDBs.push_back
(new FastAccessDocSubDB(
FastAccessDocSubDB::Config(
StoreOnlyDocSubDB::Config(docTypeName, "2.notready", baseDir,
- cfg.getNotReadyGrowth(), cfg.getFixedAttributeTotalSkew(),
_notReadySubDbId, SubDbType::NOTREADY),
true, true, true),
FastAccessDocSubDB::Context(context, metrics.notReady.attributes, metricsWireService)));
diff --git a/searchcore/src/vespa/searchcore/proton/server/documentsubdbcollection.h b/searchcore/src/vespa/searchcore/proton/server/documentsubdbcollection.h
index 317ec191d60..c86370c942b 100644
--- a/searchcore/src/vespa/searchcore/proton/server/documentsubdbcollection.h
+++ b/searchcore/src/vespa/searchcore/proton/server/documentsubdbcollection.h
@@ -5,7 +5,6 @@
#include <vespa/searchcore/proton/bucketdb/bucketdbhandler.h>
#include <vespa/searchcore/proton/common/hw_info.h>
#include <vespa/searchcore/proton/persistenceengine/i_document_retriever.h>
-#include <vespa/searchcommon/common/growstrategy.h>
#include <vespa/searchlib/common/serialnum.h>
#include <vespa/vespalib/util/varholder.h>
#include <mutex>
@@ -61,19 +60,9 @@ public:
using SerialNum = search::SerialNum;
class Config {
public:
- using GrowStrategy = search::GrowStrategy;
- Config(GrowStrategy ready, GrowStrategy notReady, GrowStrategy removed,
- size_t fixedAttributeTotalSkew, size_t numSearchThreads);
- GrowStrategy getReadyGrowth() const { return _readyGrowth; }
- GrowStrategy getNotReadyGrowth() const { return _notReadyGrowth; }
- GrowStrategy getRemovedGrowth() const { return _removedGrowth; }
- size_t getNumSearchThreads() const { return _numSearchThreads; }
- size_t getFixedAttributeTotalSkew() const { return _fixedAttributeTotalSkew; }
+ Config(size_t numSearchThreads);
+ size_t getNumSearchThreads() const noexcept { return _numSearchThreads; }
private:
- const GrowStrategy _readyGrowth;
- const GrowStrategy _notReadyGrowth;
- const GrowStrategy _removedGrowth;
- const size_t _fixedAttributeTotalSkew;
const size_t _numSearchThreads;
};
diff --git a/searchcore/src/vespa/searchcore/proton/server/fast_access_doc_subdb.cpp b/searchcore/src/vespa/searchcore/proton/server/fast_access_doc_subdb.cpp
index f92f3e74e31..ca2a98d43e0 100644
--- a/searchcore/src/vespa/searchcore/proton/server/fast_access_doc_subdb.cpp
+++ b/searchcore/src/vespa/searchcore/proton/server/fast_access_doc_subdb.cpp
@@ -13,6 +13,7 @@
#include <vespa/searchcore/proton/attribute/attribute_populator.h>
#include <vespa/searchcore/proton/attribute/filter_attribute_manager.h>
#include <vespa/searchcore/proton/attribute/sequential_attributes_initializer.h>
+#include <vespa/searchcore/proton/common/alloc_config.h>
#include <vespa/searchcore/proton/matching/sessionmanager.h>
#include <vespa/searchcore/proton/reprocessing/attribute_reprocessing_initializer.h>
#include <vespa/searchcore/proton/reprocessing/document_reprocessing_handler.h>
@@ -62,6 +63,7 @@ FastAccessDocSubDB::createAttributeManagerInitializer(const DocumentDBConfig &co
DocumentMetaStore::SP documentMetaStore,
std::shared_ptr<AttributeManager::SP> attrMgrResult) const
{
+ AllocStrategy alloc_strategy = configSnapshot.get_alloc_config().make_alloc_strategy(_subDbType);
IAttributeFactory::SP attrFactory = std::make_shared<AttributeFactory>();
AttributeManager::SP baseAttrMgr =
std::make_shared<AttributeManager>(_baseDir + "/attribute",
@@ -77,8 +79,7 @@ FastAccessDocSubDB::createAttributeManagerInitializer(const DocumentDBConfig &co
documentMetaStore,
baseAttrMgr,
(_hasAttributes ? configSnapshot.getAttributesConfig() : AttributesConfig()),
- _attributeGrow,
- _attributeGrowNumDocs,
+ alloc_strategy,
_fastAccessAttributesOnly,
_writeService.master(),
attrMgrResult);
@@ -101,11 +102,10 @@ FastAccessDocSubDB::setupAttributeManager(AttributeManager::SP attrMgrResult)
AttributeCollectionSpec::UP
-FastAccessDocSubDB::createAttributeSpec(const AttributesConfig &attrCfg, SerialNum serialNum) const
+FastAccessDocSubDB::createAttributeSpec(const AttributesConfig &attrCfg, const AllocStrategy& alloc_strategy, SerialNum serialNum) const
{
uint32_t docIdLimit(_dms->getCommittedDocIdLimit());
- AttributeCollectionSpecFactory factory(_attributeGrow,
- _attributeGrowNumDocs, _fastAccessAttributesOnly);
+ AttributeCollectionSpecFactory factory(alloc_strategy, _fastAccessAttributesOnly);
return factory.create(attrCfg, docIdLimit, serialNum);
}
@@ -246,7 +246,8 @@ FastAccessDocSubDB::applyConfig(const DocumentDBConfig &newConfigSnapshot, const
{
(void) resolver;
- reconfigure(newConfigSnapshot.getStoreConfig());
+ AllocStrategy alloc_strategy = newConfigSnapshot.get_alloc_config().make_alloc_strategy(_subDbType);
+ reconfigure(newConfigSnapshot.getStoreConfig(), alloc_strategy);
IReprocessingTask::List tasks;
/*
* If attribute manager should change then document retriever
@@ -262,7 +263,7 @@ FastAccessDocSubDB::applyConfig(const DocumentDBConfig &newConfigSnapshot, const
std::make_unique<AttributeWriterFactory>(), getSubDbName());
proton::IAttributeManager::SP oldMgr = extractAttributeManager(_fastAccessFeedView.get());
AttributeCollectionSpec::UP attrSpec =
- createAttributeSpec(newConfigSnapshot.getAttributesConfig(), serialNum);
+ createAttributeSpec(newConfigSnapshot.getAttributesConfig(), alloc_strategy, serialNum);
IReprocessingInitializer::UP initializer =
configurer.reconfigure(newConfigSnapshot, oldConfigSnapshot, *attrSpec);
if (initializer->hasReprocessors()) {
diff --git a/searchcore/src/vespa/searchcore/proton/server/fast_access_doc_subdb.h b/searchcore/src/vespa/searchcore/proton/server/fast_access_doc_subdb.h
index 2bf574aba10..7e2ac3a67cc 100644
--- a/searchcore/src/vespa/searchcore/proton/server/fast_access_doc_subdb.h
+++ b/searchcore/src/vespa/searchcore/proton/server/fast_access_doc_subdb.h
@@ -84,7 +84,7 @@ protected:
MetricsWireService &_metricsWireService;
DocIdLimit _docIdLimit;
- AttributeCollectionSpec::UP createAttributeSpec(const AttributesConfig &attrCfg, SerialNum serialNum) const;
+ AttributeCollectionSpec::UP createAttributeSpec(const AttributesConfig &attrCfg, const AllocStrategy& alloc_strategy, SerialNum serialNum) const;
AttributeManager::SP getAndResetInitAttributeManager();
virtual IFlushTargetList getFlushTargetsInternal() override;
void reconfigureAttributeMetrics(const IAttributeManager &newMgr, const IAttributeManager &oldMgr);
diff --git a/searchcore/src/vespa/searchcore/proton/server/reconfig_params.cpp b/searchcore/src/vespa/searchcore/proton/server/reconfig_params.cpp
index 4fc241571ac..bb6c5423175 100644
--- a/searchcore/src/vespa/searchcore/proton/server/reconfig_params.cpp
+++ b/searchcore/src/vespa/searchcore/proton/server/reconfig_params.cpp
@@ -27,7 +27,8 @@ ReconfigParams::configHasChanged() const
_res.tuneFileDocumentDBChanged ||
_res.schemaChanged ||
_res.maintenanceChanged ||
- _res.storeChanged;
+ _res.storeChanged ||
+ _res.alloc_config_changed;
}
bool
@@ -51,7 +52,7 @@ ReconfigParams::shouldIndexManagerChange() const
bool
ReconfigParams::shouldAttributeManagerChange() const
{
- return _res.attributesChanged || _res.importedFieldsChanged || _res.visibilityDelayChanged;
+ return _res.attributesChanged || _res.importedFieldsChanged || _res.visibilityDelayChanged || _res.alloc_config_changed;
}
bool
diff --git a/searchcore/src/vespa/searchcore/proton/server/searchabledocsubdb.cpp b/searchcore/src/vespa/searchcore/proton/server/searchabledocsubdb.cpp
index 2768c7ea337..51e6f8e45df 100644
--- a/searchcore/src/vespa/searchcore/proton/server/searchabledocsubdb.cpp
+++ b/searchcore/src/vespa/searchcore/proton/server/searchabledocsubdb.cpp
@@ -7,6 +7,7 @@
#include "i_document_subdb_owner.h"
#include "ibucketstatecalculator.h"
#include <vespa/searchcore/proton/attribute/attribute_writer.h>
+#include <vespa/searchcore/proton/common/alloc_config.h>
#include <vespa/searchcore/proton/flushengine/threadedflushtarget.h>
#include <vespa/searchcore/proton/index/index_manager_initializer.h>
#include <vespa/searchcore/proton/index/index_writer.h>
@@ -143,7 +144,8 @@ IReprocessingTask::List
SearchableDocSubDB::applyConfig(const DocumentDBConfig &newConfigSnapshot, const DocumentDBConfig &oldConfigSnapshot,
SerialNum serialNum, const ReconfigParams &params, IDocumentDBReferenceResolver &resolver)
{
- StoreOnlyDocSubDB::reconfigure(newConfigSnapshot.getStoreConfig());
+ AllocStrategy alloc_strategy = newConfigSnapshot.get_alloc_config().make_alloc_strategy(_subDbType);
+ StoreOnlyDocSubDB::reconfigure(newConfigSnapshot.getStoreConfig(), alloc_strategy);
IReprocessingTask::List tasks;
applyFlushConfig(newConfigSnapshot.getMaintenanceConfigSP()->getFlushConfig());
if (params.shouldMatchersChange() && _addMetrics) {
@@ -152,7 +154,7 @@ SearchableDocSubDB::applyConfig(const DocumentDBConfig &newConfigSnapshot, const
if (params.shouldAttributeManagerChange()) {
proton::IAttributeManager::SP oldMgr = getAttributeManager();
AttributeCollectionSpec::UP attrSpec =
- createAttributeSpec(newConfigSnapshot.getAttributesConfig(), serialNum);
+ createAttributeSpec(newConfigSnapshot.getAttributesConfig(), alloc_strategy, serialNum);
IReprocessingInitializer::UP initializer =
_configurer.reconfigure(newConfigSnapshot, oldConfigSnapshot, *attrSpec, params, resolver);
if (initializer && initializer->hasReprocessors()) {
diff --git a/searchcore/src/vespa/searchcore/proton/server/storeonlydocsubdb.cpp b/searchcore/src/vespa/searchcore/proton/server/storeonlydocsubdb.cpp
index 2aeece204fb..9b504c74635 100644
--- a/searchcore/src/vespa/searchcore/proton/server/storeonlydocsubdb.cpp
+++ b/searchcore/src/vespa/searchcore/proton/server/storeonlydocsubdb.cpp
@@ -10,6 +10,7 @@
#include "storeonlydocsubdb.h"
#include <vespa/searchcore/proton/attribute/attribute_writer.h>
#include <vespa/searchcore/proton/bucketdb/ibucketdbhandlerinitializer.h>
+#include <vespa/searchcore/proton/common/alloc_config.h>
#include <vespa/searchcore/proton/docsummary/summaryflushtarget.h>
#include <vespa/searchcore/proton/docsummary/summarymanagerinitializer.h>
#include <vespa/searchcore/proton/documentmetastore/documentmetastoreinitializer.h>
@@ -28,6 +29,7 @@
#include <vespa/log/log.h>
LOG_SETUP(".proton.server.storeonlydocsubdb");
+using search::CompactionStrategy;
using search::GrowStrategy;
using search::AttributeGuard;
using search::AttributeVector;
@@ -56,13 +58,10 @@ IIndexWriter::SP nullIndexWriter;
StoreOnlyDocSubDB::Config::Config(const DocTypeName &docTypeName, const vespalib::string &subName,
const vespalib::string &baseDir,
- const search::GrowStrategy &attributeGrow, size_t attributeGrowNumDocs,
uint32_t subDbId, SubDbType subDbType)
: _docTypeName(docTypeName),
_subName(subName),
_baseDir(baseDir + "/" + subName),
- _attributeGrow(attributeGrow),
- _attributeGrowNumDocs(attributeGrowNumDocs),
_subDbId(subDbId),
_subDbType(subDbType)
{ }
@@ -99,8 +98,6 @@ StoreOnlyDocSubDB::StoreOnlyDocSubDB(const Config &cfg, const Context &ctx)
_bucketDB(ctx._bucketDB),
_bucketDBHandlerInitializer(ctx._bucketDBHandlerInitializer),
_metaStoreCtx(),
- _attributeGrow(cfg._attributeGrow),
- _attributeGrowNumDocs(cfg._attributeGrowNumDocs),
_flushedDocumentMetaStoreSerialNum(0u),
_flushedDocumentStoreSerialNum(0u),
_dms(),
@@ -222,11 +219,12 @@ StoreOnlyDocSubDB::getNewestFlushedSerial()
initializer::InitializerTask::SP
StoreOnlyDocSubDB::
createSummaryManagerInitializer(const search::LogDocumentStore::Config & storeCfg,
+ const AllocStrategy& alloc_strategy,
const search::TuneFileSummary &tuneFile,
search::IBucketizer::SP bucketizer,
std::shared_ptr<SummaryManager::SP> result) const
{
- GrowStrategy grow = _attributeGrow;
+ GrowStrategy grow = alloc_strategy.get_grow_strategy();
vespalib::string baseDir(_baseDir + "/summary");
return std::make_shared<SummaryManagerInitializer>
(grow, baseDir, getSubDbName(), _docTypeName, _writeService.shared(),
@@ -245,12 +243,13 @@ StoreOnlyDocSubDB::setupSummaryManager(SummaryManager::SP summaryManager)
InitializerTask::SP
StoreOnlyDocSubDB::
-createDocumentMetaStoreInitializer(const search::TuneFileAttributes &tuneFile,
+createDocumentMetaStoreInitializer(const AllocStrategy& alloc_strategy,
+ const search::TuneFileAttributes &tuneFile,
std::shared_ptr<DocumentMetaStoreInitializerResult::SP> result) const
{
- GrowStrategy grow = _attributeGrow;
+ GrowStrategy grow = alloc_strategy.get_grow_strategy();
// Amortize memory spike cost over N docs
- grow.setDocsGrowDelta(grow.getDocsGrowDelta() + _attributeGrowNumDocs);
+ grow.setDocsGrowDelta(grow.getDocsGrowDelta() + alloc_strategy.get_amortize_count());
vespalib::string baseDir(_baseDir + "/documentmetastore");
vespalib::string name = DocumentMetaStore::getFixedName();
vespalib::string attrFileName = baseDir + "/" + name; // XXX: Wrong
@@ -292,10 +291,13 @@ StoreOnlyDocSubDB::createInitializer(const DocumentDBConfig &configSnapshot, Ser
{
auto result = std::make_unique<DocumentSubDbInitializer>(const_cast<StoreOnlyDocSubDB &>(*this),
_writeService.master());
- auto dmsInitTask = createDocumentMetaStoreInitializer(configSnapshot.getTuneFileDocumentDBSP()->_attr,
+ AllocStrategy alloc_strategy = configSnapshot.get_alloc_config().make_alloc_strategy(_subDbType);
+ auto dmsInitTask = createDocumentMetaStoreInitializer(alloc_strategy,
+ configSnapshot.getTuneFileDocumentDBSP()->_attr,
result->writableResult().writableDocumentMetaStore());
result->addDocumentMetaStoreInitTask(dmsInitTask);
auto summaryTask = createSummaryManagerInitializer(configSnapshot.getStoreConfig(),
+ alloc_strategy,
configSnapshot.getTuneFileDocumentDBSP()->_summary,
result->result().documentMetaStore()->documentMetaStore(),
result->writableResult().writableSummaryManager());
@@ -409,14 +411,22 @@ StoreOnlyDocSubDB::applyConfig(const DocumentDBConfig &newConfigSnapshot, const
(void) params;
(void) resolver;
assert(_writeService.master().isCurrentThread());
- reconfigure(newConfigSnapshot.getStoreConfig());
+ AllocStrategy alloc_strategy = newConfigSnapshot.get_alloc_config().make_alloc_strategy(_subDbType);
+ reconfigure(newConfigSnapshot.getStoreConfig(), alloc_strategy);
initFeedView(newConfigSnapshot);
return IReprocessingTask::List();
}
void
-StoreOnlyDocSubDB::reconfigure(const search::LogDocumentStore::Config & config)
+StoreOnlyDocSubDB::reconfigure(const search::LogDocumentStore::Config & config, const AllocStrategy& alloc_strategy)
{
+ auto cfg = _dms->getConfig();
+ GrowStrategy grow = alloc_strategy.get_grow_strategy();
+ // Amortize memory spike cost over N docs
+ grow.setDocsGrowDelta(grow.getDocsGrowDelta() + alloc_strategy.get_amortize_count());
+ cfg.setGrowStrategy(grow);
+ cfg.setCompactionStrategy(alloc_strategy.get_compaction_strategy());
+ _dms->update_config(cfg); // Update grow and compaction config
_rSummaryMgr->reconfigure(config);
}
diff --git a/searchcore/src/vespa/searchcore/proton/server/storeonlydocsubdb.h b/searchcore/src/vespa/searchcore/proton/server/storeonlydocsubdb.h
index 7c3f7c82eb0..37229bd551c 100644
--- a/searchcore/src/vespa/searchcore/proton/server/storeonlydocsubdb.h
+++ b/searchcore/src/vespa/searchcore/proton/server/storeonlydocsubdb.h
@@ -20,6 +20,7 @@
namespace proton {
+class AllocStrategy;
struct DocumentDBTaggedMetrics;
class DocumentMetaStoreInitializerResult;
class FeedHandler;
@@ -85,14 +86,12 @@ public:
const DocTypeName _docTypeName;
const vespalib::string _subName;
const vespalib::string _baseDir;
- const search::GrowStrategy _attributeGrow;
- const size_t _attributeGrowNumDocs;
const uint32_t _subDbId;
const SubDbType _subDbType;
Config(const DocTypeName &docTypeName, const vespalib::string &subName,
- const vespalib::string &baseDir, const search::GrowStrategy &attributeGrow,
- size_t attributeGrowNumDocs, uint32_t subDbId, SubDbType subDbType);
+ const vespalib::string &baseDir,
+ uint32_t subDbId, SubDbType subDbType);
~Config();
};
@@ -130,8 +129,6 @@ protected:
BucketDBOwner::SP _bucketDB;
bucketdb::IBucketDBHandlerInitializer &_bucketDBHandlerInitializer;
IDocumentMetaStoreContext::SP _metaStoreCtx;
- const search::GrowStrategy _attributeGrow;
- const size_t _attributeGrowNumDocs;
// The following two serial numbers reflect state at program startup
// and are used by replay logic.
SerialNum _flushedDocumentMetaStoreSerialNum;
@@ -164,6 +161,7 @@ protected:
std::shared_ptr<initializer::InitializerTask>
createSummaryManagerInitializer(const search::LogDocumentStore::Config & protonSummaryCfg,
+ const AllocStrategy& alloc_strategy,
const search::TuneFileSummary &tuneFile,
search::IBucketizer::SP bucketizer,
std::shared_ptr<SummaryManager::SP> result) const;
@@ -171,7 +169,8 @@ protected:
void setupSummaryManager(SummaryManager::SP summaryManager);
std::shared_ptr<initializer::InitializerTask>
- createDocumentMetaStoreInitializer(const search::TuneFileAttributes &tuneFile,
+ createDocumentMetaStoreInitializer(const AllocStrategy& alloc_strategy,
+ const search::TuneFileAttributes &tuneFile,
std::shared_ptr<std::shared_ptr<DocumentMetaStoreInitializerResult>> result) const;
void setupDocumentMetaStore(std::shared_ptr<DocumentMetaStoreInitializerResult> dmsResult);
@@ -181,7 +180,8 @@ protected:
StoreOnlyFeedView::PersistentParams getFeedViewPersistentParams();
vespalib::string getSubDbName() const;
- void reconfigure(const search::LogDocumentStore::Config & protonConfig);
+ void reconfigure(const search::LogDocumentStore::Config & protonConfig,
+ const AllocStrategy& alloc_strategy);
public:
StoreOnlyDocSubDB(const Config &cfg, const Context &ctx);
~StoreOnlyDocSubDB() override;
diff --git a/searchcore/src/vespa/searchcore/proton/test/documentdb_config_builder.cpp b/searchcore/src/vespa/searchcore/proton/test/documentdb_config_builder.cpp
index 335c7aca24c..17cb91ac4ce 100644
--- a/searchcore/src/vespa/searchcore/proton/test/documentdb_config_builder.cpp
+++ b/searchcore/src/vespa/searchcore/proton/test/documentdb_config_builder.cpp
@@ -10,6 +10,7 @@
#include <vespa/searchsummary/config/config-juniperrc.h>
#include <vespa/document/config/config-documenttypes.h>
#include <vespa/config-imported-fields.h>
+#include <vespa/searchcore/proton/common/alloc_config.h>
#include <vespa/searchcore/proton/server/threading_service_config.h>
using document::DocumenttypesConfig;
@@ -47,6 +48,7 @@ DocumentDBConfigBuilder::DocumentDBConfigBuilder(int64_t generation,
_maintenance(std::make_shared<DocumentDBMaintenanceConfig>()),
_store(),
_threading_service_config(std::make_shared<const ThreadingServiceConfig>(ThreadingServiceConfig::make(1))),
+ _alloc_config(std::make_shared<const AllocConfig>()),
_configId(configId),
_docTypeName(docTypeName)
{ }
@@ -70,6 +72,7 @@ DocumentDBConfigBuilder::DocumentDBConfigBuilder(const DocumentDBConfig &cfg)
_maintenance(cfg.getMaintenanceConfigSP()),
_store(cfg.getStoreConfig()),
_threading_service_config(cfg.get_threading_service_config_shared_ptr()),
+ _alloc_config(cfg.get_alloc_config_shared_ptr()),
_configId(cfg.getConfigId()),
_docTypeName(cfg.getDocTypeName())
{}
@@ -97,6 +100,7 @@ DocumentDBConfigBuilder::build()
_maintenance,
_store,
_threading_service_config,
+ _alloc_config,
_configId,
_docTypeName);
}
diff --git a/searchcore/src/vespa/searchcore/proton/test/documentdb_config_builder.h b/searchcore/src/vespa/searchcore/proton/test/documentdb_config_builder.h
index 218a7c56fa9..706e14e73db 100644
--- a/searchcore/src/vespa/searchcore/proton/test/documentdb_config_builder.h
+++ b/searchcore/src/vespa/searchcore/proton/test/documentdb_config_builder.h
@@ -28,6 +28,7 @@ private:
DocumentDBConfig::MaintenanceConfigSP _maintenance;
search::LogDocumentStore::Config _store;
std::shared_ptr<const ThreadingServiceConfig> _threading_service_config;
+ std::shared_ptr<const AllocConfig> _alloc_config;
vespalib::string _configId;
vespalib::string _docTypeName;
diff --git a/searchlib/src/tests/attribute/compaction/attribute_compaction_test.cpp b/searchlib/src/tests/attribute/compaction/attribute_compaction_test.cpp
index dde6b14121d..36a697eaa12 100644
--- a/searchlib/src/tests/attribute/compaction/attribute_compaction_test.cpp
+++ b/searchlib/src/tests/attribute/compaction/attribute_compaction_test.cpp
@@ -226,4 +226,24 @@ TEST_F("Compaction is not executed when free lists are used",
EXPECT_EQUAL(1001u, afterSpace.dead());
}
+TEST_F("Compaction is performed when compaction strategy is changed to enable compaction",
+ Fixture(compactAddressSpaceAttributeConfig(false)))
+{
+ populate_and_hammer(f, true);
+ AddressSpace after1 = f.getMultiValueAddressSpaceUsage("after1");
+ // 100 * 1000 dead arrays due to new values for docids
+ // 1 reserved array accounted as dead
+ EXPECT_EQUAL(100001u, after1.dead());
+ f._v->update_config(compactAddressSpaceAttributeConfig(true));
+ auto old_dead = after1.dead();
+ AddressSpace after2 = f.getMultiValueAddressSpaceUsage("after2");
+ while (after2.dead() < old_dead) {
+ old_dead = after2.dead();
+ f._v->commit(); // new commit might trigger further compaction
+ after2 = f.getMultiValueAddressSpaceUsage("after2");
+ }
+ // DEAD_ARRAYS_SLACK in multi value mapping is 64k
+ EXPECT_GREATER(65536u, after2.dead());
+}
+
TEST_MAIN() { TEST_RUN_ALL(); }
diff --git a/searchlib/src/vespa/searchlib/attribute/attributevector.cpp b/searchlib/src/vespa/searchlib/attribute/attributevector.cpp
index 2168bbe4276..d2574bd32a2 100644
--- a/searchlib/src/vespa/searchlib/attribute/attributevector.cpp
+++ b/searchlib/src/vespa/searchlib/attribute/attributevector.cpp
@@ -21,6 +21,7 @@
#include <vespa/vespalib/util/exceptions.h>
#include <vespa/searchlib/util/logutil.h>
#include <vespa/searchcommon/attribute/attribute_utils.h>
+#include <thread>
#include <vespa/log/log.h>
LOG_SETUP(".searchlib.attribute.attributevector");
@@ -769,6 +770,31 @@ AttributeVector::logEnumStoreEvent(const char *reason, const char *stage)
EV_STATE(eventName.c_str(), jstr.toString().data());
}
+void
+AttributeVector::drain_hold(uint64_t hold_limit)
+{
+ incGeneration();
+ for (int retry = 0; ; ++retry) {
+ removeAllOldGenerations();
+ updateStat(true);
+ if (_status.getOnHold() <= hold_limit) {
+ return;
+ }
+ std::this_thread::sleep_for(retry < 20 ? 20ms : 100ms);
+ }
+}
+
+void
+AttributeVector::update_config(const Config& cfg)
+{
+ commit(true);
+ drain_hold(1024 * 1024); // Wait until 1MiB or less on hold
+ _config.setGrowStrategy(cfg.getGrowStrategy());
+ _config.setCompactionStrategy(cfg.getCompactionStrategy());
+ commit(); // might trigger compaction if compaction strategy changed
+ drain_hold(1024 * 1024); // Wait until 1MiB or less on hold
+}
+
template bool AttributeVector::append<StringChangeData>(ChangeVectorT< ChangeTemplate<StringChangeData> > &changes, uint32_t , const StringChangeData &, int32_t, bool);
template bool AttributeVector::update<StringChangeData>(ChangeVectorT< ChangeTemplate<StringChangeData> > &changes, uint32_t , const StringChangeData &);
template bool AttributeVector::remove<StringChangeData>(ChangeVectorT< ChangeTemplate<StringChangeData> > &changes, uint32_t , const StringChangeData &, int32_t);
diff --git a/searchlib/src/vespa/searchlib/attribute/attributevector.h b/searchlib/src/vespa/searchlib/attribute/attributevector.h
index 4fc6589850e..f308ee8d024 100644
--- a/searchlib/src/vespa/searchlib/attribute/attributevector.h
+++ b/searchlib/src/vespa/searchlib/attribute/attributevector.h
@@ -393,6 +393,7 @@ public:
/** Return the fixed length of the attribute. If 0 then you must inquire each document. */
size_t getFixedWidth() const override { return _config.basicType().fixedSize(); }
const Config &getConfig() const { return _config; }
+ void update_config(const Config& cfg);
BasicType getInternalBasicType() const { return _config.basicType(); }
CollectionType getInternalCollectionType() const { return _config.collectionType(); }
const BaseName & getBaseFileName() const { return _baseFileName; }
@@ -667,6 +668,8 @@ public:
static bool isEnumerated(const vespalib::GenericHeader &header);
virtual vespalib::MemoryUsage getChangeVectorMemoryUsage() const;
+
+ void drain_hold(uint64_t hold_limit);
};
}
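The new update_config/drain_hold pair lets callers adjust grow and compaction settings on a live attribute: update_config commits, waits for held memory to drain below 1 MiB, installs the new strategies, then commits again so a newly enabled compaction can start. A minimal usage sketch (assumes an existing AttributeVector instance reachable via `attr`; illustrative only):

auto cfg = attr->getConfig();                                        // copy the current config
cfg.setCompactionStrategy(search::CompactionStrategy(0.05, 0.05));   // enable tighter compaction
attr->update_config(cfg);                                            // applies grow + compaction strategy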
diff --git a/storage/src/vespa/storage/bucketdb/bucketcopy.h b/storage/src/vespa/storage/bucketdb/bucketcopy.h
index 94a1e63e53e..3e93e4594a6 100644
--- a/storage/src/vespa/storage/bucketdb/bucketcopy.h
+++ b/storage/src/vespa/storage/bucketdb/bucketcopy.h
@@ -28,9 +28,9 @@ public:
{
}
- bool trusted() const { return _flags & TRUSTED; }
+ bool trusted() const noexcept { return _flags & TRUSTED; }
- BucketCopy& setTrusted(bool val = true) {
+ BucketCopy& setTrusted(bool val = true) noexcept {
if (!val) {
clearTrusted();
} else {
@@ -40,46 +40,44 @@ public:
return *this;
}
- void clearTrusted() { _flags &= ~TRUSTED; }
+ void clearTrusted() noexcept { _flags &= ~TRUSTED; }
- bool valid() const { return getBucketInfo().valid(); }
- bool empty() const { return getBucketInfo().empty(); }
- bool wasRecentlyCreated() const {
+ bool valid() const noexcept { return getBucketInfo().valid(); }
+ bool empty() const noexcept { return getBucketInfo().empty(); }
+ bool wasRecentlyCreated() const noexcept {
return (getChecksum() == 1
&& getDocumentCount() == 0
&& getTotalDocumentSize() == 0);
}
- static BucketCopy recentlyCreatedCopy(uint64_t timestamp, uint16_t nodeIdx)
- {
+ static BucketCopy recentlyCreatedCopy(uint64_t timestamp, uint16_t nodeIdx) noexcept {
return BucketCopy(timestamp, nodeIdx, api::BucketInfo(1, 0, 0, 0, 0));
}
- uint16_t getNode() const { return _node; }
- uint64_t getTimestamp() const { return _timestamp; }
+ uint16_t getNode() const noexcept { return _node; }
+ uint64_t getTimestamp() const noexcept { return _timestamp; }
- uint32_t getChecksum() const { return _info.getChecksum(); }
- uint32_t getDocumentCount() const { return _info.getDocumentCount(); }
- uint32_t getTotalDocumentSize() const
- { return _info.getTotalDocumentSize(); }
- uint32_t getMetaCount() const { return _info.getMetaCount(); }
- uint32_t getUsedFileSize() const { return _info.getUsedFileSize(); }
- bool active() const { return _info.isActive(); }
- bool ready() const { return _info.isReady(); }
+ uint32_t getChecksum() const noexcept { return _info.getChecksum(); }
+ uint32_t getDocumentCount() const noexcept { return _info.getDocumentCount(); }
+ uint32_t getTotalDocumentSize() const noexcept { return _info.getTotalDocumentSize(); }
+ uint32_t getMetaCount() const noexcept { return _info.getMetaCount(); }
+ uint32_t getUsedFileSize() const noexcept { return _info.getUsedFileSize(); }
+ bool active() const noexcept { return _info.isActive(); }
+ bool ready() const noexcept { return _info.isReady(); }
- const api::BucketInfo& getBucketInfo() const { return _info; }
+ const api::BucketInfo& getBucketInfo() const noexcept { return _info; }
- void setBucketInfo(uint64_t timestamp, const api::BucketInfo& bInfo) {
+ void setBucketInfo(uint64_t timestamp, const api::BucketInfo& bInfo) noexcept {
_info = bInfo;
_timestamp = timestamp;
}
- void setActive(bool setactive) {
+ void setActive(bool setactive) noexcept {
_info.setActive(setactive);
}
bool consistentWith(const BucketCopy& other,
- bool countInvalidAsConsistent = false) const
+ bool countInvalidAsConsistent = false) const noexcept
{
// If both are valid, check checksum and doc count.
if (valid() && other.valid()) {
@@ -94,7 +92,7 @@ public:
std::string toString() const;
- bool operator==(const BucketCopy& other) const {
+ bool operator==(const BucketCopy& other) const noexcept {
return
getBucketInfo() == other.getBucketInfo() &&
_flags == other._flags;
diff --git a/storage/src/vespa/storage/bucketdb/bucketmanager.cpp b/storage/src/vespa/storage/bucketdb/bucketmanager.cpp
index 5be6f310c71..2d70ee8d3ba 100644
--- a/storage/src/vespa/storage/bucketdb/bucketmanager.cpp
+++ b/storage/src/vespa/storage/bucketdb/bucketmanager.cpp
@@ -166,17 +166,17 @@ namespace {
uint64_t active;
uint64_t ready;
- Count() : docs(0), bytes(0), buckets(0), active(0), ready(0) {}
+ Count() noexcept : docs(0), bytes(0), buckets(0), active(0), ready(0) {}
};
Count count;
uint32_t lowestUsedBit;
- MetricsUpdater()
+ MetricsUpdater() noexcept
: count(), lowestUsedBit(58) {}
void operator()(document::BucketId::Type bucketId,
- const StorBucketDatabase::Entry& data)
+ const StorBucketDatabase::Entry& data) noexcept
{
document::BucketId bucket(
document::BucketId::keyToBucketId(bucketId));
@@ -198,7 +198,7 @@ namespace {
}
};
- void add(const MetricsUpdater& rhs) {
+ void add(const MetricsUpdater& rhs) noexcept {
auto& d = count;
auto& s = rhs.count;
d.buckets += s.buckets;
diff --git a/storage/src/vespa/storage/distributor/pending_bucket_space_db_transition_entry.h b/storage/src/vespa/storage/distributor/pending_bucket_space_db_transition_entry.h
index 124ee1bdf45..785419e78cf 100644
--- a/storage/src/vespa/storage/distributor/pending_bucket_space_db_transition_entry.h
+++ b/storage/src/vespa/storage/distributor/pending_bucket_space_db_transition_entry.h
@@ -8,7 +8,7 @@ namespace storage::distributor::dbtransition {
struct Entry {
Entry(const document::BucketId& bid,
- const BucketCopy& copy_)
+ const BucketCopy& copy_) noexcept
: bucket_key(bid.toKey()),
copy(copy_)
{}
diff --git a/storage/src/vespa/storage/distributor/pendingmessagetracker.h b/storage/src/vespa/storage/distributor/pendingmessagetracker.h
index 51971a276b4..13d83157150 100644
--- a/storage/src/vespa/storage/distributor/pendingmessagetracker.h
+++ b/storage/src/vespa/storage/distributor/pendingmessagetracker.h
@@ -196,18 +196,6 @@ private:
// to be present for that exact purpose.
mutable std::mutex _lock;
- /**
- * Increment latency and operation count stats for the node the message
- * was sent towards based on the registered send time and the current time.
- *
- * In the event that system time has moved backwards across sending a
- * command and reciving its reply, the latency will not be recorded but
- * the total number of messages will increase.
- *
- * _lock MUST be held upon invocation.
- */
- void updateNodeStatsOnReply(const MessageEntry& entry);
-
void getStatusStartPage(std::ostream& out) const;
void getStatusPerNode(std::ostream& out) const;
void getStatusPerBucket(std::ostream& out) const;
diff --git a/storage/src/vespa/storage/persistence/asynchandler.cpp b/storage/src/vespa/storage/persistence/asynchandler.cpp
index b8ed6b8ec91..7f8da8e76e7 100644
--- a/storage/src/vespa/storage/persistence/asynchandler.cpp
+++ b/storage/src/vespa/storage/persistence/asynchandler.cpp
@@ -104,7 +104,7 @@ AsyncHandler::handleRunTask(RunTaskCommand& cmd, MessageTracker::UP tracker) con
});
spi::Bucket bucket(cmd.getBucket());
auto onDone = std::make_unique<ResultTaskOperationDone>(_sequencedExecutor, cmd.getBucketId(), std::move(task));
- cmd.task().run(bucket, std::make_shared<vespalib::KeepAlive<decltype(onDone)>>(std::move(onDone)));
+ cmd.run(bucket, std::make_shared<vespalib::KeepAlive<decltype(onDone)>>(std::move(onDone)));
return tracker;
}
diff --git a/storage/src/vespa/storage/persistence/filestorage/filestormanager.cpp b/storage/src/vespa/storage/persistence/filestorage/filestormanager.cpp
index a46b4205570..c71a7fee424 100644
--- a/storage/src/vespa/storage/persistence/filestorage/filestormanager.cpp
+++ b/storage/src/vespa/storage/persistence/filestorage/filestormanager.cpp
@@ -1,7 +1,7 @@
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
-#include "filestorhandlerimpl.h"
#include "filestormanager.h"
+#include "filestorhandlerimpl.h"
#include <vespa/storage/bucketdb/minimumusedbitstracker.h>
#include <vespa/storage/common/bucketmessages.h>
#include <vespa/storage/common/content_bucket_space_repo.h>
@@ -76,6 +76,8 @@ FileStorManager(const config::ConfigUri & configUri, spi::PersistenceProvider& p
_configFetcher(configUri.getContext()),
_use_async_message_handling_on_schedule(false),
_metrics(std::make_unique<FileStorMetrics>()),
+ _filestorHandler(),
+ _sequencedExecutor(),
_closed(false),
_lock(),
_host_info_reporter(_component.getStateUpdater()),
@@ -810,6 +812,8 @@ FileStorManager::sendUp(const std::shared_ptr<api::StorageMessage>& msg)
void FileStorManager::onClose()
{
LOG(debug, "Start closing");
+ _bucketExecutorRegistration.reset();
+ _resource_usage_listener_registration.reset();
// Avoid getting config during shutdown
_configFetcher.close();
LOG(debug, "Closed _configFetcher.");
@@ -978,7 +982,10 @@ FileStorManager::execute(const spi::Bucket &bucket, std::unique_ptr<spi::BucketT
StorBucketDatabase::WrappedEntry entry(_component.getBucketDatabase(bucket.getBucketSpace()).get(
bucket.getBucketId(), "FileStorManager::execute"));
if (entry.exist()) {
- _filestorHandler->schedule(std::make_shared<RunTaskCommand>(bucket, std::move(task)));
+ auto cmd = std::make_shared<RunTaskCommand>(bucket, std::move(task));
+ if ( ! _filestorHandler->schedule(cmd) ) {
+ task = cmd->stealTask();
+ }
}
return task;
}
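
The execute() hunk above changes the scheduling contract: a command that schedule() refuses (for example while the node is closing) has its task stolen back, so the caller keeps ownership instead of the work being silently dropped. A minimal, self-contained sketch of that pattern, using hypothetical stand-in types rather than the real FileStorManager/RunTaskCommand:

    #include <iostream>
    #include <memory>
    #include <utility>

    // Illustrative stand-ins only; the real Vespa classes have richer interfaces.
    struct BucketTask {
        virtual ~BucketTask() = default;
        virtual void run() = 0;
    };

    struct RunTaskCommandSketch {
        explicit RunTaskCommandSketch(std::unique_ptr<BucketTask> task) : _task(std::move(task)) {}
        std::unique_ptr<BucketTask> stealTask() { return std::move(_task); }
    private:
        std::unique_ptr<BucketTask> _task;
    };

    struct HandlerSketch {
        bool accepting = true; // e.g. flips to false while the node is closing
        // Returns false when the command cannot be queued.
        bool schedule(const std::shared_ptr<RunTaskCommandSketch>&) { return accepting; }
    };

    // Mirrors the shape of the new FileStorManager::execute(): a task that could not
    // be scheduled is handed back to the caller rather than being lost.
    std::unique_ptr<BucketTask> execute(HandlerSketch& handler, std::unique_ptr<BucketTask> task) {
        auto cmd = std::make_shared<RunTaskCommandSketch>(std::move(task));
        if (!handler.schedule(cmd)) {
            task = cmd->stealTask();
        }
        return task; // empty if scheduled, non-empty if the caller still owns the work
    }

    struct NoopTask : BucketTask { void run() override {} };

    int main() {
        HandlerSketch handler;
        handler.accepting = false; // simulate shutdown
        auto leftover = execute(handler, std::make_unique<NoopTask>());
        std::cout << (leftover ? "not scheduled, task returned to caller\n" : "scheduled\n");
    }

The matching null guard added to RunTaskCommand::run() in messages.cpp below is what makes a stolen-from command safe to execute afterwards.
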
diff --git a/storage/src/vespa/storage/persistence/filestorage/service_layer_host_info_reporter.h b/storage/src/vespa/storage/persistence/filestorage/service_layer_host_info_reporter.h
index be0abc94987..b58e047d0af 100644
--- a/storage/src/vespa/storage/persistence/filestorage/service_layer_host_info_reporter.h
+++ b/storage/src/vespa/storage/persistence/filestorage/service_layer_host_info_reporter.h
@@ -25,7 +25,7 @@ public:
ServiceLayerHostInfoReporter(const ServiceLayerHostInfoReporter&) = delete;
ServiceLayerHostInfoReporter& operator=(const ServiceLayerHostInfoReporter&) = delete;
- ~ServiceLayerHostInfoReporter();
+ ~ServiceLayerHostInfoReporter() override;
void report(vespalib::JsonStream& output) override;
const spi::ResourceUsage &get_old_resource_usage() noexcept { return _old_resource_usage; }
diff --git a/storage/src/vespa/storage/persistence/messages.cpp b/storage/src/vespa/storage/persistence/messages.cpp
index 7ccb3ee895d..beae22a429b 100644
--- a/storage/src/vespa/storage/persistence/messages.cpp
+++ b/storage/src/vespa/storage/persistence/messages.cpp
@@ -187,9 +187,7 @@ RunTaskCommand::RunTaskCommand(const spi::Bucket &bucket, std::unique_ptr<spi::B
: api::InternalCommand(ID),
_task(std::move(task)),
_bucket(bucket)
-{
- assert(_task);
-}
+{ }
RunTaskCommand::~RunTaskCommand() = default;
@@ -203,6 +201,14 @@ RunTaskCommand::print(std::ostream& out, bool verbose, const std::string& indent
}
}
+void
+RunTaskCommand::run(const spi::Bucket & bucket, std::shared_ptr<vespalib::IDestructorCallback> onComplete)
+{
+ if (_task) {
+ _task->run(bucket, std::move(onComplete));
+ }
+}
+
RunTaskReply::RunTaskReply(const RunTaskCommand& cmd)
: api::InternalReply(ID, cmd)
{}
diff --git a/storage/src/vespa/storage/persistence/messages.h b/storage/src/vespa/storage/persistence/messages.h
index 043747d10d2..50834782f39 100644
--- a/storage/src/vespa/storage/persistence/messages.h
+++ b/storage/src/vespa/storage/persistence/messages.h
@@ -249,11 +249,10 @@ public:
document::Bucket getBucket() const override { return _bucket.getBucket(); }
std::unique_ptr<api::StorageReply> makeReply() override;
- spi::BucketTask & task() & {
- return *_task;
- }
+ void run(const spi::Bucket & bucket, std::shared_ptr<vespalib::IDestructorCallback> onComplete);
void print(std::ostream& out, bool verbose, const std::string& indent) const override;
+ std::unique_ptr<spi::BucketTask> stealTask() { return std::move(_task); }
private:
std::unique_ptr<spi::BucketTask> _task;
spi::Bucket _bucket;
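
Taken together, the messages.cpp and messages.h hunks replace the task() accessor with a run()/stealTask() pair, and the dropped assert means a command may legitimately carry no task. A hedged sketch of the resulting contract, using simplified stand-in types rather than the real spi::BucketTask/RunTaskCommand:

    #include <iostream>
    #include <memory>
    #include <utility>

    // Simplified stand-ins; the real classes take a spi::Bucket and a completion callback.
    struct BucketSketch {};
    struct BucketTaskSketch {
        virtual ~BucketTaskSketch() = default;
        virtual void run(const BucketSketch&) = 0;
    };

    class RunTaskCommandSketch {
    public:
        explicit RunTaskCommandSketch(std::unique_ptr<BucketTaskSketch> task) : _task(std::move(task)) {}
        // The guard replaces the old assert: running a command without a task is a no-op.
        void run(const BucketSketch& bucket) {
            if (_task) {
                _task->run(bucket);
            }
        }
        std::unique_ptr<BucketTaskSketch> stealTask() { return std::move(_task); }
    private:
        std::unique_ptr<BucketTaskSketch> _task;
    };

    struct PrintTask : BucketTaskSketch {
        void run(const BucketSketch&) override { std::cout << "task ran\n"; }
    };

    int main() {
        BucketSketch bucket;
        RunTaskCommandSketch cmd(std::make_unique<PrintTask>());
        cmd.run(bucket);               // executes the task once
        auto stolen = cmd.stealTask(); // e.g. after a failed schedule()
        cmd.run(bucket);               // now a safe no-op instead of dereferencing null
    }

This is also why AsyncHandler::handleRunTask above can call cmd.run(...) directly instead of dereferencing cmd.task().
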
diff --git a/storageapi/src/vespa/storageapi/buckets/bucketinfo.cpp b/storageapi/src/vespa/storageapi/buckets/bucketinfo.cpp
index 9e4e48d67f4..4c3b290d4d7 100644
--- a/storageapi/src/vespa/storageapi/buckets/bucketinfo.cpp
+++ b/storageapi/src/vespa/storageapi/buckets/bucketinfo.cpp
@@ -19,8 +19,7 @@ BucketInfo::BucketInfo() noexcept
_active(false)
{}
-BucketInfo::BucketInfo(uint32_t checksum, uint32_t docCount,
- uint32_t totDocSize) noexcept
+BucketInfo::BucketInfo(uint32_t checksum, uint32_t docCount, uint32_t totDocSize) noexcept
: _lastModified(0),
_checksum(checksum),
_docCount(docCount),
@@ -73,7 +72,7 @@ BucketInfo::BucketInfo(uint32_t checksum, uint32_t docCount,
{}
bool
-BucketInfo::operator==(const BucketInfo& info) const
+BucketInfo::operator==(const BucketInfo& info) const noexcept
{
return (_checksum == info._checksum &&
_docCount == info._docCount &&
diff --git a/storageapi/src/vespa/storageapi/buckets/bucketinfo.h b/storageapi/src/vespa/storageapi/buckets/bucketinfo.h
index 5b2de4b4d61..7e6e7a2aed2 100644
--- a/storageapi/src/vespa/storageapi/buckets/bucketinfo.h
+++ b/storageapi/src/vespa/storageapi/buckets/bucketinfo.h
@@ -42,37 +42,37 @@ public:
uint32_t metaCount, uint32_t usedFileSize,
bool ready, bool active, Timestamp lastModified) noexcept;
- Timestamp getLastModified() const { return _lastModified; }
- uint32_t getChecksum() const { return _checksum; }
- uint32_t getDocumentCount() const { return _docCount; }
- uint32_t getTotalDocumentSize() const { return _totDocSize; }
- uint32_t getMetaCount() const { return _metaCount; }
- uint32_t getUsedFileSize() const { return _usedFileSize; }
- bool isReady() const { return _ready; }
- bool isActive() const { return _active; }
+ Timestamp getLastModified() const noexcept { return _lastModified; }
+ uint32_t getChecksum() const noexcept { return _checksum; }
+ uint32_t getDocumentCount() const noexcept { return _docCount; }
+ uint32_t getTotalDocumentSize() const noexcept { return _totDocSize; }
+ uint32_t getMetaCount() const noexcept { return _metaCount; }
+ uint32_t getUsedFileSize() const noexcept { return _usedFileSize; }
+ bool isReady() const noexcept { return _ready; }
+ bool isActive() const noexcept { return _active; }
- void setChecksum(uint32_t crc) { _checksum = crc; }
- void setDocumentCount(uint32_t count) { _docCount = count; }
- void setTotalDocumentSize(uint32_t size) { _totDocSize = size; }
- void setMetaCount(uint32_t count) { _metaCount = count; }
- void setUsedFileSize(uint32_t size) { _usedFileSize = size; }
- void setReady(bool ready = true) { _ready = ready; }
- void setActive(bool active = true) { _active = active; }
- void setLastModified(Timestamp lastModified) { _lastModified = lastModified; }
+ void setChecksum(uint32_t crc) noexcept { _checksum = crc; }
+ void setDocumentCount(uint32_t count) noexcept { _docCount = count; }
+ void setTotalDocumentSize(uint32_t size) noexcept { _totDocSize = size; }
+ void setMetaCount(uint32_t count) noexcept { _metaCount = count; }
+ void setUsedFileSize(uint32_t size) noexcept { _usedFileSize = size; }
+ void setReady(bool ready = true) noexcept { _ready = ready; }
+ void setActive(bool active = true) noexcept { _active = active; }
+ void setLastModified(Timestamp lastModified) noexcept { _lastModified = lastModified; }
/**
* Only compare checksum, total document count and document
* size, not meta count or used file size.
*/
- bool equalDocumentInfo(const BucketInfo& other) const {
+ bool equalDocumentInfo(const BucketInfo& other) const noexcept {
return (_checksum == other._checksum
&& _docCount == other._docCount
&& _totDocSize == other._totDocSize);
}
- bool operator==(const BucketInfo& info) const;
- bool valid() const { return (_docCount > 0 || _totDocSize == 0); }
- bool empty() const {
+ bool operator==(const BucketInfo& info) const noexcept;
+ bool valid() const noexcept { return (_docCount > 0 || _totDocSize == 0); }
+ bool empty() const noexcept {
return _metaCount == 0 && _usedFileSize == 0 && _checksum == 0;
}
vespalib::string toString() const;
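
The BucketInfo changes are a mechanical noexcept sweep, but the annotation is checkable and useful to generic code. A small compile-time sketch (with a hypothetical stand-in struct, not the real BucketInfo) of what the annotation guarantees:

    #include <cstdint>
    #include <utility>

    // Hypothetical stand-in mirroring the annotated accessors and comparison.
    struct InfoSketch {
        uint32_t docCount = 0;
        uint32_t getDocumentCount() const noexcept { return docCount; }
        bool operator==(const InfoSketch& rhs) const noexcept { return docCount == rhs.docCount; }
    };

    // Both expressions are statically known not to throw, which noexcept-propagating
    // containers and algorithms can take advantage of.
    static_assert(noexcept(std::declval<const InfoSketch&>().getDocumentCount()),
                  "accessor is noexcept");
    static_assert(noexcept(std::declval<const InfoSketch&>() == std::declval<const InfoSketch&>()),
                  "comparison is noexcept");

    int main() { return 0; }
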
diff --git a/vespalib/src/vespa/vespalib/gtest/gtest.h b/vespalib/src/vespa/vespalib/gtest/gtest.h
index 87362687103..f4aaa670e76 100644
--- a/vespalib/src/vespa/vespalib/gtest/gtest.h
+++ b/vespalib/src/vespa/vespalib/gtest/gtest.h
@@ -26,3 +26,16 @@ main(int argc, char* argv[]) \
#else
#define VESPA_GTEST_TYPED_TEST_SUITE TYPED_TEST_CASE
#endif
+
+#define VESPA_EXPECT_EXCEPTION(TRY_BLOCK, EXCEPTION_TYPE, MESSAGE) \
+ try { \
+ TRY_BLOCK; \
+ FAIL() << "exception '" << MESSAGE << "' not thrown at all!"; \
+ } catch(EXCEPTION_TYPE& e) { \
+ EXPECT_TRUE(contains(stringref(e.what()), stringref(MESSAGE))) << \
+ " e.what(): " << e.what() << "\n"; \
+ } catch(...) { \
+ FAIL() << "wrong exception type thrown"; \
+ throw; \
+ }
+
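
A usage sketch for the new macro. The function under test is invented for illustration, and the macro body refers to stringref and contains() unqualified; real Vespa tests get those from vespalib, so minimal local stand-ins are defined here only to keep the sketch self-contained. Link against gtest_main (or provide a main()) to run it.

    #include <vespa/vespalib/gtest/gtest.h>
    #include <stdexcept>
    #include <string_view>

    // Local stand-ins for the vespalib helpers the macro body expects to find in scope.
    using stringref = std::string_view;
    inline bool contains(stringref text, stringref key) {
        return text.find(key) != stringref::npos;
    }

    namespace {

    // Illustrative function, not part of Vespa.
    void require_non_negative(int value) {
        if (value < 0) {
            throw std::invalid_argument("value must be non-negative");
        }
    }

    TEST(ExpectExceptionSketchTest, rejects_negative_values) {
        // Fails if nothing is thrown, if a different exception type is thrown,
        // or if the message does not contain the given substring.
        VESPA_EXPECT_EXCEPTION(require_non_negative(-1), std::invalid_argument, "non-negative");
    }

    } // namespace
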
diff --git a/vespalib/src/vespa/vespalib/util/executor.h b/vespalib/src/vespa/vespalib/util/executor.h
index 57ad28344b9..ce610f4e84c 100644
--- a/vespalib/src/vespa/vespalib/util/executor.h
+++ b/vespalib/src/vespa/vespalib/util/executor.h
@@ -20,7 +20,7 @@ public:
struct Task {
typedef std::unique_ptr<Task> UP;
virtual void run() = 0;
- virtual ~Task() {}
+ virtual ~Task() = default;
};
enum class OptimizeFor {LATENCY, THROUGHPUT, ADAPTIVE};
@@ -41,7 +41,7 @@ public:
* In case you have a lazy executor that naps in between.
**/
virtual void wakeup() = 0;
- virtual ~Executor() =default;
+ virtual ~Executor() = default;
};
} // namespace vespalib
diff --git a/vespalib/src/vespa/vespalib/util/growstrategy.h b/vespalib/src/vespa/vespalib/util/growstrategy.h
index bb3b9196997..c7ada75f317 100644
--- a/vespalib/src/vespa/vespalib/util/growstrategy.h
+++ b/vespalib/src/vespa/vespalib/util/growstrategy.h
@@ -2,7 +2,7 @@
#pragma once
-#include <cstdint>
+#include <cstddef>
namespace vespalib {
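
The include swap here presumably reflects that the declarations in this header need std::size_t (provided by <cstddef>) rather than the fixed-width integer types from <cstdint>. A tiny illustrative sketch of that distinction, not the real GrowStrategy:

    #include <cstddef> // std::size_t

    // Hypothetical example: size-based growth parameters want std::size_t,
    // so <cstddef> is the include that actually matters.
    struct GrowSketch {
        std::size_t initial = 16;
        std::size_t next(std::size_t current) const noexcept { return current + current / 2; }
    };

    int main() {
        GrowSketch g;
        return static_cast<int>(g.next(g.initial)); // 24
    }
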
diff --git a/yolean/src/main/java/com/yahoo/yolean/chain/Chain.java b/yolean/src/main/java/com/yahoo/yolean/chain/Chain.java
index e57e83c644f..4d638847ed9 100644
--- a/yolean/src/main/java/com/yahoo/yolean/chain/Chain.java
+++ b/yolean/src/main/java/com/yahoo/yolean/chain/Chain.java
@@ -3,10 +3,8 @@ package com.yahoo.yolean.chain;
import com.google.common.collect.ImmutableList;
-import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
-import java.util.Collections;
import java.util.Iterator;
import java.util.List;