-rw-r--r--  config-model/src/main/java/com/yahoo/schema/document/Attribute.java | 6
-rw-r--r--  config-model/src/main/java/com/yahoo/schema/processing/Processing.java | 2
-rw-r--r--  config-model/src/main/java/com/yahoo/schema/processing/SingleValueOnlyAttributeValidator.java (renamed from config-model/src/main/java/com/yahoo/schema/processing/BoolAttributeValidator.java) | 9
-rw-r--r--  config-model/src/test/java/com/yahoo/schema/processing/BoolAttributeValidatorTestCase.java | 50
-rw-r--r--  config-model/src/test/java/com/yahoo/schema/processing/SingleValueOnlyAttributeValidatorTestCase.java | 73
-rw-r--r--  configdefinitions/src/vespa/attributes.def | 2
-rw-r--r--  searchcore/src/vespa/searchcore/config/proton.def | 84
-rw-r--r--  searchlib/src/tests/attribute/attributemanager/attributemanager_test.cpp | 2
-rw-r--r--  searchlib/src/vespa/searchlib/attribute/configconverter.cpp | 1
-rw-r--r--  vespajlib/src/main/java/com/yahoo/slime/ArrayValue.java | 108
-rw-r--r--  vespajlib/src/test/java/com/yahoo/slime/ArrayValueTestCase.java | 188
11 files changed, 416 insertions, 109 deletions
diff --git a/config-model/src/main/java/com/yahoo/schema/document/Attribute.java b/config-model/src/main/java/com/yahoo/schema/document/Attribute.java
index 34e86cbf4a8..70fcf64dff3 100644
--- a/config-model/src/main/java/com/yahoo/schema/document/Attribute.java
+++ b/config-model/src/main/java/com/yahoo/schema/document/Attribute.java
@@ -100,7 +100,8 @@ public final class Attribute implements Cloneable, Serializable {
BOOL("bool", "BOOL"),
PREDICATE("predicate", "PREDICATE"),
TENSOR("tensor", "TENSOR"),
- REFERENCE("reference", "REFERENCE");
+ REFERENCE("reference", "REFERENCE"),
+ RAW("raw", "RAW");
private final String myName; // different from what name() returns.
private final String exportAttributeTypeName;
@@ -290,7 +291,7 @@ public final class Attribute implements Cloneable, Serializable {
} else if (fval instanceof ByteFieldValue) {
return Type.BYTE;
} else if (fval instanceof Raw) {
- return Type.BYTE;
+ return Type.RAW;
} else if (fval instanceof PredicateFieldValue) {
return Type.PREDICATE;
} else if (fval instanceof TensorFieldValue) {
@@ -344,6 +345,7 @@ public final class Attribute implements Cloneable, Serializable {
case PREDICATE -> DataType.PREDICATE;
case TENSOR -> DataType.getTensor(tensorType.orElseThrow(IllegalStateException::new));
case REFERENCE-> createReferenceDataType();
+ case RAW -> DataType.RAW;
default -> throw new IllegalArgumentException("Unknown attribute type " + attributeType);
};
}
diff --git a/config-model/src/main/java/com/yahoo/schema/processing/Processing.java b/config-model/src/main/java/com/yahoo/schema/processing/Processing.java
index 8f7e8daeed0..df4b0d0d941 100644
--- a/config-model/src/main/java/com/yahoo/schema/processing/Processing.java
+++ b/config-model/src/main/java/com/yahoo/schema/processing/Processing.java
@@ -89,7 +89,7 @@ public class Processing {
OnnxModelConfigGenerator::new,
OnnxModelTypeResolver::new,
RankingExpressionTypeResolver::new,
- BoolAttributeValidator::new,
+ SingleValueOnlyAttributeValidator::new,
PagedAttributeValidator::new,
// These should be last:
IndexingValidation::new,
diff --git a/config-model/src/main/java/com/yahoo/schema/processing/BoolAttributeValidator.java b/config-model/src/main/java/com/yahoo/schema/processing/SingleValueOnlyAttributeValidator.java
index bdb1eed4b10..b2786e6c785 100644
--- a/config-model/src/main/java/com/yahoo/schema/processing/BoolAttributeValidator.java
+++ b/config-model/src/main/java/com/yahoo/schema/processing/SingleValueOnlyAttributeValidator.java
@@ -14,9 +14,9 @@ import com.yahoo.vespa.model.container.search.QueryProfiles;
*
* @author geirst
*/
-public class BoolAttributeValidator extends Processor {
+public class SingleValueOnlyAttributeValidator extends Processor {
- public BoolAttributeValidator(Schema schema, DeployLogger deployLogger, RankProfileRegistry rankProfileRegistry, QueryProfiles queryProfiles) {
+ public SingleValueOnlyAttributeValidator(Schema schema, DeployLogger deployLogger, RankProfileRegistry rankProfileRegistry, QueryProfiles queryProfiles) {
super(schema, deployLogger, rankProfileRegistry, queryProfiles);
}
@@ -27,9 +27,10 @@ public class BoolAttributeValidator extends Processor {
if (attribute == null) {
continue;
}
- if (attribute.getType().equals(Attribute.Type.BOOL) &&
+ if ((attribute.getType().equals(Attribute.Type.BOOL) ||
+ attribute.getType().equals(Attribute.Type.RAW)) &&
!attribute.getCollectionType().equals(Attribute.CollectionType.SINGLE)) {
- fail(schema, field, "Only single value bool attribute fields are supported");
+ fail(schema, field, "Only single value " + attribute.getType().getName() + " attribute fields are supported");
}
}
}
diff --git a/config-model/src/test/java/com/yahoo/schema/processing/BoolAttributeValidatorTestCase.java b/config-model/src/test/java/com/yahoo/schema/processing/BoolAttributeValidatorTestCase.java
deleted file mode 100644
index f19b1f43115..00000000000
--- a/config-model/src/test/java/com/yahoo/schema/processing/BoolAttributeValidatorTestCase.java
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
-package com.yahoo.schema.processing;
-
-import com.yahoo.schema.parser.ParseException;
-import org.junit.jupiter.api.Test;
-
-import static com.yahoo.schema.ApplicationBuilder.createFromString;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static com.yahoo.config.model.test.TestUtil.joinLines;
-import static org.junit.jupiter.api.Assertions.fail;
-
-/**
- * @author geirst
- */
-public class BoolAttributeValidatorTestCase {
-
- @Test
- void array_of_bool_attribute_is_not_supported() throws ParseException {
- try {
- createFromString(getSd("field b type array<bool> { indexing: attribute }"));
- fail("Expected exception");
- }
- catch (IllegalArgumentException e) {
- assertEquals("For schema 'test', field 'b': Only single value bool attribute fields are supported",
- e.getMessage());
- }
- }
-
- @Test
- void weigtedset_of_bool_attribute_is_not_supported() throws ParseException {
- try {
- createFromString(getSd("field b type weightedset<bool> { indexing: attribute }"));
- fail("Expected exception");
- }
- catch (IllegalArgumentException e) {
- assertEquals("For schema 'test', field 'b': Only single value bool attribute fields are supported",
- e.getMessage());
- }
- }
-
- private String getSd(String field) {
- return joinLines(
- "schema test {",
- " document test {",
- " " + field,
- " }",
- "}");
- }
-
-}
diff --git a/config-model/src/test/java/com/yahoo/schema/processing/SingleValueOnlyAttributeValidatorTestCase.java b/config-model/src/test/java/com/yahoo/schema/processing/SingleValueOnlyAttributeValidatorTestCase.java
new file mode 100644
index 00000000000..a7f4125a537
--- /dev/null
+++ b/config-model/src/test/java/com/yahoo/schema/processing/SingleValueOnlyAttributeValidatorTestCase.java
@@ -0,0 +1,73 @@
+// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+package com.yahoo.schema.processing;
+
+import com.yahoo.schema.parser.ParseException;
+import org.junit.jupiter.api.Test;
+
+import static com.yahoo.schema.ApplicationBuilder.createFromString;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static com.yahoo.config.model.test.TestUtil.joinLines;
+import static org.junit.jupiter.api.Assertions.fail;
+
+/**
+ * @author geirst
+ */
+public class SingleValueOnlyAttributeValidatorTestCase {
+
+ private static void array_attribute_is_not_supported(String type) throws ParseException {
+ try {
+ createFromString(getSd("field b type array<" + type + "> { indexing: attribute }"));
+ fail("Expected exception");
+ }
+ catch (IllegalArgumentException e) {
+ assertEquals("For schema 'test', field 'b': Only single value " + type + " attribute fields are supported",
+ e.getMessage());
+ }
+ }
+
+ private static void weightedset_attribute_is_not_supported(String type) throws ParseException {
+ try {
+ createFromString(getSd("field b type weightedset<" + type + "> { indexing: attribute }"));
+ fail("Expected exception");
+ }
+ catch (IllegalArgumentException e) {
+ if (type.equals("raw")) {
+ assertEquals("weightedset of complex type '[type BUILTIN] {raw}' is not supported",
+ e.getMessage());
+ } else {
+ assertEquals("For schema 'test', field 'b': Only single value " + type + " attribute fields are supported",
+ e.getMessage());
+ }
+ }
+ }
+
+ @Test
+ void array_of_bool_attribute_is_not_supported() throws ParseException {
+ array_attribute_is_not_supported("bool");
+ }
+
+ @Test
+ void weightedset_of_bool_attribute_is_not_supported() throws ParseException {
+ weightedset_attribute_is_not_supported("bool");
+ }
+
+ @Test
+ void array_of_raw_attribute_is_not_supported() throws ParseException {
+ array_attribute_is_not_supported("raw");
+ }
+
+ @Test
+ void weightedset_of_raw_attribute_is_not_supported() throws ParseException {
+ weightedset_attribute_is_not_supported("raw");
+ }
+
+ private static String getSd(String field) {
+ return joinLines(
+ "schema test {",
+ " document test {",
+ " " + field,
+ " }",
+ "}");
+ }
+
+}
diff --git a/configdefinitions/src/vespa/attributes.def b/configdefinitions/src/vespa/attributes.def
index 31f2bfc281d..2810284b3a3 100644
--- a/configdefinitions/src/vespa/attributes.def
+++ b/configdefinitions/src/vespa/attributes.def
@@ -2,7 +2,7 @@
namespace=vespa.config.search
attribute[].name string
-attribute[].datatype enum { STRING, BOOL, UINT2, UINT4, INT8, INT16, INT32, INT64, FLOAT16, FLOAT, DOUBLE, PREDICATE, TENSOR, REFERENCE, NONE } default=NONE
+attribute[].datatype enum { STRING, BOOL, UINT2, UINT4, INT8, INT16, INT32, INT64, FLOAT16, FLOAT, DOUBLE, PREDICATE, TENSOR, REFERENCE, RAW, NONE } default=NONE
attribute[].collectiontype enum { SINGLE, ARRAY, WEIGHTEDSET } default=SINGLE
attribute[].dictionary.type enum { BTREE, HASH, BTREE_AND_HASH } default = BTREE
attribute[].dictionary.match enum { CASE_SENSITIVE, CASE_INSENSITIVE, CASED, UNCASED } default=UNCASED
diff --git a/searchcore/src/vespa/searchcore/config/proton.def b/searchcore/src/vespa/searchcore/config/proton.def
index 375ff0f2012..e85e6c58e11 100644
--- a/searchcore/src/vespa/searchcore/config/proton.def
+++ b/searchcore/src/vespa/searchcore/config/proton.def
@@ -129,10 +129,6 @@ indexing.write.io enum {NORMAL, OSYNC, DIRECTIO} default=DIRECTIO restart
## Control io options during read both under dump and fusion.
indexing.read.io enum {NORMAL, DIRECTIO} default=DIRECTIO restart
-## Overrides the number of threads used for writing fields across all document dbs.
-## See feeding.concurrency for details.
-indexing.threads int default=1 restart
-
## Option to specify what is most important during indexing.
## This is experimental and will most likely be temporary.
indexing.optimize enum {LATENCY, THROUGHPUT, ADAPTIVE} default=THROUGHPUT restart
@@ -141,9 +137,6 @@ indexing.optimize enum {LATENCY, THROUGHPUT, ADAPTIVE} default=THROUGHPUT restar
## indexing threads. Only used when visibility delay is zero.
indexing.tasklimit int default=-1000
-## Deprecated and ignored, will soon go away
-indexing.semiunboundtasklimit int default = 1000
-
## Kind of watermark for when to activate extra manpower
## Utilized if optimize is set to either THROUGHPUT or ADAPTIVE
indexing.kind_of_watermark int default = 0 restart
@@ -151,7 +144,6 @@ indexing.kind_of_watermark int default = 0 restart
## Controls minimum reaction time in seconds if using THROUGHPUT
indexing.reactiontime double default = 0.001 restart
-
## How long a freshly loaded index shall be warmed up
## before being used for serving
index.warmup.time double default=0.0 restart
@@ -169,10 +161,6 @@ index.maxflushed int default=2
## Setting to 1 will force an immediate fusion.
index.maxflushedretired int default=20
-## How much memory is set aside for caching.
-## Now only used for caching of dictionary lookups.
-index.cache.size long default=0 restart
-
## Control io options during flushing of attributes.
attribute.write.io enum {NORMAL, OSYNC, DIRECTIO} default=DIRECTIO restart
@@ -180,10 +168,12 @@ attribute.write.io enum {NORMAL, OSYNC, DIRECTIO} default=DIRECTIO restart
search.mmap.options[] enum {POPULATE, HUGETLB} restart
## Advise to give to os when mapping memory.
+## TODO Check if default should be random
search.mmap.advise enum {NORMAL, RANDOM, SEQUENTIAL} default=NORMAL restart
## Max number of threads allowed to handle large queries concurrently
## Positive number means there is a limit, 0 or negative means no limit.
+## TODO Check if ever used in config.
search.memory.limiter.maxthreads int default=0
## Minimum coverage of corpus to postprocess before applying above concurrency limit.
@@ -210,17 +200,6 @@ distribution.searchablecopies long default=1
## Negative numbers are a percentage of memory.
summary.cache.maxbytes long default=-4
-## Include visits in the cache, if the visitoperation allows it.
-## This will enable another separate cache of summary.cache.maxbytes size.
-## IGNORED and DEPRECATED Will go away soon
-summary.cache.allowvisitcaching bool default=true
-
-## Control number of cache entries preallocated.
-## Default is no preallocation.
-## Can be set to a higher number to avoid resizing.
-## IGNORED and DEPRECATED Will go away soon
-summary.cache.initialentries long default=0 restart
-
## Control compression type of the summary while in the cache.
summary.cache.compression.type enum {NONE, LZ4, ZSTD} default=LZ4
@@ -234,6 +213,7 @@ summary.cache.update_strategy enum {INVALIDATE, UPDATE} default=INVALIDATE
## Control compression type of the summary while in memory during compaction
## NB So far only strategy=LOG honours it.
+## TODO Use same as for store (chunk.compression).
summary.log.compact.compression.type enum {NONE, LZ4, ZSTD} default=ZSTD
## Control compression level of the summary while in memory during compaction
@@ -253,14 +233,11 @@ summary.log.chunk.compression.level int default=9
## Max size in bytes per chunk.
summary.log.chunk.maxbytes int default=65536
-## Skip crc32 check on read.
-## IGNORED and DEPRECATED Will go away soon
-summary.log.chunk.skipcrconread bool default=false
-
## Max size per summary file.
summary.log.maxfilesize long default=1000000000
## Max number of lid entries per file
+## TODO Decide based on memory on node.
summary.log.maxnumlids int default=40000000
## Max disk bloat factor. This will trigger compacting.
@@ -279,6 +256,7 @@ summary.write.io enum {NORMAL, OSYNC, DIRECTIO} default=DIRECTIO
## Control io options during read of stored documents.
## All summary.read options will take effect immediately on new files written.
## On old files it will take effect either upon compact or on restart.
+## TODO Default is probably DIRECTIO
summary.read.io enum {NORMAL, DIRECTIO, MMAP } default=MMAP restart
## Multiple optional options for use with mmap
@@ -331,6 +309,7 @@ documentdb[].allocation.active_buffers_ratio double default=0.1
periodic.interval double default=3600.0
## Connect spec for transactionlog server.
+## TODO Consider not using RPC at all
tlsspec string default="tcp/localhost:13700" restart
## ConfigId for transactionlogserver
@@ -424,6 +403,7 @@ visit.ignoremaxbytes bool default=true
## Number of initializer threads used for loading structures from disk at proton startup.
## The threads are shared between document databases when value is larger than 0.
## When set to 0 (default) we use 1 separate thread per document database.
+## TODO Consider if really necessary, could be automatic.
initialize.threads int default = 0
## Portion of max address space used in components in attribute vectors
@@ -457,10 +437,12 @@ hwinfo.disk.writespeed double default = 200.0 restart
## Amount of data to write to temporary file when sampling disk write speed.
## Default is 1 GiB.
+## TODO Check if still in use
hwinfo.disk.samplewritesize long default = 1073741824 restart
## Minimum write speed needed to avoid disk being considered slow.
## Unit is MiB/s, default is 100.0 MiB/s.
+## TODO Check if still in use
hwinfo.disk.slowwritespeedlimit double default = 100.0 restart
## The size of physical memory (in bytes) available to proton.
@@ -496,6 +478,7 @@ feeding.niceness double default = 0.0 restart
## This limit is only considered when executing tasks for handling external feed operations.
## In that case the calling thread (persistence thread) is blocked until the master thread has capacity to handle more tasks.
## When this limit is set to 0 it is ignored.
+## TODO Check if still in use
feeding.master_task_limit int default = 0
## Adjustment to resource limit when determining if maintenance jobs can run.
@@ -513,6 +496,45 @@ maintenancejobs.maxoutstandingmoveops int default=100
## in depth understanding is present.
bucketdb.checksumtype enum {LEGACY, XXHASH64} default = LEGACY restart
+## Chooses the throttling policy used to control the window size
+## of the SharedOperationThrottler component used by the transaction log replay feed state.
+replay_throttling_policy.type enum { UNLIMITED, DYNAMIC } default=DYNAMIC
+## Only used if replay_throttling_policy.type == DYNAMIC:
+## TODO consider just hardcoding values as they have never been tuned.
+replay_throttling_policy.min_window_size int default=100
+replay_throttling_policy.max_window_size int default=10000
+replay_throttling_policy.window_size_increment int default=20
+
+## Everything below is deprecated and ignored. Will go away at any time.
+
+## Deprecated and ignored, will soon go away
+indexing.semiunboundtasklimit int default = 1000
+
+## Include visits in the cache, if the visitoperation allows it.
+## This will enable another separate cache of summary.cache.maxbytes size.
+## IGNORED and DEPRECATED Will go away soon
+summary.cache.allowvisitcaching bool default=true
+
+## Control number of cache entries preallocated.
+## Default is no preallocation.
+## Can be set to a higher number to avoid resizing.
+## IGNORED and DEPRECATED Will go away soon
+summary.cache.initialentries long default=0 restart
+
+## Skip crc32 check on read.
+## IGNORED and DEPRECATED Will go away soon
+summary.log.chunk.skipcrconread bool default=false
+
+## Overrides the number of threads used for writing fields across all document dbs.
+## See feeding.concurrency for details.
+## DEPRECATED - Remove usage
+indexing.threads int default=1 restart
+
+## How much memory is set aside for caching.
+## Now only used for caching of dictionary lookups.
+## TODO Still relevant, check config model, seems unused.
+index.cache.size long default=0 restart
+
## Specifies which tensor implementation to use for all backend code.
##
## TENSOR_ENGINE (default) uses DefaultTensorEngine, which has been the production implementation for years.
@@ -521,12 +543,6 @@ bucketdb.checksumtype enum {LEGACY, XXHASH64} default = LEGACY restart
tensor_implementation enum {TENSOR_ENGINE, FAST_VALUE} default = FAST_VALUE
## Whether to report issues back to the container via protobuf field
+## TODO Remove always on
forward_issues bool default = true
-## Chooses the throttling policy used to control the window size
-## of the SharedOperationThrottler component used by the transaction log replay feed state.
-replay_throttling_policy.type enum { UNLIMITED, DYNAMIC } default=DYNAMIC
-## Only used if replay_throttling_policy.type == DYNAMIC:
-replay_throttling_policy.min_window_size int default=100
-replay_throttling_policy.max_window_size int default=10000
-replay_throttling_policy.window_size_increment int default=20
diff --git a/searchlib/src/tests/attribute/attributemanager/attributemanager_test.cpp b/searchlib/src/tests/attribute/attributemanager/attributemanager_test.cpp
index 1d3995bc494..24213c53cbe 100644
--- a/searchlib/src/tests/attribute/attributemanager/attributemanager_test.cpp
+++ b/searchlib/src/tests/attribute/attributemanager/attributemanager_test.cpp
@@ -199,6 +199,8 @@ TEST("require that config can be converted")
EXPECT_TRUE(assertDataType(AVBT::DOUBLE, CACAD::DOUBLE));
EXPECT_TRUE(assertDataType(AVBT::PREDICATE, CACAD::PREDICATE));
EXPECT_TRUE(assertDataType(AVBT::TENSOR, CACAD::TENSOR));
+ EXPECT_TRUE(assertDataType(AVBT::REFERENCE, CACAD::REFERENCE));
+ EXPECT_TRUE(assertDataType(AVBT::RAW, CACAD::RAW));
EXPECT_TRUE(assertDataType(AVBT::NONE, CACAD::NONE));
EXPECT_TRUE(assertCollectionType(AVCT::SINGLE, CACAC::SINGLE));
diff --git a/searchlib/src/vespa/searchlib/attribute/configconverter.cpp b/searchlib/src/vespa/searchlib/attribute/configconverter.cpp
index a2079a88ddf..a799abb34c7 100644
--- a/searchlib/src/vespa/searchlib/attribute/configconverter.cpp
+++ b/searchlib/src/vespa/searchlib/attribute/configconverter.cpp
@@ -32,6 +32,7 @@ getDataTypeMap()
map[AttributesConfig::Attribute::Datatype::PREDICATE] = BasicType::PREDICATE;
map[AttributesConfig::Attribute::Datatype::TENSOR] = BasicType::TENSOR;
map[AttributesConfig::Attribute::Datatype::REFERENCE] = BasicType::REFERENCE;
+ map[AttributesConfig::Attribute::Datatype::RAW] = BasicType::RAW;
map[AttributesConfig::Attribute::Datatype::NONE] = BasicType::NONE;
return map;
}
diff --git a/vespajlib/src/main/java/com/yahoo/slime/ArrayValue.java b/vespajlib/src/main/java/com/yahoo/slime/ArrayValue.java
index dbd9771afe9..9f455a5b7d4 100644
--- a/vespajlib/src/main/java/com/yahoo/slime/ArrayValue.java
+++ b/vespajlib/src/main/java/com/yahoo/slime/ArrayValue.java
@@ -6,9 +6,94 @@ package com.yahoo.slime;
*/
final class ArrayValue extends Value {
- private int capacity = 16;
+ static final int initial_capacity = 16;
+ static final Impl initial_impl = new EmptyImpl();
+
+ private interface Impl {
+ public void prepareFor(ArrayValue self, Type type);
+ public Value add(Value value, int used);
+ public Value get(int index);
+ }
+
+ private static final class EmptyImpl implements Impl {
+ public void prepareFor(ArrayValue self, Type type) {
+ if (type == Type.LONG) {
+ self.impl = new LongImpl();
+ } else if (type == Type.DOUBLE) {
+ self.impl = new DoubleImpl();
+ } else {
+ self.impl = new GenericImpl(this, 0);
+ }
+ }
+ public Value add(Value value, int used) { return NixValue.invalid(); }
+ public Value get(int index) { return NixValue.invalid(); }
+ }
+
+ private static final class LongImpl implements Impl {
+ private long[] values = new long[initial_capacity];
+ public void prepareFor(ArrayValue self, Type type) {
+ if (type != Type.LONG) {
+ self.impl = new GenericImpl(this, self.used);
+ }
+ }
+ public Value add(Value value, int used) {
+ if (used == values.length) {
+ long[] v = values;
+ values = new long[v.length << 1];
+ System.arraycopy(v, 0, values, 0, used);
+ }
+ values[used] = value.asLong();
+ return get(used);
+ }
+ public Value get(int index) { return new LongValue(values[index]); }
+ }
+
+ private static final class DoubleImpl implements Impl {
+ private double[] values = new double[initial_capacity];
+ public void prepareFor(ArrayValue self, Type type) {
+ if (type != Type.DOUBLE) {
+ self.impl = new GenericImpl(this, self.used);
+ }
+ }
+ public Value add(Value value, int used) {
+ if (used == values.length) {
+ double[] v = values;
+ values = new double[v.length << 1];
+ System.arraycopy(v, 0, values, 0, used);
+ }
+ values[used] = value.asDouble();
+ return get(used);
+ }
+ public Value get(int index) { return new DoubleValue(values[index]); }
+ }
+
+ private static final class GenericImpl implements Impl {
+ private Value[] values;
+ GenericImpl(Impl src, int len) {
+ int capacity = initial_capacity;
+ while (capacity < (len + 1)) {
+ capacity = capacity << 1;
+ }
+ values = new Value[capacity];
+ for (int i = 0; i < len; i++) {
+ values[i] = src.get(i);
+ }
+ }
+ public void prepareFor(ArrayValue self, Type type) {}
+ public Value add(Value value, int used) {
+ if (used == values.length) {
+ Value[] v = values;
+ values = new Value[v.length << 1];
+ System.arraycopy(v, 0, values, 0, used);
+ }
+ values[used] = value;
+ return get(used);
+ }
+ public Value get(int index) { return values[index]; }
+ }
+
+ private Impl impl = initial_impl;
private int used = 0;
- private Value[] values = new Value[capacity];
private final SymbolTable names;
public ArrayValue(SymbolTable names) { this.names = names; }
@@ -16,33 +101,22 @@ final class ArrayValue extends Value {
public int children() { return used; }
public int entries() { return used; }
public Value entry(int index) {
- return (index < used) ? values[index] : NixValue.invalid();
+ return (index >= 0 && index < used) ? impl.get(index) : NixValue.invalid();
}
public void accept(Visitor v) { v.visitArray(this); }
public void traverse(ArrayTraverser at) {
for (int i = 0; i < used; i++) {
- at.entry(i, values[i]);
+ at.entry(i, impl.get(i));
}
}
- private void grow() {
- Value[] v = values;
- capacity = (capacity << 1);
- values = new Value[capacity];
- System.arraycopy(v, 0, values, 0, used);
- }
-
protected Value addLeaf(Value value) {
- if (used == capacity) {
- grow();
- }
- values[used++] = value;
- return value;
+ impl.prepareFor(this, value.type());
+ return impl.add(value, used++);
}
public Value addArray() { return addLeaf(new ArrayValue(names)); }
public Value addObject() { return addLeaf(new ObjectValue(names)); }
-
}
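
The ArrayValue change above replaces the single Value[] backing store with type-specialized storage: a new array starts on an empty implementation, switches to a primitive long[] or double[] when the first entry is a LONG or DOUBLE, and promotes itself to a generic Value[] as soon as an entry of any other type arrives. Below is a minimal sketch of the behaviour this is meant to preserve, written against the public Slime API; the Slime/Cursor entry points and the asserts are illustrative assumptions, not part of the patch.

import com.yahoo.slime.Cursor;
import com.yahoo.slime.Slime;
import com.yahoo.slime.Type;

public class ArrayValueSketch {
    public static void main(String[] args) {
        Cursor array = new Slime().setArray();
        for (int i = 0; i < 100; i++) {
            array.addLong(i);                 // homogeneous longs: kept in a primitive long[] internally
        }
        array.addString("mixed");             // first non-long entry: storage is promoted to Value[]
        assert array.entries() == 101;
        assert array.entry(0).type() == Type.LONG && array.entry(0).asLong() == 0;
        assert array.entry(100).type() == Type.STRING;
        assert !array.entry(101).valid();     // out-of-bounds access yields an invalid nix value
    }
}

Reading the same index twice may return distinct LongValue/DoubleValue wrappers while the array is still in its specialized form, and the same instance after promotion; the new ArrayValueTestCase below pins down exactly that behaviour.
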
diff --git a/vespajlib/src/test/java/com/yahoo/slime/ArrayValueTestCase.java b/vespajlib/src/test/java/com/yahoo/slime/ArrayValueTestCase.java
new file mode 100644
index 00000000000..c9ff86e7c2e
--- /dev/null
+++ b/vespajlib/src/test/java/com/yahoo/slime/ArrayValueTestCase.java
@@ -0,0 +1,188 @@
+// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+package com.yahoo.slime;
+
+import org.junit.Test;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.not;
+import static org.hamcrest.CoreMatchers.sameInstance;
+
+import java.util.List;
+import java.util.ArrayList;
+
+public class ArrayValueTestCase {
+
+ static ArrayValue makeArray() {
+ return new ArrayValue(new SymbolTable());
+ }
+
+ @Test
+ public void testSymbolTableForwarding() {
+ SymbolTable names = new SymbolTable();
+ assertThat(names.symbols(), is(0));
+ new ArrayValue(names).addArray().addObject().setLong("foo", 3);
+ assertThat(names.symbols(), is(1));
+ }
+
+ @Test
+ public void testOutOfBoundsAccess() {
+ var array = makeArray();
+ array.addBool(true);
+ assertThat(array.entry(-1).valid(), is(false));
+ assertThat(array.entry(1).valid(), is(false));
+ }
+
+ @Test
+ public void testGenericArray() {
+ var array = makeArray();
+ var added = new ArrayList<Cursor>();
+ for (int i = 0; i < 128; ++i) {
+ added.add(array.addString("foo" + i));
+ }
+ for (int i = 0; i < 128; i++) {
+ var e1 = array.entry(i);
+ var e2 = array.entry(i);
+ var e3 = added.get(i);
+ assertThat(e1, sameInstance(e2));
+ assertThat(e1, sameInstance(e3));
+ }
+ }
+
+ @Test
+ public void testNativeLongArray() {
+ var array = makeArray();
+ var added = new ArrayList<Cursor>();
+ for (int i = 0; i < 128; ++i) {
+ added.add(array.addLong(i));
+ }
+ for (int i = 0; i < 128; ++i) {
+ long expect = i;
+ var e1 = array.entry(i);
+ var e2 = array.entry(i);
+ var e3 = added.get(i);
+ assertThat(e1, not(sameInstance(e2)));
+ assertThat(e1, not(sameInstance(e3)));
+ assertThat(e1.equalTo(e2), is(true));
+ assertThat(e1.equalTo(e3), is(true));
+ assertThat(e1.type(), is(Type.LONG));
+ assertThat(e1.asLong(), is(expect));
+ }
+ }
+
+ @Test
+ public void testNativeDoubleArray() {
+ var array = makeArray();
+ var added = new ArrayList<Cursor>();
+ for (int i = 0; i < 128; ++i) {
+ added.add(array.addDouble((double)i));
+ }
+ for (int i = 0; i < 128; ++i) {
+ double expect = i;
+ var e1 = array.entry(i);
+ var e2 = array.entry(i);
+ var e3 = added.get(i);
+ assertThat(e1, not(sameInstance(e2)));
+ assertThat(e1, not(sameInstance(e3)));
+ assertThat(e1.equalTo(e2), is(true));
+ assertThat(e1.equalTo(e3), is(true));
+ assertThat(e1.type(), is(Type.DOUBLE));
+ assertThat(e1.asDouble(), is(expect));
+ }
+ }
+
+ @Test
+ public void testLongToGenericConversion() {
+ for (Type type: Type.values()) {
+ if (type != Type.LONG) {
+ var array = makeArray();
+ var added = new ArrayList<Cursor>();
+ for (int i = 0; i < 64; ++i) {
+ added.add(array.addLong(i));
+ }
+ switch (type) {
+ case NIX: added.add(array.addNix()); break;
+ case BOOL: added.add(array.addBool(true)); break;
+ case DOUBLE: added.add(array.addDouble(42.0)); break;
+ case STRING: added.add(array.addString("foo")); break;
+ case DATA: added.add(array.addData(new byte[1])); break;
+ case ARRAY: added.add(array.addArray()); break;
+ case OBJECT: added.add(array.addObject()); break;
+ }
+ assertThat(array.entries(), is(65));
+ assertThat(array.entry(64).type(), is(type));
+ assertThat(added.get(64), sameInstance(array.entry(64)));
+ for (int i = 0; i < 64; ++i) {
+ var e1 = array.entry(i);
+ var e2 = array.entry(i);
+ var e3 = added.get(i);
+ long expect = i;
+ assertThat(e1, sameInstance(e2));
+ assertThat(e1, not(sameInstance(e3)));
+ assertThat(e1.equalTo(e2), is(true));
+ assertThat(e1.equalTo(e3), is(true));
+ assertThat(e1.type(), is(Type.LONG));
+ assertThat(e1.asLong(), is(expect));
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testDoubleToGenericConversion() {
+ for (Type type: Type.values()) {
+ if (type != Type.DOUBLE) {
+ var array = makeArray();
+ var added = new ArrayList<Cursor>();
+ for (int i = 0; i < 64; ++i) {
+ added.add(array.addDouble(i));
+ }
+ switch (type) {
+ case NIX: added.add(array.addNix()); break;
+ case BOOL: added.add(array.addBool(true)); break;
+ case LONG: added.add(array.addLong(42)); break;
+ case STRING: added.add(array.addString("foo")); break;
+ case DATA: added.add(array.addData(new byte[1])); break;
+ case ARRAY: added.add(array.addArray()); break;
+ case OBJECT: added.add(array.addObject()); break;
+ }
+ assertThat(array.entries(), is(65));
+ assertThat(array.entry(64).type(), is(type));
+ assertThat(added.get(64), sameInstance(array.entry(64)));
+ for (int i = 0; i < 64; ++i) {
+ var e1 = array.entry(i);
+ var e2 = array.entry(i);
+ var e3 = added.get(i);
+ double expect = i;
+ assertThat(e1, sameInstance(e2));
+ assertThat(e1, not(sameInstance(e3)));
+ assertThat(e1.equalTo(e2), is(true));
+ assertThat(e1.equalTo(e3), is(true));
+ assertThat(e1.type(), is(Type.DOUBLE));
+ assertThat(e1.asDouble(), is(expect));
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testGenericArrayStart() {
+ for (Type type: Type.values()) {
+ if (type != Type.LONG && type != Type.DOUBLE) {
+ var array = makeArray();
+ Cursor added = null;
+ switch (type) {
+ case NIX: added = array.addNix(); break;
+ case BOOL: added = array.addBool(true); break;
+ case STRING: added = array.addString("foo"); break;
+ case DATA: added = array.addData(new byte[1]); break;
+ case ARRAY: added = array.addArray(); break;
+ case OBJECT: added = array.addObject(); break;
+ }
+ assertThat(array.entries(), is(1));
+ assertThat(array.entry(0).type(), is(type));
+ assertThat(added, sameInstance(array.entry(0)));
+ }
+ }
+ }
+}