summary | refs | log | tree | commit | diff | stats
path: root/model-evaluation/src/test
diff options
context:
space:
mode:
author    Jon Bratseth <bratseth@oath.com>  2018-10-01 10:42:16 +0200
committer Jon Bratseth <bratseth@oath.com>  2018-10-01 10:42:16 +0200
commit 50bc3b3c198d29374448cc3eac73fbb26e42cab0 (patch)
tree 668c2fdcf18b25fda38e1faa10bd479b76e1ecb6 /model-evaluation/src/test
parent 0ff988ecf9704faac33f6201cb59349e48846457 (diff)
Fill in missing types
Diffstat (limited to 'model-evaluation/src/test')
-rw-r--r--  model-evaluation/src/test/java/ai/vespa/models/evaluation/MlModelsImportingTest.java  42
-rw-r--r--  model-evaluation/src/test/resources/config/models/rank-profiles.cfg  6
2 files changed, 24 insertions, 24 deletions
diff --git a/model-evaluation/src/test/java/ai/vespa/models/evaluation/MlModelsImportingTest.java b/model-evaluation/src/test/java/ai/vespa/models/evaluation/MlModelsImportingTest.java
index 287a2387b34..c4b163e89c0 100644
--- a/model-evaluation/src/test/java/ai/vespa/models/evaluation/MlModelsImportingTest.java
+++ b/model-evaluation/src/test/java/ai/vespa/models/evaluation/MlModelsImportingTest.java
@@ -29,15 +29,16 @@ public class MlModelsImportingTest {
// TODO: When we get type information in Models, replace the evaluator.context().names() check below by that
{
Model xgboost = tester.models().get("xgboost_2_2");
- tester.assertFunction("xgboost_2_2",
- "(optimized sum of condition trees of size 192 bytes)",
- xgboost);
// Function
assertEquals(1, xgboost.functions().size());
+ tester.assertFunction("xgboost_2_2",
+ "(optimized sum of condition trees of size 192 bytes)",
+ xgboost);
ExpressionFunction function = xgboost.functions().get(0);
- assertEquals("xgboost_2_2", function.getName());
- // assertEquals("f109, f29, f56, f60", commaSeparated(xgboost.functions().get(0).arguments())); TODO
+ assertEquals(TensorType.fromSpec("tensor()"), function.returnType().get());
+ assertEquals("f109, f29, f56, f60", commaSeparated(function.arguments()));
+ function.arguments().forEach(arg -> assertEquals(TensorType.empty, function.argumentTypes().get(arg)));
// Evaluator
FunctionEvaluator evaluator = xgboost.evaluatorOf();
@@ -48,14 +49,12 @@ public class MlModelsImportingTest {
{
Model onnxMnistSoftmax = tester.models().get("mnist_softmax");
- tester.assertFunction("default.add",
- "join(reduce(join(rename(Placeholder, (d0, d1), (d0, d2)), constant(mnist_softmax_Variable), f(a,b)(a * b)), sum, d2), constant(mnist_softmax_Variable_1), f(a,b)(a + b))",
- onnxMnistSoftmax);
- assertEquals("tensor(d1[10],d2[784])",
- onnxMnistSoftmax.evaluatorOf("default.add").context().get("constant(mnist_softmax_Variable)").type().toString());
// Function
assertEquals(1, onnxMnistSoftmax.functions().size());
+ tester.assertFunction("default.add",
+ "join(reduce(join(rename(Placeholder, (d0, d1), (d0, d2)), constant(mnist_softmax_Variable), f(a,b)(a * b)), sum, d2), constant(mnist_softmax_Variable_1), f(a,b)(a + b))",
+ onnxMnistSoftmax);
ExpressionFunction function = onnxMnistSoftmax.functions().get(0);
assertEquals(TensorType.fromSpec("tensor(d1[10])"), function.returnType().get());
assertEquals(1, function.arguments().size());
@@ -63,6 +62,8 @@ public class MlModelsImportingTest {
assertEquals(TensorType.fromSpec("tensor(d0[],d1[784])"), function.argumentTypes().get("Placeholder"));
// Evaluator
+ assertEquals("tensor(d1[10],d2[784])",
+ onnxMnistSoftmax.evaluatorOf("default.add").context().get("constant(mnist_softmax_Variable)").type().toString());
FunctionEvaluator evaluator = onnxMnistSoftmax.evaluatorOf(); // Verify exactly one output available
assertEquals("Placeholder, constant(mnist_softmax_Variable), constant(mnist_softmax_Variable_1)", evaluator.context().names().stream().sorted().collect(Collectors.joining(", ")));
assertEquals(-1.6372650861740112E-6, evaluator.evaluate().sum().asDouble(), delta);
@@ -70,17 +71,17 @@ public class MlModelsImportingTest {
{
Model tfMnistSoftmax = tester.models().get("mnist_softmax_saved");
- tester.assertFunction("serving_default.y",
- "join(reduce(join(rename(Placeholder, (d0, d1), (d0, d2)), constant(mnist_softmax_saved_layer_Variable_read), f(a,b)(a * b)), sum, d2), constant(mnist_softmax_saved_layer_Variable_1_read), f(a,b)(a + b))",
- tfMnistSoftmax);
// Function
assertEquals(1, tfMnistSoftmax.functions().size());
+ tester.assertFunction("serving_default.y",
+ "join(reduce(join(rename(Placeholder, (d0, d1), (d0, d2)), constant(mnist_softmax_saved_layer_Variable_read), f(a,b)(a * b)), sum, d2), constant(mnist_softmax_saved_layer_Variable_1_read), f(a,b)(a + b))",
+ tfMnistSoftmax);
ExpressionFunction function = tfMnistSoftmax.functions().get(0);
assertEquals(TensorType.fromSpec("tensor(d1[10])"), function.returnType().get());
assertEquals(1, function.arguments().size());
- assertEquals("x", function.arguments().get(0));
- assertEquals(TensorType.fromSpec("tensor(d0[],d1[784])"), function.argumentTypes().get("x"));
+ assertEquals("Placeholder", function.arguments().get(0));
+ assertEquals(TensorType.fromSpec("tensor(d0[],d1[784])"), function.argumentTypes().get("Placeholder"));
// Evaluator
FunctionEvaluator evaluator = tfMnistSoftmax.evaluatorOf(); // Verify exactly one output available
@@ -90,10 +91,6 @@ public class MlModelsImportingTest {
{
Model tfMnist = tester.models().get("mnist_saved");
- tester.assertFunction("serving_default.y",
- "join(reduce(join(map(join(reduce(join(join(join(rankingExpression(imported_ml_function_mnist_saved_dnn_hidden1_add), 0.009999999776482582, f(a,b)(a * b)), rankingExpression(imported_ml_function_mnist_saved_dnn_hidden1_add), f(a,b)(max(a,b))), constant(mnist_saved_dnn_hidden2_weights_read), f(a,b)(a * b)), sum, d3), constant(mnist_saved_dnn_hidden2_bias_read), f(a,b)(a + b)), f(a)(1.050701 * if (a >= 0, a, 1.673263 * (exp(a) - 1)))), constant(mnist_saved_dnn_outputs_weights_read), f(a,b)(a * b)), sum, d2), constant(mnist_saved_dnn_outputs_bias_read), f(a,b)(a + b))",
- tfMnist);
-
// Generated function
tester.assertFunction("imported_ml_function_mnist_saved_dnn_hidden1_add",
"join(reduce(join(rename(input, (d0, d1), (d0, d4)), constant(mnist_saved_dnn_hidden1_weights_read), f(a,b)(a * b)), sum, d4), constant(mnist_saved_dnn_hidden1_bias_read), f(a,b)(a + b))",
@@ -101,11 +98,14 @@ public class MlModelsImportingTest {
// Function
assertEquals(2, tfMnist.functions().size()); // TODO: Filter out generated function
+ tester.assertFunction("serving_default.y",
+ "join(reduce(join(map(join(reduce(join(join(join(rankingExpression(imported_ml_function_mnist_saved_dnn_hidden1_add), 0.009999999776482582, f(a,b)(a * b)), rankingExpression(imported_ml_function_mnist_saved_dnn_hidden1_add), f(a,b)(max(a,b))), constant(mnist_saved_dnn_hidden2_weights_read), f(a,b)(a * b)), sum, d3), constant(mnist_saved_dnn_hidden2_bias_read), f(a,b)(a + b)), f(a)(1.050701 * if (a >= 0, a, 1.673263 * (exp(a) - 1)))), constant(mnist_saved_dnn_outputs_weights_read), f(a,b)(a * b)), sum, d2), constant(mnist_saved_dnn_outputs_bias_read), f(a,b)(a + b))",
+ tfMnist);
ExpressionFunction function = tfMnist.functions().get(1);
assertEquals(TensorType.fromSpec("tensor(d1[10])"), function.returnType().get());
assertEquals(1, function.arguments().size());
- assertEquals("x", function.arguments().get(0));
- assertEquals(TensorType.fromSpec("tensor(d0[],d1[784])"), function.argumentTypes().get("x"));
+ assertEquals("input", function.arguments().get(0));
+ assertEquals(TensorType.fromSpec("tensor(d0[],d1[784])"), function.argumentTypes().get("input"));
// Evaluator
FunctionEvaluator evaluator = tfMnist.evaluatorOf("serving_default");
diff --git a/model-evaluation/src/test/resources/config/models/rank-profiles.cfg b/model-evaluation/src/test/resources/config/models/rank-profiles.cfg
index 9175b60315b..c25c5ba555b 100644
--- a/model-evaluation/src/test/resources/config/models/rank-profiles.cfg
+++ b/model-evaluation/src/test/resources/config/models/rank-profiles.cfg
@@ -11,7 +11,7 @@ rankprofile[1].fef.property[0].value "if (f29 < -0.1234567, if (f56 < -0.242398,
rankprofile[2].name "mnist_softmax_saved"
rankprofile[2].fef.property[0].name "rankingExpression(serving_default.y).rankingScript"
rankprofile[2].fef.property[0].value "join(reduce(join(rename(Placeholder, (d0, d1), (d0, d2)), constant(mnist_softmax_saved_layer_Variable_read), f(a,b)(a * b)), sum, d2), constant(mnist_softmax_saved_layer_Variable_1_read), f(a,b)(a + b))"
-rankprofile[2].fef.property[1].name "rankingExpression(serving_default.y).x.type"
+rankprofile[2].fef.property[1].name "rankingExpression(serving_default.y).Placeholder.type"
rankprofile[2].fef.property[1].value "tensor(d0[],d1[784])"
rankprofile[2].fef.property[2].name "rankingExpression(serving_default.y).type"
rankprofile[2].fef.property[2].value "tensor(d1[10])"
@@ -22,7 +22,7 @@ rankprofile[3].fef.property[1].name "rankingExpression(imported_ml_function_mnis
rankprofile[3].fef.property[1].value "tensor(d3[300])"
rankprofile[3].fef.property[2].name "rankingExpression(serving_default.y).rankingScript"
rankprofile[3].fef.property[2].value "join(reduce(join(map(join(reduce(join(join(join(rankingExpression(imported_ml_function_mnist_saved_dnn_hidden1_add), 0.009999999776482582, f(a,b)(a * b)), rankingExpression(imported_ml_function_mnist_saved_dnn_hidden1_add), f(a,b)(max(a,b))), constant(mnist_saved_dnn_hidden2_weights_read), f(a,b)(a * b)), sum, d3), constant(mnist_saved_dnn_hidden2_bias_read), f(a,b)(a + b)), f(a)(1.050701 * if (a >= 0, a, 1.673263 * (exp(a) - 1)))), constant(mnist_saved_dnn_outputs_weights_read), f(a,b)(a * b)), sum, d2), constant(mnist_saved_dnn_outputs_bias_read), f(a,b)(a + b))"
-rankprofile[3].fef.property[3].name "rankingExpression(serving_default.y).x.type"
+rankprofile[3].fef.property[3].name "rankingExpression(serving_default.y).input.type"
rankprofile[3].fef.property[3].value "tensor(d0[],d1[784])"
rankprofile[3].fef.property[4].name "rankingExpression(serving_default.y).type"
-rankprofile[3].fef.property[4].value "tensor(d1[10])" \ No newline at end of file
+rankprofile[3].fef.property[4].value "tensor(d1[10])"