author     Håvard Pettersen <havardpe@oath.com>  2018-01-31 16:06:44 +0000
committer  Håvard Pettersen <havardpe@oath.com>  2018-01-31 16:06:44 +0000
commit     fb104cc2c07bd134703aca801488849d27c34a80 (patch)
tree       fc583a55e2962e8e04592569d70e45acf624dc66 /eval
parent     25b32d8cb23893f98dbf38d35b2161a2197f9f28 (diff)
added test constructor to make interpreted function from tensor function
Diffstat (limited to 'eval')
-rw-r--r--  eval/src/vespa/eval/eval/interpreted_function.cpp  9
-rw-r--r--  eval/src/vespa/eval/eval/interpreted_function.h    4
2 files changed, 13 insertions, 0 deletions
diff --git a/eval/src/vespa/eval/eval/interpreted_function.cpp b/eval/src/vespa/eval/eval/interpreted_function.cpp
index 0974590726a..e362faadf46 100644
--- a/eval/src/vespa/eval/eval/interpreted_function.cpp
+++ b/eval/src/vespa/eval/eval/interpreted_function.cpp
@@ -61,6 +61,15 @@ InterpretedFunction::Context::Context(const InterpretedFunction &ifun)
 {
 }
 
+InterpretedFunction::InterpretedFunction(const TensorEngine &engine, const TensorFunction &function)
+    : _program(),
+      _stash(),
+      _num_params(0),
+      _tensor_engine(engine)
+{
+    _program = compile_tensor_function(function, _stash);
+}
+
 InterpretedFunction::InterpretedFunction(const TensorEngine &engine, const nodes::Node &root, size_t num_params_in, const NodeTypes &types)
     : _program(),
       _stash(),
diff --git a/eval/src/vespa/eval/eval/interpreted_function.h b/eval/src/vespa/eval/eval/interpreted_function.h
index 5c896c64074..a9b6082fa4e 100644
--- a/eval/src/vespa/eval/eval/interpreted_function.h
+++ b/eval/src/vespa/eval/eval/interpreted_function.h
@@ -12,6 +12,8 @@ namespace eval {
 
 namespace nodes { class Node; }
 class TensorEngine;
+class TensorFunction;
+class TensorSpec;
 
 /**
  * A Function that has been prepared for execution. This will
@@ -87,6 +89,8 @@ private:
 
 public:
     typedef std::unique_ptr<InterpretedFunction> UP;
+    // for testing; use with care; the tensor function must be kept alive
+    InterpretedFunction(const TensorEngine &engine, const TensorFunction &function);
     InterpretedFunction(const TensorEngine &engine, const nodes::Node &root, size_t num_params_in, const NodeTypes &types);
     InterpretedFunction(const TensorEngine &engine, const Function &function, const NodeTypes &types)
         : InterpretedFunction(engine, function.root(), function.num_params(), types) {}
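
Example usage (not part of this change): a minimal sketch of how a unit test might drive the new constructor. Everything outside the diff above is an assumption about the surrounding vespalib::eval API at this revision (SimpleTensorEngine::ref, DoubleValue, tensor_function::const_value, SimpleParams, InterpretedFunction::eval); if a const_value helper is not available here, any other TensorFunction node owned by the stash would serve the same purpose.

// hedged sketch, assuming the vespalib::eval helpers named above exist
#include <vespa/eval/eval/interpreted_function.h>
#include <vespa/eval/eval/simple_tensor_engine.h>
#include <vespa/eval/eval/tensor_function.h>
#include <vespa/eval/eval/value.h>
#include <vespa/vespalib/util/stash.h>

using namespace vespalib;
using namespace vespalib::eval;

int main() {
    Stash stash; // owns the tensor function nodes; must outlive ifun
    const Value &const_val = stash.create<DoubleValue>(42.0);
    // a trivial tensor function that just produces the constant value
    const TensorFunction &fun = tensor_function::const_value(const_val, stash);
    // the new test constructor wraps the tensor function directly,
    // bypassing Function parsing and node type resolution
    InterpretedFunction ifun(SimpleTensorEngine::ref(), fun);
    InterpretedFunction::Context ctx(ifun);
    SimpleParams params({});
    const Value &result = ifun.eval(ctx, params);
    return (result.as_double() == 42.0) ? 0 : 1;
}

The caveat in the header comment matters because the interpreted program only references the tensor function nodes; the stash that owns them (and the engine) must stay alive for as long as ifun is used.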