-rw-r--r--  config-model/src/main/java/com/yahoo/vespa/model/content/GlobalDistributionValidator.java | 10
-rw-r--r--  config-model/src/main/java/com/yahoo/vespa/model/content/cluster/ContentCluster.java | 16
-rw-r--r--  config-model/src/main/resources/schema/content.rnc | 8
-rw-r--r--  config-model/src/test/java/com/yahoo/vespa/model/content/ContentSearchClusterTest.java | 36
-rw-r--r--  config-model/src/test/java/com/yahoo/vespa/model/content/GlobalDistributionValidatorTest.java | 48
-rw-r--r--  config-model/src/test/java/com/yahoo/vespa/model/content/utils/ContentClusterBuilder.java | 11
-rw-r--r--  eval/src/vespa/eval/tensor/sparse/direct_sparse_tensor_builder.h | 17
-rw-r--r--  eval/src/vespa/eval/tensor/sparse/sparse_tensor_address_ref.h | 13
-rw-r--r--  node-admin/src/main/java/com/yahoo/vespa/hosted/node/admin/docker/DockerOperationsImpl.java | 2
-rw-r--r--  searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/integration/tensorflow/TensorFlowImporter.java | 5
-rw-r--r--  searchlib/src/test/files/integration/tensorflow/3_layer_mnist/mnist.py | 95
-rw-r--r--  searchlib/src/test/files/integration/tensorflow/3_layer_mnist/saved/saved_model.pbtxt | 8550
-rw-r--r--  searchlib/src/test/files/integration/tensorflow/3_layer_mnist/saved/variables/variables.data-00000-of-00001 | bin 0 -> 1080200 bytes
-rw-r--r--  searchlib/src/test/files/integration/tensorflow/3_layer_mnist/saved/variables/variables.index | bin 0 -> 371 bytes
-rw-r--r--  searchlib/src/test/java/com/yahoo/searchlib/rankingexpression/integration/tensorflow/Mnist_SoftmaxTestCase.java | 117
-rw-r--r--  searchlib/src/test/java/com/yahoo/searchlib/rankingexpression/integration/tensorflow/TensorflowImportTestCase.java | 298
-rw-r--r--  searchlib/src/vespa/searchlib/aggregation/grouping.cpp | 10
-rw-r--r--  searchlib/src/vespa/searchlib/aggregation/grouping.h | 16
-rw-r--r--  storage/src/vespa/storage/visiting/visitor.cpp | 2
-rw-r--r--  storageapi/src/vespa/storageapi/messageapi/storagemessage.h | 2
-rw-r--r--  vespajlib/src/main/java/com/yahoo/tensor/functions/ScalarFunctions.java | 24
-rw-r--r--  zkfacade/src/main/java/com/yahoo/vespa/curator/Curator.java | 2
22 files changed, 9104 insertions, 178 deletions
diff --git a/config-model/src/main/java/com/yahoo/vespa/model/content/GlobalDistributionValidator.java b/config-model/src/main/java/com/yahoo/vespa/model/content/GlobalDistributionValidator.java
index e18f730ef91..4bdef0607a2 100644
--- a/config-model/src/main/java/com/yahoo/vespa/model/content/GlobalDistributionValidator.java
+++ b/config-model/src/main/java/com/yahoo/vespa/model/content/GlobalDistributionValidator.java
@@ -22,10 +22,12 @@ public class GlobalDistributionValidator {
public void validate(Map<String, NewDocumentType> documentDefinitions,
Set<NewDocumentType> globallyDistributedDocuments,
- Redundancy redundancy) {
-
- verifyGlobalDocumentsHaveRequiredRedundancy(globallyDistributedDocuments, redundancy);
- verifySearchableCopiesIsSameAsRedundancy(globallyDistributedDocuments, redundancy);
+ Redundancy redundancy,
+ boolean enableMultipleBucketSpaces) {
+ if (!enableMultipleBucketSpaces) {
+ verifyGlobalDocumentsHaveRequiredRedundancy(globallyDistributedDocuments, redundancy);
+ verifySearchableCopiesIsSameAsRedundancy(globallyDistributedDocuments, redundancy);
+ }
verifyReferredDocumentsArePresent(documentDefinitions);
verifyReferredDocumentsAreGlobal(documentDefinitions, globallyDistributedDocuments);
}
diff --git a/config-model/src/main/java/com/yahoo/vespa/model/content/cluster/ContentCluster.java b/config-model/src/main/java/com/yahoo/vespa/model/content/cluster/ContentCluster.java
index db849d0ae50..bc5a170ae28 100644
--- a/config-model/src/main/java/com/yahoo/vespa/model/content/cluster/ContentCluster.java
+++ b/config-model/src/main/java/com/yahoo/vespa/model/content/cluster/ContentCluster.java
@@ -66,6 +66,8 @@ public class ContentCluster extends AbstractConfigProducer implements StorDistri
ContentSearchCluster search;
private final Map<String, NewDocumentType> documentDefinitions;
private final Set<NewDocumentType> globallyDistributedDocuments;
+ // Experimental flag (TODO: remove when feature is enabled by default)
+ private boolean enableMultipleBucketSpaces = false;
com.yahoo.vespa.model.content.StorageGroup rootGroup;
StorageCluster storageNodes;
DistributorCluster distributorNodes;
@@ -147,6 +149,10 @@ public class ContentCluster extends AbstractConfigProducer implements StorDistri
if (tuning != null) {
setupTuning(c, tuning);
}
+ ModelElement experimental = contentElement.getChild("experimental");
+ if (experimental != null) {
+ setupExperimental(c, experimental);
+ }
if (context.getParentProducer().getRoot() == null) return c;
@@ -244,6 +250,13 @@ public class ContentCluster extends AbstractConfigProducer implements StorDistri
}
}
+ private void setupExperimental(ContentCluster cluster, ModelElement experimental) {
+ Boolean enableMultipleBucketSpaces = experimental.childAsBoolean("enable-multiple-bucket-spaces");
+ if (enableMultipleBucketSpaces != null) {
+ cluster.enableMultipleBucketSpaces = enableMultipleBucketSpaces;
+ }
+ }
+
private void validateGroupSiblings(String cluster, StorageGroup group) {
HashSet<String> siblings = new HashSet<>();
for (StorageGroup g : group.getSubgroups()) {
@@ -628,7 +641,7 @@ public class ContentCluster extends AbstractConfigProducer implements StorDistri
}
}
new ReservedDocumentTypeNameValidator().validate(documentDefinitions);
- new GlobalDistributionValidator().validate(documentDefinitions, globallyDistributedDocuments, redundancy);
+ new GlobalDistributionValidator().validate(documentDefinitions, globallyDistributedDocuments, redundancy, enableMultipleBucketSpaces);
}
public static Map<String, Integer> METRIC_INDEX_MAP = new TreeMap<>();
@@ -713,5 +726,6 @@ public class ContentCluster extends AbstractConfigProducer implements StorDistri
docTypeBuilder.bucketspace(bucketSpace);
builder.documenttype(docTypeBuilder);
}
+ builder.enable_multiple_bucket_spaces(enableMultipleBucketSpaces);
}
}
diff --git a/config-model/src/main/resources/schema/content.rnc b/config-model/src/main/resources/schema/content.rnc
index 8325a542dbd..ffbf1a929bc 100644
--- a/config-model/src/main/resources/schema/content.rnc
+++ b/config-model/src/main/resources/schema/content.rnc
@@ -114,7 +114,9 @@ Content = element content {
Documents? &
ContentNodes? &
TopGroup? &
- Controllers?
+ Controllers? &
+ # Contains experimental feature switches
+ Experimental?
}
Controllers =
@@ -367,3 +369,7 @@ TuningCompression = element compression {
element type { string "none" | string "lz4" | string "zstd" }? &
element level { xsd:nonNegativeInteger }?
}
+
+Experimental = element experimental {
+ element enable-multiple-bucket-spaces { xsd:boolean }?
+}
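Note: with the schema addition above, a content cluster in services.xml can opt in to the experimental switch with markup along these lines (illustrative only; the element and its child are optional and the feature defaults to off, matching the XML the test builder below generates):

<content version="1.0" id="mycluster">
  <redundancy>2</redundancy>
  <documents>
    <document type="global" mode="index" global="true"/>
    <document type="regular" mode="index"/>
  </documents>
  <experimental>
    <enable-multiple-bucket-spaces>true</enable-multiple-bucket-spaces>
  </experimental>
  <group>
    <node distribution-key="0" hostalias="mockhost"/>
  </group>
</content>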
diff --git a/config-model/src/test/java/com/yahoo/vespa/model/content/ContentSearchClusterTest.java b/config-model/src/test/java/com/yahoo/vespa/model/content/ContentSearchClusterTest.java
index 41bba055a50..b1de548356c 100644
--- a/config-model/src/test/java/com/yahoo/vespa/model/content/ContentSearchClusterTest.java
+++ b/config-model/src/test/java/com/yahoo/vespa/model/content/ContentSearchClusterTest.java
@@ -18,6 +18,7 @@ import static com.yahoo.vespa.model.content.utils.ContentClusterUtils.createClus
import static com.yahoo.vespa.model.content.utils.SearchDefinitionBuilder.createSearchDefinitions;
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertFalse;
+import static junit.framework.TestCase.assertTrue;
/**
* Unit tests for content search cluster.
@@ -38,12 +39,26 @@ public class ContentSearchClusterTest {
}
private static ContentCluster createClusterWithGlobalType() throws Exception {
- return createCluster(new ContentClusterBuilder().docTypes(Arrays.asList(
- DocType.indexGlobal("global"),
- DocType.index("regular"))).getXml(),
+ return createCluster(createClusterBuilderWithGlobalType().getXml(),
createSearchDefinitions("global", "regular"));
}
+ private static ContentCluster createClusterWithMultipleBucketSpacesEnabled() throws Exception {
+ ContentClusterBuilder builder = createClusterBuilderWithGlobalType();
+ builder.groupXml(joinLines("<group>",
+ "<node distribution-key='0' hostalias='mockhost'/>",
+ "<node distribution-key='1' hostalias='mockhost'/>",
+ "</group>"));
+ builder.enableMultipleBucketSpaces(true);
+ String clusterXml = builder.getXml();
+ return createCluster(clusterXml, createSearchDefinitions("global", "regular"));
+ }
+
+ private static ContentClusterBuilder createClusterBuilderWithGlobalType() {
+ return new ContentClusterBuilder()
+ .docTypes(Arrays.asList(DocType.indexGlobal("global"), DocType.index("regular")));
+ }
+
private static ProtonConfig getProtonConfig(ContentCluster cluster) {
ProtonConfig.Builder protonCfgBuilder = new ProtonConfig.Builder();
cluster.getSearch().getConfig(protonCfgBuilder);
@@ -131,11 +146,22 @@ public class ContentSearchClusterTest {
@Test
public void require_that_bucket_spaces_config_is_produced_for_content_cluster() throws Exception {
BucketspacesConfig config = getBucketspacesConfig(createClusterWithGlobalType());
+ assertBucketspacesConfigWithTwoDocumentTypes(config);
+ // Safeguard against flipping the switch
+ assertFalse(config.enable_multiple_bucket_spaces());
+ }
+
+ @Test
+ public void require_that_multiple_bucket_spaces_can_be_enabled() throws Exception {
+ BucketspacesConfig config = getBucketspacesConfig(createClusterWithMultipleBucketSpacesEnabled());
+ assertBucketspacesConfigWithTwoDocumentTypes(config);
+ assertTrue(config.enable_multiple_bucket_spaces());
+ }
+
+ private static void assertBucketspacesConfigWithTwoDocumentTypes(BucketspacesConfig config) {
assertEquals(2, config.documenttype().size());
assertDocumentType("global", "global", config.documenttype(0));
assertDocumentType("regular", "default", config.documenttype(1));
- // Safeguard against flipping the switch
- assertFalse(config.enable_multiple_bucket_spaces());
}
}
diff --git a/config-model/src/test/java/com/yahoo/vespa/model/content/GlobalDistributionValidatorTest.java b/config-model/src/test/java/com/yahoo/vespa/model/content/GlobalDistributionValidatorTest.java
index 7958d5e2c1a..b8252f2f081 100644
--- a/config-model/src/test/java/com/yahoo/vespa/model/content/GlobalDistributionValidatorTest.java
+++ b/config-model/src/test/java/com/yahoo/vespa/model/content/GlobalDistributionValidatorTest.java
@@ -37,8 +37,17 @@ public class GlobalDistributionValidatorTest {
"The following document types are marked as global, " +
"but do not have high enough redundancy to make the documents globally distributed: " +
"'bar', 'foo'. Redundancy is 2, expected 3.");
- new GlobalDistributionValidator()
- .validate(fixture.getDocumentTypes(), fixture.getGloballyDistributedDocuments(), redundancy);
+ validate(fixture, redundancy);
+ }
+
+ @Test
+ public void validation_of_redundancy_is_deactivated_if_multiple_bucket_spaces_is_enabled() {
+ Fixture fixture = new Fixture()
+ .addGlobalDocument(createDocumentType("foo"))
+ .addGlobalDocument(createDocumentType("bar"));
+ Redundancy redundancy = createRedundancyWithoutGlobalDistributionAndTooFewSearchableCopies();
+
+ validate(fixture, redundancy, true);
}
@Test
@@ -52,8 +61,7 @@ public class GlobalDistributionValidatorTest {
exceptionRule.expectMessage(
"The following document types have the number of searchable copies less than redundancy: " +
"'bar', 'foo'. Searchable copies is 1, while redundancy is 2.");
- new GlobalDistributionValidator()
- .validate(fixture.getDocumentTypes(), fixture.getGloballyDistributedDocuments(), redundancy);
+ validate(fixture, redundancy);
}
@Test
@@ -61,14 +69,13 @@ public class GlobalDistributionValidatorTest {
Fixture fixture = new Fixture()
.addGlobalDocument(createDocumentType("foo"));
Redundancy redundancy = createRedundancyWithGlobalDistribution();
- new GlobalDistributionValidator()
- .validate(fixture.getDocumentTypes(), fixture.getGloballyDistributedDocuments(), redundancy);
+ validate(fixture, redundancy);
}
@Test
public void validation_succeeds_on_no_documents() {
new GlobalDistributionValidator()
- .validate(emptyMap(), emptySet(), createRedundancyWithoutGlobalDistribution());
+ .validate(emptyMap(), emptySet(), createRedundancyWithoutGlobalDistribution(), false);
}
@Test
@@ -76,8 +83,7 @@ public class GlobalDistributionValidatorTest {
Fixture fixture = new Fixture()
.addNonGlobalDocument(createDocumentType("foo"));
Redundancy redundancy = createRedundancyWithoutGlobalDistribution();
- new GlobalDistributionValidator()
- .validate(fixture.getDocumentTypes(), fixture.getGloballyDistributedDocuments(), redundancy);
+ validate(fixture, redundancy);
}
@Test
@@ -90,8 +96,7 @@ public class GlobalDistributionValidatorTest {
exceptionRule.expect(IllegalArgumentException.class);
exceptionRule.expectMessage(
"The following document types are referenced from other documents, but are not globally distributed: 'parent'");
- new GlobalDistributionValidator()
- .validate(fixture.getDocumentTypes(), fixture.getGloballyDistributedDocuments(), redundancy);
+ validate(fixture, redundancy);
}
@Test
@@ -101,8 +106,7 @@ public class GlobalDistributionValidatorTest {
.addGlobalDocument(parent)
.addNonGlobalDocument(createDocumentType("child", parent));
Redundancy redundancy = createRedundancyWithGlobalDistribution();
- new GlobalDistributionValidator()
- .validate(fixture.getDocumentTypes(), fixture.getGloballyDistributedDocuments(), redundancy);
+ validate(fixture, redundancy);
}
@Test
@@ -115,8 +119,7 @@ public class GlobalDistributionValidatorTest {
exceptionRule.expect(IllegalArgumentException.class);
exceptionRule.expectMessage(
"The following document types are referenced from other documents, but are not listed in services.xml: 'unknown'");
- new GlobalDistributionValidator()
- .validate(fixture.getDocumentTypes(), fixture.getGloballyDistributedDocuments(), redundancy);
+ validate(fixture, redundancy);
}
@Test
@@ -145,11 +148,26 @@ public class GlobalDistributionValidatorTest {
return redundancy;
}
+ private static Redundancy createRedundancyWithoutGlobalDistributionAndTooFewSearchableCopies() {
+ Redundancy redundancy = new Redundancy(2, 2, 1);
+ redundancy.setTotalNodes(3);
+ return redundancy;
+ }
+
private static NewDocumentType createDocumentType(String name, NewDocumentType... references) {
Set<NewDocumentType.Name> documentReferences = Stream.of(references).map(NewDocumentType::getFullName).collect(toSet());
return new NewDocumentType(new NewDocumentType.Name(name), documentReferences);
}
+ private static void validate(Fixture fixture, Redundancy redundancy) {
+ validate(fixture, redundancy, false);
+ }
+
+ private static void validate(Fixture fixture, Redundancy redundancy, boolean enableMultipleBucketSpaces) {
+ new GlobalDistributionValidator()
+ .validate(fixture.getDocumentTypes(), fixture.getGloballyDistributedDocuments(), redundancy, enableMultipleBucketSpaces);
+ }
+
private static class Fixture {
private final Map<String, NewDocumentType> documentTypes = new HashMap<>();
private final Set<NewDocumentType> globallyDistributedDocuments = new HashSet<>();
diff --git a/config-model/src/test/java/com/yahoo/vespa/model/content/utils/ContentClusterBuilder.java b/config-model/src/test/java/com/yahoo/vespa/model/content/utils/ContentClusterBuilder.java
index 95c57bb544c..79bc9504659 100644
--- a/config-model/src/test/java/com/yahoo/vespa/model/content/utils/ContentClusterBuilder.java
+++ b/config-model/src/test/java/com/yahoo/vespa/model/content/utils/ContentClusterBuilder.java
@@ -26,6 +26,7 @@ public class ContentClusterBuilder {
private Optional<String> dispatchXml = Optional.empty();
private Optional<Double> protonDiskLimit = Optional.empty();
private Optional<Double> protonMemoryLimit = Optional.empty();
+ private Optional<Boolean> enableMultipleBucketSpaces = Optional.empty();
public ContentClusterBuilder() {
}
@@ -77,6 +78,11 @@ public class ContentClusterBuilder {
return this;
}
+ public ContentClusterBuilder enableMultipleBucketSpaces(boolean value) {
+ this.enableMultipleBucketSpaces = Optional.of(value);
+ return this;
+ }
+
public ContentCluster build(MockRoot root) throws Exception {
return ContentClusterUtils.createCluster(getXml(), root);
}
@@ -94,6 +100,11 @@ public class ContentClusterBuilder {
if (dispatchXml.isPresent()) {
xml += dispatchXml.get();
}
+ if (enableMultipleBucketSpaces.isPresent()) {
+ xml += joinLines("<experimental>",
+ "<enable-multiple-bucket-spaces>" + (enableMultipleBucketSpaces.get() ? "true" : "false") + "</enable-multiple-bucket-spaces>",
+ "</experimental>");
+ }
return xml + groupXml +
"</content>";
}
diff --git a/eval/src/vespa/eval/tensor/sparse/direct_sparse_tensor_builder.h b/eval/src/vespa/eval/tensor/sparse/direct_sparse_tensor_builder.h
index c977131fcd3..9ec98b2c11d 100644
--- a/eval/src/vespa/eval/tensor/sparse/direct_sparse_tensor_builder.h
+++ b/eval/src/vespa/eval/tensor/sparse/direct_sparse_tensor_builder.h
@@ -40,8 +40,7 @@ public:
void
copyCells(const Cells &cells_in, const eval::ValueType &cells_in_type)
{
- SparseTensorAddressPadder addressPadder(_type,
- cells_in_type);
+ SparseTensorAddressPadder addressPadder(_type, cells_in_type);
for (const auto &cell : cells_in) {
addressPadder.padAddress(cell.first);
SparseTensorAddressRef oldRef = addressPadder.getAddressRef();
@@ -64,8 +63,7 @@ public:
{
}
- DirectTensorBuilder(const eval::ValueType &type_in,
- const Cells &cells_in)
+ DirectTensorBuilder(const eval::ValueType &type_in, const Cells &cells_in)
: _stash(TensorImplType::STASH_CHUNK_SIZE),
_type(type_in),
_cells()
@@ -94,14 +92,12 @@ public:
}
template <class Function>
- void insertCell(SparseTensorAddressRef address, double value,
- Function &&func)
+ void insertCell(SparseTensorAddressRef address, double value, Function &&func)
{
- SparseTensorAddressRef oldRef(address);
- auto res = _cells.insert(std::make_pair(oldRef, value));
+ auto res = _cells.insert(std::make_pair(address, value));
if (res.second) {
// Replace key with own copy
- res.first->first = SparseTensorAddressRef(oldRef, _stash);
+ res.first->first = SparseTensorAddressRef(address, _stash);
} else {
res.first->second = func(res.first->second, value);
}
@@ -113,8 +109,7 @@ public:
}
template <class Function>
- void insertCell(SparseTensorAddressBuilder &address, double value,
- Function &&func)
+ void insertCell(SparseTensorAddressBuilder &address, double value, Function &&func)
{
insertCell(address.getAddressRef(), value, func);
}
diff --git a/eval/src/vespa/eval/tensor/sparse/sparse_tensor_address_ref.h b/eval/src/vespa/eval/tensor/sparse/sparse_tensor_address_ref.h
index b179b431b94..8533aa29829 100644
--- a/eval/src/vespa/eval/tensor/sparse/sparse_tensor_address_ref.h
+++ b/eval/src/vespa/eval/tensor/sparse/sparse_tensor_address_ref.h
@@ -16,6 +16,11 @@ class SparseTensorAddressRef
const void *_start;
uint32_t _size;
uint32_t _hash;
+ static void * copy(const SparseTensorAddressRef rhs, Stash &stash) {
+ void *res = stash.alloc(rhs._size);
+ memcpy(res, rhs._start, rhs._size);
+ return res;
+ }
public:
SparseTensorAddressRef()
: _start(nullptr), _size(0u), _hash(0u)
@@ -29,14 +34,10 @@ public:
}
SparseTensorAddressRef(const SparseTensorAddressRef rhs, Stash &stash)
- : _start(nullptr),
+ : _start(copy(rhs, stash)),
_size(rhs._size),
_hash(rhs._hash)
- {
- void *res = stash.alloc(rhs._size);
- memcpy(res, rhs._start, rhs._size);
- _start = res;
- }
+ {}
uint32_t hash() const { return _hash; }
diff --git a/node-admin/src/main/java/com/yahoo/vespa/hosted/node/admin/docker/DockerOperationsImpl.java b/node-admin/src/main/java/com/yahoo/vespa/hosted/node/admin/docker/DockerOperationsImpl.java
index d2863468ee7..cc1fe0c2449 100644
--- a/node-admin/src/main/java/com/yahoo/vespa/hosted/node/admin/docker/DockerOperationsImpl.java
+++ b/node-admin/src/main/java/com/yahoo/vespa/hosted/node/admin/docker/DockerOperationsImpl.java
@@ -62,7 +62,7 @@ public class DockerOperationsImpl implements DockerOperations {
DIRECTORIES_TO_MOUNT.put(getDefaults().underVespaHome("logs/yms_agent"), false);
DIRECTORIES_TO_MOUNT.put(getDefaults().underVespaHome("logs/ysar"), false);
DIRECTORIES_TO_MOUNT.put(getDefaults().underVespaHome("logs/ystatus"), false);
- DIRECTORIES_TO_MOUNT.put(getDefaults().underVespaHome("logs/zpe_policy_updater"), false);
+ DIRECTORIES_TO_MOUNT.put(getDefaults().underVespaHome("logs/zpu"), false);
DIRECTORIES_TO_MOUNT.put(getDefaults().underVespaHome("var/cache"), false);
DIRECTORIES_TO_MOUNT.put(getDefaults().underVespaHome("var/crash"), false);
DIRECTORIES_TO_MOUNT.put(getDefaults().underVespaHome("var/db/jdisc"), false);
diff --git a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/integration/tensorflow/TensorFlowImporter.java b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/integration/tensorflow/TensorFlowImporter.java
index e62ff4c54bf..24aeaf94e4f 100644
--- a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/integration/tensorflow/TensorFlowImporter.java
+++ b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/integration/tensorflow/TensorFlowImporter.java
@@ -111,9 +111,12 @@ public class TensorFlowImporter {
switch (tfNode.getOp().toLowerCase()) {
case "add" : case "add_n" : return operationMapper.join(importArguments(tfNode, graph, model, result), ScalarFunctions.add());
case "acos" : return operationMapper.map(importArguments(tfNode, graph, model, result), ScalarFunctions.acos());
- case "placeholder" : return operationMapper.placeholder(tfNode, result);
+ case "elu": return operationMapper.map(importArguments(tfNode, graph, model, result), ScalarFunctions.elu());
case "identity" : return operationMapper.identity(tfNode, model, result);
+ case "placeholder" : return operationMapper.placeholder(tfNode, result);
+ case "relu": return operationMapper.map(importArguments(tfNode, graph, model, result), ScalarFunctions.relu());
case "matmul" : return operationMapper.matmul(importArguments(tfNode, graph, model, result));
+ case "sigmoid": return operationMapper.map(importArguments(tfNode, graph, model, result), ScalarFunctions.sigmoid());
case "softmax" : return operationMapper.softmax(importArguments(tfNode, graph, model, result));
default : throw new IllegalArgumentException("Conversion of TensorFlow operation '" + tfNode.getOp() + "' is not supported");
}
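The three new cases (elu, relu, sigmoid) all delegate to operationMapper.map with a scalar function from ScalarFunctions, which is extended in this change (see the diffstat). As a rough sketch of what such an element-wise mapping amounts to on the Java tensor API — illustrative only, not code from this change set; Tensor.from and Tensor.map from com.yahoo.tensor are assumed to be available as in vespajlib:

import com.yahoo.tensor.Tensor;

public class ActivationSketch {
    public static void main(String[] args) {
        Tensor input = Tensor.from("{{x:0}:-1.0, {x:1}:0.0, {x:2}:2.0}");
        Tensor relu    = input.map(v -> Math.max(0.0, v));              // clamp negative cells to 0
        Tensor sigmoid = input.map(v -> 1.0 / (1.0 + Math.exp(-v)));    // squash each cell into (0, 1)
        Tensor elu     = input.map(v -> v < 0 ? Math.exp(v) - 1.0 : v); // smooth variant of relu
        System.out.println(relu + "\n" + sigmoid + "\n" + elu);
    }
}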
diff --git a/searchlib/src/test/files/integration/tensorflow/3_layer_mnist/mnist.py b/searchlib/src/test/files/integration/tensorflow/3_layer_mnist/mnist.py
new file mode 100644
index 00000000000..090ab2a9b81
--- /dev/null
+++ b/searchlib/src/test/files/integration/tensorflow/3_layer_mnist/mnist.py
@@ -0,0 +1,95 @@
+
+# Common imports
+import numpy as np
+import tensorflow as tf
+
+from tensorflow.examples.tutorials.mnist import input_data
+from datetime import datetime
+
+now = datetime.utcnow().strftime("%Y%m%d%H%M%S")
+root_logdir = "tf_logs"
+logdir = "{}/run-{}/".format(root_logdir, now)
+
+mnist = input_data.read_data_sets("/tmp/data/")
+X_train = mnist.train.images
+X_test = mnist.test.images
+y_train = mnist.train.labels.astype("int")
+y_test = mnist.test.labels.astype("int")
+
+n_inputs = 28*28 # MNIST
+n_hidden1 = 300
+n_hidden2 = 100
+n_hidden3 = 40
+n_outputs = 10
+
+learning_rate = 0.01
+n_epochs = 40
+batch_size = 50
+
+X = tf.placeholder(tf.float32, shape=(None, n_inputs), name="X")
+y = tf.placeholder(tf.int64, shape=(None), name="y")
+
+
+def neuron_layer(X, n_neurons, name, activation=None):
+ with tf.name_scope(name):
+ n_inputs = int(X.get_shape()[1])
+ stddev = 2 / np.sqrt(n_inputs)
+ init = tf.truncated_normal((n_inputs, n_neurons), stddev=stddev)
+ W = tf.Variable(init, name="weights")
+ b = tf.Variable(tf.zeros([n_neurons]), name="bias")
+ Z = tf.matmul(X, W) + b
+ if activation is not None:
+ return activation(Z)
+ else:
+ return Z
+
+
+with tf.name_scope("dnn"):
+ hidden1 = neuron_layer(X, n_hidden1, name="hidden1", activation=tf.nn.elu)
+ hidden2 = neuron_layer(hidden1, n_hidden2, name="hidden2", activation=tf.nn.relu)
+ hidden3 = neuron_layer(hidden2, n_hidden3, name="hidden3", activation=tf.nn.sigmoid)
+ logits = neuron_layer(hidden3, n_outputs, name="outputs") #, activation=tf.nn.sigmoid)
+
+with tf.name_scope("loss"):
+ xentropy = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y, logits=logits)
+ loss = tf.reduce_mean(xentropy, name="loss")
+
+with tf.name_scope("train"):
+ optimizer = tf.train.GradientDescentOptimizer(learning_rate)
+ training_op = optimizer.minimize(loss)
+
+with tf.name_scope("eval"):
+ correct = tf.nn.in_top_k(logits, y, 1)
+ accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))
+
+init = tf.global_variables_initializer()
+accuracy_summary = tf.summary.scalar('Accuracy', accuracy)
+file_writer = tf.summary.FileWriter(logdir, tf.get_default_graph())
+
+with tf.Session() as sess:
+ init.run()
+ for epoch in range(n_epochs):
+ for iteration in range(mnist.train.num_examples // batch_size):
+ X_batch, y_batch = mnist.train.next_batch(batch_size)
+ sess.run(training_op, feed_dict={X: X_batch, y: y_batch})
+ acc_train = accuracy.eval(feed_dict={X: X_batch, y: y_batch})
+ acc_val = accuracy.eval(feed_dict={X: mnist.validation.images,
+ y: mnist.validation.labels})
+ print(epoch, "Train accuracy:", acc_train, "Val accuracy:", acc_val)
+
+ # Save summary for tensorboard
+ summary_str = accuracy_summary.eval(feed_dict={X: mnist.validation.images,
+ y: mnist.validation.labels})
+ file_writer.add_summary(summary_str, epoch)
+
+ export_path = "saved"
+ print('Exporting trained model to ', export_path)
+ builder = tf.saved_model.builder.SavedModelBuilder(export_path)
+ signature = tf.saved_model.signature_def_utils.predict_signature_def(inputs = {'x':X}, outputs = {'y':logits})
+ builder.add_meta_graph_and_variables(sess,
+ [tf.saved_model.tag_constants.SERVING],
+ signature_def_map={'serving_default':signature})
+ builder.save(as_text=True)
+
+file_writer.close()
+
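The saved/ directory added below is the SavedModel this script exports via SavedModelBuilder. As a quick sanity check outside the Java importer, it can be reloaded with the TF 1.x loader API (a sketch under the same TensorFlow 1.4 assumptions as mnist.py; the logits tensor name dnn/outputs/add is inferred from the naming scheme visible in the graph, not confirmed by this excerpt):

import numpy as np
import tensorflow as tf

with tf.Session(graph=tf.Graph()) as sess:
    # Load the meta graph and variables tagged for serving from the exported directory.
    tf.saved_model.loader.load(sess, [tf.saved_model.tag_constants.SERVING], "saved")
    x = sess.graph.get_tensor_by_name("X:0")
    logits = sess.graph.get_tensor_by_name("dnn/outputs/add:0")
    # Feed a single all-zero 28x28 image and print the 10 raw class scores.
    print(sess.run(logits, feed_dict={x: np.zeros((1, 784), dtype=np.float32)}))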
diff --git a/searchlib/src/test/files/integration/tensorflow/3_layer_mnist/saved/saved_model.pbtxt b/searchlib/src/test/files/integration/tensorflow/3_layer_mnist/saved/saved_model.pbtxt
new file mode 100644
index 00000000000..c8f7ecf11f8
--- /dev/null
+++ b/searchlib/src/test/files/integration/tensorflow/3_layer_mnist/saved/saved_model.pbtxt
@@ -0,0 +1,8550 @@
+saved_model_schema_version: 1
+meta_graphs {
+ meta_info_def {
+ stripped_op_list {
+ op {
+ name: "Add"
+ input_arg {
+ name: "x"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "y"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "z"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_UINT8
+ type: DT_INT8
+ type: DT_INT16
+ type: DT_INT32
+ type: DT_INT64
+ type: DT_COMPLEX64
+ type: DT_COMPLEX128
+ type: DT_STRING
+ }
+ }
+ }
+ }
+ op {
+ name: "ApplyGradientDescent"
+ input_arg {
+ name: "var"
+ type_attr: "T"
+ is_ref: true
+ }
+ input_arg {
+ name: "alpha"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "delta"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "out"
+ type_attr: "T"
+ is_ref: true
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT64
+ type: DT_INT32
+ type: DT_UINT8
+ type: DT_UINT16
+ type: DT_INT16
+ type: DT_INT8
+ type: DT_COMPLEX64
+ type: DT_COMPLEX128
+ type: DT_QINT8
+ type: DT_QUINT8
+ type: DT_QINT32
+ type: DT_HALF
+ }
+ }
+ }
+ attr {
+ name: "use_locking"
+ type: "bool"
+ default_value {
+ b: false
+ }
+ }
+ }
+ op {
+ name: "Assign"
+ input_arg {
+ name: "ref"
+ type_attr: "T"
+ is_ref: true
+ }
+ input_arg {
+ name: "value"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output_ref"
+ type_attr: "T"
+ is_ref: true
+ }
+ attr {
+ name: "T"
+ type: "type"
+ }
+ attr {
+ name: "validate_shape"
+ type: "bool"
+ default_value {
+ b: true
+ }
+ }
+ attr {
+ name: "use_locking"
+ type: "bool"
+ default_value {
+ b: true
+ }
+ }
+ allows_uninitialized_input: true
+ }
+ op {
+ name: "BroadcastGradientArgs"
+ input_arg {
+ name: "s0"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "s1"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "r0"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "r1"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ default_value {
+ type: DT_INT32
+ }
+ allowed_values {
+ list {
+ type: DT_INT32
+ type: DT_INT64
+ }
+ }
+ }
+ }
+ op {
+ name: "Cast"
+ input_arg {
+ name: "x"
+ type_attr: "SrcT"
+ }
+ output_arg {
+ name: "y"
+ type_attr: "DstT"
+ }
+ attr {
+ name: "SrcT"
+ type: "type"
+ }
+ attr {
+ name: "DstT"
+ type: "type"
+ }
+ }
+ op {
+ name: "Const"
+ output_arg {
+ name: "output"
+ type_attr: "dtype"
+ }
+ attr {
+ name: "value"
+ type: "tensor"
+ }
+ attr {
+ name: "dtype"
+ type: "type"
+ }
+ }
+ op {
+ name: "Elu"
+ input_arg {
+ name: "features"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "activations"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ }
+ }
+ }
+ }
+ op {
+ name: "EluGrad"
+ input_arg {
+ name: "gradients"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "outputs"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "backprops"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ }
+ }
+ }
+ }
+ op {
+ name: "ExpandDims"
+ input_arg {
+ name: "input"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "dim"
+ type_attr: "Tdim"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ }
+ attr {
+ name: "Tdim"
+ type: "type"
+ default_value {
+ type: DT_INT32
+ }
+ allowed_values {
+ list {
+ type: DT_INT32
+ type: DT_INT64
+ }
+ }
+ }
+ }
+ op {
+ name: "Fill"
+ input_arg {
+ name: "dims"
+ type: DT_INT32
+ }
+ input_arg {
+ name: "value"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ }
+ }
+ op {
+ name: "FloorDiv"
+ input_arg {
+ name: "x"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "y"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "z"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_UINT8
+ type: DT_INT8
+ type: DT_UINT16
+ type: DT_INT16
+ type: DT_INT32
+ type: DT_INT64
+ type: DT_COMPLEX64
+ type: DT_COMPLEX128
+ }
+ }
+ }
+ }
+ op {
+ name: "Identity"
+ input_arg {
+ name: "input"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ }
+ }
+ op {
+ name: "InTopKV2"
+ input_arg {
+ name: "predictions"
+ type: DT_FLOAT
+ }
+ input_arg {
+ name: "targets"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "k"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "precision"
+ type: DT_BOOL
+ }
+ attr {
+ name: "T"
+ type: "type"
+ default_value {
+ type: DT_INT32
+ }
+ allowed_values {
+ list {
+ type: DT_INT32
+ type: DT_INT64
+ }
+ }
+ }
+ }
+ op {
+ name: "MatMul"
+ input_arg {
+ name: "a"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "b"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "product"
+ type_attr: "T"
+ }
+ attr {
+ name: "transpose_a"
+ type: "bool"
+ default_value {
+ b: false
+ }
+ }
+ attr {
+ name: "transpose_b"
+ type: "bool"
+ default_value {
+ b: false
+ }
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT32
+ type: DT_COMPLEX64
+ type: DT_COMPLEX128
+ }
+ }
+ }
+ }
+ op {
+ name: "Maximum"
+ input_arg {
+ name: "x"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "y"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "z"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT32
+ type: DT_INT64
+ }
+ }
+ }
+ is_commutative: true
+ }
+ op {
+ name: "Mean"
+ input_arg {
+ name: "input"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "reduction_indices"
+ type_attr: "Tidx"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "keep_dims"
+ type: "bool"
+ default_value {
+ b: false
+ }
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT64
+ type: DT_INT32
+ type: DT_UINT8
+ type: DT_UINT16
+ type: DT_INT16
+ type: DT_INT8
+ type: DT_COMPLEX64
+ type: DT_COMPLEX128
+ type: DT_QINT8
+ type: DT_QUINT8
+ type: DT_QINT32
+ type: DT_HALF
+ }
+ }
+ }
+ attr {
+ name: "Tidx"
+ type: "type"
+ default_value {
+ type: DT_INT32
+ }
+ allowed_values {
+ list {
+ type: DT_INT32
+ type: DT_INT64
+ }
+ }
+ }
+ }
+ op {
+ name: "MergeV2Checkpoints"
+ input_arg {
+ name: "checkpoint_prefixes"
+ type: DT_STRING
+ }
+ input_arg {
+ name: "destination_prefix"
+ type: DT_STRING
+ }
+ attr {
+ name: "delete_old_dirs"
+ type: "bool"
+ default_value {
+ b: true
+ }
+ }
+ is_stateful: true
+ }
+ op {
+ name: "Mul"
+ input_arg {
+ name: "x"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "y"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "z"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_UINT8
+ type: DT_INT8
+ type: DT_UINT16
+ type: DT_INT16
+ type: DT_INT32
+ type: DT_INT64
+ type: DT_COMPLEX64
+ type: DT_COMPLEX128
+ }
+ }
+ }
+ is_commutative: true
+ }
+ op {
+ name: "NoOp"
+ }
+ op {
+ name: "Pack"
+ input_arg {
+ name: "values"
+ type_attr: "T"
+ number_attr: "N"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "N"
+ type: "int"
+ has_minimum: true
+ minimum: 1
+ }
+ attr {
+ name: "T"
+ type: "type"
+ }
+ attr {
+ name: "axis"
+ type: "int"
+ default_value {
+ i: 0
+ }
+ }
+ }
+ op {
+ name: "Placeholder"
+ output_arg {
+ name: "output"
+ type_attr: "dtype"
+ }
+ attr {
+ name: "dtype"
+ type: "type"
+ }
+ attr {
+ name: "shape"
+ type: "shape"
+ default_value {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ op {
+ name: "PreventGradient"
+ input_arg {
+ name: "input"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ }
+ attr {
+ name: "message"
+ type: "string"
+ default_value {
+ s: ""
+ }
+ }
+ }
+ op {
+ name: "Prod"
+ input_arg {
+ name: "input"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "reduction_indices"
+ type_attr: "Tidx"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "keep_dims"
+ type: "bool"
+ default_value {
+ b: false
+ }
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT64
+ type: DT_INT32
+ type: DT_UINT8
+ type: DT_UINT16
+ type: DT_INT16
+ type: DT_INT8
+ type: DT_COMPLEX64
+ type: DT_COMPLEX128
+ type: DT_QINT8
+ type: DT_QUINT8
+ type: DT_QINT32
+ type: DT_HALF
+ }
+ }
+ }
+ attr {
+ name: "Tidx"
+ type: "type"
+ default_value {
+ type: DT_INT32
+ }
+ allowed_values {
+ list {
+ type: DT_INT32
+ type: DT_INT64
+ }
+ }
+ }
+ }
+ op {
+ name: "RealDiv"
+ input_arg {
+ name: "x"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "y"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "z"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_UINT8
+ type: DT_INT8
+ type: DT_UINT16
+ type: DT_INT16
+ type: DT_INT32
+ type: DT_INT64
+ type: DT_COMPLEX64
+ type: DT_COMPLEX128
+ }
+ }
+ }
+ }
+ op {
+ name: "Relu"
+ input_arg {
+ name: "features"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "activations"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT32
+ type: DT_INT64
+ type: DT_UINT8
+ type: DT_INT16
+ type: DT_INT8
+ type: DT_UINT16
+ type: DT_HALF
+ }
+ }
+ }
+ }
+ op {
+ name: "ReluGrad"
+ input_arg {
+ name: "gradients"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "features"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "backprops"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT32
+ type: DT_INT64
+ type: DT_UINT8
+ type: DT_INT16
+ type: DT_INT8
+ type: DT_UINT16
+ type: DT_HALF
+ }
+ }
+ }
+ }
+ op {
+ name: "Reshape"
+ input_arg {
+ name: "tensor"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "shape"
+ type_attr: "Tshape"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ }
+ attr {
+ name: "Tshape"
+ type: "type"
+ default_value {
+ type: DT_INT32
+ }
+ allowed_values {
+ list {
+ type: DT_INT32
+ type: DT_INT64
+ }
+ }
+ }
+ }
+ op {
+ name: "RestoreV2"
+ input_arg {
+ name: "prefix"
+ type: DT_STRING
+ }
+ input_arg {
+ name: "tensor_names"
+ type: DT_STRING
+ }
+ input_arg {
+ name: "shape_and_slices"
+ type: DT_STRING
+ }
+ output_arg {
+ name: "tensors"
+ type_list_attr: "dtypes"
+ }
+ attr {
+ name: "dtypes"
+ type: "list(type)"
+ has_minimum: true
+ minimum: 1
+ }
+ is_stateful: true
+ }
+ op {
+ name: "SaveV2"
+ input_arg {
+ name: "prefix"
+ type: DT_STRING
+ }
+ input_arg {
+ name: "tensor_names"
+ type: DT_STRING
+ }
+ input_arg {
+ name: "shape_and_slices"
+ type: DT_STRING
+ }
+ input_arg {
+ name: "tensors"
+ type_list_attr: "dtypes"
+ }
+ attr {
+ name: "dtypes"
+ type: "list(type)"
+ has_minimum: true
+ minimum: 1
+ }
+ is_stateful: true
+ }
+ op {
+ name: "ScalarSummary"
+ input_arg {
+ name: "tags"
+ type: DT_STRING
+ }
+ input_arg {
+ name: "values"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "summary"
+ type: DT_STRING
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT32
+ type: DT_INT64
+ type: DT_UINT8
+ type: DT_INT16
+ type: DT_INT8
+ type: DT_UINT16
+ type: DT_HALF
+ }
+ }
+ }
+ }
+ op {
+ name: "Shape"
+ input_arg {
+ name: "input"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "out_type"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ }
+ attr {
+ name: "out_type"
+ type: "type"
+ default_value {
+ type: DT_INT32
+ }
+ allowed_values {
+ list {
+ type: DT_INT32
+ type: DT_INT64
+ }
+ }
+ }
+ }
+ op {
+ name: "ShardedFilename"
+ input_arg {
+ name: "basename"
+ type: DT_STRING
+ }
+ input_arg {
+ name: "shard"
+ type: DT_INT32
+ }
+ input_arg {
+ name: "num_shards"
+ type: DT_INT32
+ }
+ output_arg {
+ name: "filename"
+ type: DT_STRING
+ }
+ }
+ op {
+ name: "Sigmoid"
+ input_arg {
+ name: "x"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "y"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_COMPLEX64
+ type: DT_COMPLEX128
+ }
+ }
+ }
+ }
+ op {
+ name: "SigmoidGrad"
+ input_arg {
+ name: "y"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "dy"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "z"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_COMPLEX64
+ type: DT_COMPLEX128
+ }
+ }
+ }
+ }
+ op {
+ name: "SparseSoftmaxCrossEntropyWithLogits"
+ input_arg {
+ name: "features"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "labels"
+ type_attr: "Tlabels"
+ }
+ output_arg {
+ name: "loss"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "backprop"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ }
+ }
+ }
+ attr {
+ name: "Tlabels"
+ type: "type"
+ default_value {
+ type: DT_INT64
+ }
+ allowed_values {
+ list {
+ type: DT_INT32
+ type: DT_INT64
+ }
+ }
+ }
+ }
+ op {
+ name: "StringJoin"
+ input_arg {
+ name: "inputs"
+ type: DT_STRING
+ number_attr: "N"
+ }
+ output_arg {
+ name: "output"
+ type: DT_STRING
+ }
+ attr {
+ name: "N"
+ type: "int"
+ has_minimum: true
+ minimum: 1
+ }
+ attr {
+ name: "separator"
+ type: "string"
+ default_value {
+ s: ""
+ }
+ }
+ }
+ op {
+ name: "Sum"
+ input_arg {
+ name: "input"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "reduction_indices"
+ type_attr: "Tidx"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "keep_dims"
+ type: "bool"
+ default_value {
+ b: false
+ }
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ type: DT_INT64
+ type: DT_INT32
+ type: DT_UINT8
+ type: DT_UINT16
+ type: DT_INT16
+ type: DT_INT8
+ type: DT_COMPLEX64
+ type: DT_COMPLEX128
+ type: DT_QINT8
+ type: DT_QUINT8
+ type: DT_QINT32
+ type: DT_HALF
+ }
+ }
+ }
+ attr {
+ name: "Tidx"
+ type: "type"
+ default_value {
+ type: DT_INT32
+ }
+ allowed_values {
+ list {
+ type: DT_INT32
+ type: DT_INT64
+ }
+ }
+ }
+ }
+ op {
+ name: "Tile"
+ input_arg {
+ name: "input"
+ type_attr: "T"
+ }
+ input_arg {
+ name: "multiples"
+ type_attr: "Tmultiples"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ }
+ attr {
+ name: "Tmultiples"
+ type: "type"
+ default_value {
+ type: DT_INT32
+ }
+ allowed_values {
+ list {
+ type: DT_INT32
+ type: DT_INT64
+ }
+ }
+ }
+ }
+ op {
+ name: "TruncatedNormal"
+ input_arg {
+ name: "shape"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "dtype"
+ }
+ attr {
+ name: "seed"
+ type: "int"
+ default_value {
+ i: 0
+ }
+ }
+ attr {
+ name: "seed2"
+ type: "int"
+ default_value {
+ i: 0
+ }
+ }
+ attr {
+ name: "dtype"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_HALF
+ type: DT_FLOAT
+ type: DT_DOUBLE
+ }
+ }
+ }
+ attr {
+ name: "T"
+ type: "type"
+ allowed_values {
+ list {
+ type: DT_INT32
+ type: DT_INT64
+ }
+ }
+ }
+ is_stateful: true
+ }
+ op {
+ name: "VariableV2"
+ output_arg {
+ name: "ref"
+ type_attr: "dtype"
+ is_ref: true
+ }
+ attr {
+ name: "shape"
+ type: "shape"
+ }
+ attr {
+ name: "dtype"
+ type: "type"
+ }
+ attr {
+ name: "container"
+ type: "string"
+ default_value {
+ s: ""
+ }
+ }
+ attr {
+ name: "shared_name"
+ type: "string"
+ default_value {
+ s: ""
+ }
+ }
+ is_stateful: true
+ }
+ op {
+ name: "ZerosLike"
+ input_arg {
+ name: "x"
+ type_attr: "T"
+ }
+ output_arg {
+ name: "y"
+ type_attr: "T"
+ }
+ attr {
+ name: "T"
+ type: "type"
+ }
+ }
+ }
+ tags: "serve"
+ tensorflow_version: "1.4.1"
+ tensorflow_git_version: "v1.4.0-19-ga52c8d9"
+ }
+ graph_def {
+ node {
+ name: "X"
+ op: "Placeholder"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 784
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 784
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "y"
+ op: "Placeholder"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT64
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden1/truncated_normal/shape"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ }
+ tensor_content: "\020\003\000\000,\001\000\000"
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden1/truncated_normal/mean"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ }
+ float_val: 0.0
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden1/truncated_normal/stddev"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ }
+ float_val: 0.0714285746216774
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden1/truncated_normal/TruncatedNormal"
+ op: "TruncatedNormal"
+ input: "dnn/hidden1/truncated_normal/shape"
+ attr {
+ key: "T"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 784
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "seed"
+ value {
+ i: 0
+ }
+ }
+ attr {
+ key: "seed2"
+ value {
+ i: 0
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden1/truncated_normal/mul"
+ op: "Mul"
+ input: "dnn/hidden1/truncated_normal/TruncatedNormal"
+ input: "dnn/hidden1/truncated_normal/stddev"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 784
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden1/truncated_normal"
+ op: "Add"
+ input: "dnn/hidden1/truncated_normal/mul"
+ input: "dnn/hidden1/truncated_normal/mean"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 784
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden1/weights"
+ op: "VariableV2"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 784
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "container"
+ value {
+ s: ""
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 784
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ attr {
+ key: "shared_name"
+ value {
+ s: ""
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden1/weights/Assign"
+ op: "Assign"
+ input: "dnn/hidden1/weights"
+ input: "dnn/hidden1/truncated_normal"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden1/weights"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 784
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden1/weights/read"
+ op: "Identity"
+ input: "dnn/hidden1/weights"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden1/weights"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 784
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden1/zeros"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 300
+ }
+ }
+ float_val: 0.0
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden1/bias"
+ op: "VariableV2"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "container"
+ value {
+ s: ""
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ attr {
+ key: "shared_name"
+ value {
+ s: ""
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden1/bias/Assign"
+ op: "Assign"
+ input: "dnn/hidden1/bias"
+ input: "dnn/hidden1/zeros"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden1/bias"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden1/bias/read"
+ op: "Identity"
+ input: "dnn/hidden1/bias"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden1/bias"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden1/MatMul"
+ op: "MatMul"
+ input: "X"
+ input: "dnn/hidden1/weights/read"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "transpose_a"
+ value {
+ b: false
+ }
+ }
+ attr {
+ key: "transpose_b"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden1/add"
+ op: "Add"
+ input: "dnn/hidden1/MatMul"
+ input: "dnn/hidden1/bias/read"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden1/Elu"
+ op: "Elu"
+ input: "dnn/hidden1/add"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden2/truncated_normal/shape"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ }
+ tensor_content: ",\001\000\000d\000\000\000"
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden2/truncated_normal/mean"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ }
+ float_val: 0.0
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden2/truncated_normal/stddev"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ }
+ float_val: 0.1154700517654419
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden2/truncated_normal/TruncatedNormal"
+ op: "TruncatedNormal"
+ input: "dnn/hidden2/truncated_normal/shape"
+ attr {
+ key: "T"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "seed"
+ value {
+ i: 0
+ }
+ }
+ attr {
+ key: "seed2"
+ value {
+ i: 0
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden2/truncated_normal/mul"
+ op: "Mul"
+ input: "dnn/hidden2/truncated_normal/TruncatedNormal"
+ input: "dnn/hidden2/truncated_normal/stddev"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden2/truncated_normal"
+ op: "Add"
+ input: "dnn/hidden2/truncated_normal/mul"
+ input: "dnn/hidden2/truncated_normal/mean"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden2/weights"
+ op: "VariableV2"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "container"
+ value {
+ s: ""
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 300
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ attr {
+ key: "shared_name"
+ value {
+ s: ""
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden2/weights/Assign"
+ op: "Assign"
+ input: "dnn/hidden2/weights"
+ input: "dnn/hidden2/truncated_normal"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden2/weights"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden2/weights/read"
+ op: "Identity"
+ input: "dnn/hidden2/weights"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden2/weights"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden2/zeros"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 100
+ }
+ }
+ float_val: 0.0
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden2/bias"
+ op: "VariableV2"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "container"
+ value {
+ s: ""
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ attr {
+ key: "shared_name"
+ value {
+ s: ""
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden2/bias/Assign"
+ op: "Assign"
+ input: "dnn/hidden2/bias"
+ input: "dnn/hidden2/zeros"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden2/bias"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden2/bias/read"
+ op: "Identity"
+ input: "dnn/hidden2/bias"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden2/bias"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden2/MatMul"
+ op: "MatMul"
+ input: "dnn/hidden1/Elu"
+ input: "dnn/hidden2/weights/read"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "transpose_a"
+ value {
+ b: false
+ }
+ }
+ attr {
+ key: "transpose_b"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden2/add"
+ op: "Add"
+ input: "dnn/hidden2/MatMul"
+ input: "dnn/hidden2/bias/read"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden2/Relu"
+ op: "Relu"
+ input: "dnn/hidden2/add"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden3/truncated_normal/shape"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ }
+ tensor_content: "d\000\000\000(\000\000\000"
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden3/truncated_normal/mean"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ }
+ float_val: 0.0
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden3/truncated_normal/stddev"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ }
+ float_val: 0.20000000298023224
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden3/truncated_normal/TruncatedNormal"
+ op: "TruncatedNormal"
+ input: "dnn/hidden3/truncated_normal/shape"
+ attr {
+ key: "T"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "seed"
+ value {
+ i: 0
+ }
+ }
+ attr {
+ key: "seed2"
+ value {
+ i: 0
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden3/truncated_normal/mul"
+ op: "Mul"
+ input: "dnn/hidden3/truncated_normal/TruncatedNormal"
+ input: "dnn/hidden3/truncated_normal/stddev"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden3/truncated_normal"
+ op: "Add"
+ input: "dnn/hidden3/truncated_normal/mul"
+ input: "dnn/hidden3/truncated_normal/mean"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden3/weights"
+ op: "VariableV2"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "container"
+ value {
+ s: ""
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 100
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ attr {
+ key: "shared_name"
+ value {
+ s: ""
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden3/weights/Assign"
+ op: "Assign"
+ input: "dnn/hidden3/weights"
+ input: "dnn/hidden3/truncated_normal"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden3/weights"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden3/weights/read"
+ op: "Identity"
+ input: "dnn/hidden3/weights"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden3/weights"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden3/zeros"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 40
+ }
+ }
+ float_val: 0.0
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden3/bias"
+ op: "VariableV2"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "container"
+ value {
+ s: ""
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ attr {
+ key: "shared_name"
+ value {
+ s: ""
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden3/bias/Assign"
+ op: "Assign"
+ input: "dnn/hidden3/bias"
+ input: "dnn/hidden3/zeros"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden3/bias"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden3/bias/read"
+ op: "Identity"
+ input: "dnn/hidden3/bias"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden3/bias"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden3/MatMul"
+ op: "MatMul"
+ input: "dnn/hidden2/Relu"
+ input: "dnn/hidden3/weights/read"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "transpose_a"
+ value {
+ b: false
+ }
+ }
+ attr {
+ key: "transpose_b"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden3/add"
+ op: "Add"
+ input: "dnn/hidden3/MatMul"
+ input: "dnn/hidden3/bias/read"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/hidden3/Sigmoid"
+ op: "Sigmoid"
+ input: "dnn/hidden3/add"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/outputs/truncated_normal/shape"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ }
+ tensor_content: "(\000\000\000\n\000\000\000"
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/outputs/truncated_normal/mean"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ }
+ float_val: 0.0
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/outputs/truncated_normal/stddev"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ }
+ float_val: 0.3162277638912201
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/outputs/truncated_normal/TruncatedNormal"
+ op: "TruncatedNormal"
+ input: "dnn/outputs/truncated_normal/shape"
+ attr {
+ key: "T"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "seed"
+ value {
+ i: 0
+ }
+ }
+ attr {
+ key: "seed2"
+ value {
+ i: 0
+ }
+ }
+ }
+ node {
+ name: "dnn/outputs/truncated_normal/mul"
+ op: "Mul"
+ input: "dnn/outputs/truncated_normal/TruncatedNormal"
+ input: "dnn/outputs/truncated_normal/stddev"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/outputs/truncated_normal"
+ op: "Add"
+ input: "dnn/outputs/truncated_normal/mul"
+ input: "dnn/outputs/truncated_normal/mean"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/outputs/weights"
+ op: "VariableV2"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "container"
+ value {
+ s: ""
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 40
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ attr {
+ key: "shared_name"
+ value {
+ s: ""
+ }
+ }
+ }
+ node {
+ name: "dnn/outputs/weights/Assign"
+ op: "Assign"
+ input: "dnn/outputs/weights"
+ input: "dnn/outputs/truncated_normal"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/outputs/weights"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "dnn/outputs/weights/read"
+ op: "Identity"
+ input: "dnn/outputs/weights"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/outputs/weights"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/outputs/zeros"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 10
+ }
+ }
+ float_val: 0.0
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/outputs/bias"
+ op: "VariableV2"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "container"
+ value {
+ s: ""
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ attr {
+ key: "shared_name"
+ value {
+ s: ""
+ }
+ }
+ }
+ node {
+ name: "dnn/outputs/bias/Assign"
+ op: "Assign"
+ input: "dnn/outputs/bias"
+ input: "dnn/outputs/zeros"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/outputs/bias"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "dnn/outputs/bias/read"
+ op: "Identity"
+ input: "dnn/outputs/bias"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/outputs/bias"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "dnn/outputs/MatMul"
+ op: "MatMul"
+ input: "dnn/hidden3/Sigmoid"
+ input: "dnn/outputs/weights/read"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "transpose_a"
+ value {
+ b: false
+ }
+ }
+ attr {
+ key: "transpose_b"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "dnn/outputs/add"
+ op: "Add"
+ input: "dnn/outputs/MatMul"
+ input: "dnn/outputs/bias/read"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "loss/SparseSoftmaxCrossEntropyWithLogits/Shape"
+ op: "Shape"
+ input: "y"
+ attr {
+ key: "T"
+ value {
+ type: DT_INT64
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "out_type"
+ value {
+ type: DT_INT32
+ }
+ }
+ }
+ node {
+ name: "loss/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits"
+ op: "SparseSoftmaxCrossEntropyWithLogits"
+ input: "dnn/outputs/add"
+ input: "y"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tlabels"
+ value {
+ type: DT_INT64
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ }
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "loss/Const"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ int_val: 0
+ }
+ }
+ }
+ }
+ node {
+ name: "loss/loss"
+ op: "Mean"
+ input: "loss/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits"
+ input: "loss/Const"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "keep_dims"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/gradients/Shape"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/Const"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ }
+ float_val: 1.0
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/Fill"
+ op: "Fill"
+ input: "train/gradients/Shape"
+ input: "train/gradients/Const"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/loss_grad/Reshape/shape"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ int_val: 1
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/loss_grad/Reshape"
+ op: "Reshape"
+ input: "train/gradients/Fill"
+ input: "train/gradients/loss/loss_grad/Reshape/shape"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tshape"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/loss_grad/Shape"
+ op: "Shape"
+ input: "loss/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "out_type"
+ value {
+ type: DT_INT32
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/loss_grad/Tile"
+ op: "Tile"
+ input: "train/gradients/loss/loss_grad/Reshape"
+ input: "train/gradients/loss/loss_grad/Shape"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tmultiples"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/loss_grad/Shape_1"
+ op: "Shape"
+ input: "loss/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "out_type"
+ value {
+ type: DT_INT32
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/loss_grad/Shape_2"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/loss_grad/Const"
+ op: "Const"
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/loss/loss_grad/Shape_1"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ int_val: 0
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/loss_grad/Prod"
+ op: "Prod"
+ input: "train/gradients/loss/loss_grad/Shape_1"
+ input: "train/gradients/loss/loss_grad/Const"
+ attr {
+ key: "T"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/loss/loss_grad/Shape_1"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "keep_dims"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/loss_grad/Const_1"
+ op: "Const"
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/loss/loss_grad/Shape_1"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ int_val: 0
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/loss_grad/Prod_1"
+ op: "Prod"
+ input: "train/gradients/loss/loss_grad/Shape_2"
+ input: "train/gradients/loss/loss_grad/Const_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/loss/loss_grad/Shape_1"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "keep_dims"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/loss_grad/Maximum/y"
+ op: "Const"
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/loss/loss_grad/Shape_1"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ }
+ int_val: 1
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/loss_grad/Maximum"
+ op: "Maximum"
+ input: "train/gradients/loss/loss_grad/Prod_1"
+ input: "train/gradients/loss/loss_grad/Maximum/y"
+ attr {
+ key: "T"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/loss/loss_grad/Shape_1"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/loss_grad/floordiv"
+ op: "FloorDiv"
+ input: "train/gradients/loss/loss_grad/Prod"
+ input: "train/gradients/loss/loss_grad/Maximum"
+ attr {
+ key: "T"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/loss/loss_grad/Shape_1"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/loss_grad/Cast"
+ op: "Cast"
+ input: "train/gradients/loss/loss_grad/floordiv"
+ attr {
+ key: "DstT"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "SrcT"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/loss_grad/truediv"
+ op: "RealDiv"
+ input: "train/gradients/loss/loss_grad/Tile"
+ input: "train/gradients/loss/loss_grad/Cast"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/zeros_like"
+ op: "ZerosLike"
+ input: "loss/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits:1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/PreventGradient"
+ op: "PreventGradient"
+ input: "loss/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits:1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "message"
+ value {
+ s: "Currently there is no way to take the second derivative of sparse_softmax_cross_entropy_with_logits due to the fused implementation\'s interaction with tf.gradients()"
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/ExpandDims/dim"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ }
+ int_val: -1
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/ExpandDims"
+ op: "ExpandDims"
+ input: "train/gradients/loss/loss_grad/truediv"
+ input: "train/gradients/loss/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/ExpandDims/dim"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tdim"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/loss/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/mul"
+ op: "Mul"
+ input: "train/gradients/loss/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/ExpandDims"
+ input: "train/gradients/loss/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/PreventGradient"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/outputs/add_grad/Shape"
+ op: "Shape"
+ input: "dnn/outputs/MatMul"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "out_type"
+ value {
+ type: DT_INT32
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/outputs/add_grad/Shape_1"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ int_val: 10
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/outputs/add_grad/BroadcastGradientArgs"
+ op: "BroadcastGradientArgs"
+ input: "train/gradients/dnn/outputs/add_grad/Shape"
+ input: "train/gradients/dnn/outputs/add_grad/Shape_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ }
+ shape {
+ dim {
+ size: -1
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/outputs/add_grad/Sum"
+ op: "Sum"
+ input: "train/gradients/loss/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/mul"
+ input: "train/gradients/dnn/outputs/add_grad/BroadcastGradientArgs"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "keep_dims"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/outputs/add_grad/Reshape"
+ op: "Reshape"
+ input: "train/gradients/dnn/outputs/add_grad/Sum"
+ input: "train/gradients/dnn/outputs/add_grad/Shape"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tshape"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/outputs/add_grad/Sum_1"
+ op: "Sum"
+ input: "train/gradients/loss/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/mul"
+ input: "train/gradients/dnn/outputs/add_grad/BroadcastGradientArgs:1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "keep_dims"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/outputs/add_grad/Reshape_1"
+ op: "Reshape"
+ input: "train/gradients/dnn/outputs/add_grad/Sum_1"
+ input: "train/gradients/dnn/outputs/add_grad/Shape_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tshape"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/outputs/add_grad/tuple/group_deps"
+ op: "NoOp"
+ input: "^train/gradients/dnn/outputs/add_grad/Reshape"
+ input: "^train/gradients/dnn/outputs/add_grad/Reshape_1"
+ }
+ node {
+ name: "train/gradients/dnn/outputs/add_grad/tuple/control_dependency"
+ op: "Identity"
+ input: "train/gradients/dnn/outputs/add_grad/Reshape"
+ input: "^train/gradients/dnn/outputs/add_grad/tuple/group_deps"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/dnn/outputs/add_grad/Reshape"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/outputs/add_grad/tuple/control_dependency_1"
+ op: "Identity"
+ input: "train/gradients/dnn/outputs/add_grad/Reshape_1"
+ input: "^train/gradients/dnn/outputs/add_grad/tuple/group_deps"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/dnn/outputs/add_grad/Reshape_1"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/outputs/MatMul_grad/MatMul"
+ op: "MatMul"
+ input: "train/gradients/dnn/outputs/add_grad/tuple/control_dependency"
+ input: "dnn/outputs/weights/read"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "transpose_a"
+ value {
+ b: false
+ }
+ }
+ attr {
+ key: "transpose_b"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/outputs/MatMul_grad/MatMul_1"
+ op: "MatMul"
+ input: "dnn/hidden3/Sigmoid"
+ input: "train/gradients/dnn/outputs/add_grad/tuple/control_dependency"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "transpose_a"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "transpose_b"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/outputs/MatMul_grad/tuple/group_deps"
+ op: "NoOp"
+ input: "^train/gradients/dnn/outputs/MatMul_grad/MatMul"
+ input: "^train/gradients/dnn/outputs/MatMul_grad/MatMul_1"
+ }
+ node {
+ name: "train/gradients/dnn/outputs/MatMul_grad/tuple/control_dependency"
+ op: "Identity"
+ input: "train/gradients/dnn/outputs/MatMul_grad/MatMul"
+ input: "^train/gradients/dnn/outputs/MatMul_grad/tuple/group_deps"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/dnn/outputs/MatMul_grad/MatMul"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/outputs/MatMul_grad/tuple/control_dependency_1"
+ op: "Identity"
+ input: "train/gradients/dnn/outputs/MatMul_grad/MatMul_1"
+ input: "^train/gradients/dnn/outputs/MatMul_grad/tuple/group_deps"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/dnn/outputs/MatMul_grad/MatMul_1"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden3/Sigmoid_grad/SigmoidGrad"
+ op: "SigmoidGrad"
+ input: "dnn/hidden3/Sigmoid"
+ input: "train/gradients/dnn/outputs/MatMul_grad/tuple/control_dependency"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden3/add_grad/Shape"
+ op: "Shape"
+ input: "dnn/hidden3/MatMul"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "out_type"
+ value {
+ type: DT_INT32
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden3/add_grad/Shape_1"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ int_val: 40
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden3/add_grad/BroadcastGradientArgs"
+ op: "BroadcastGradientArgs"
+ input: "train/gradients/dnn/hidden3/add_grad/Shape"
+ input: "train/gradients/dnn/hidden3/add_grad/Shape_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ }
+ shape {
+ dim {
+ size: -1
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden3/add_grad/Sum"
+ op: "Sum"
+ input: "train/gradients/dnn/hidden3/Sigmoid_grad/SigmoidGrad"
+ input: "train/gradients/dnn/hidden3/add_grad/BroadcastGradientArgs"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "keep_dims"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden3/add_grad/Reshape"
+ op: "Reshape"
+ input: "train/gradients/dnn/hidden3/add_grad/Sum"
+ input: "train/gradients/dnn/hidden3/add_grad/Shape"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tshape"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden3/add_grad/Sum_1"
+ op: "Sum"
+ input: "train/gradients/dnn/hidden3/Sigmoid_grad/SigmoidGrad"
+ input: "train/gradients/dnn/hidden3/add_grad/BroadcastGradientArgs:1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "keep_dims"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden3/add_grad/Reshape_1"
+ op: "Reshape"
+ input: "train/gradients/dnn/hidden3/add_grad/Sum_1"
+ input: "train/gradients/dnn/hidden3/add_grad/Shape_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tshape"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden3/add_grad/tuple/group_deps"
+ op: "NoOp"
+ input: "^train/gradients/dnn/hidden3/add_grad/Reshape"
+ input: "^train/gradients/dnn/hidden3/add_grad/Reshape_1"
+ }
+ node {
+ name: "train/gradients/dnn/hidden3/add_grad/tuple/control_dependency"
+ op: "Identity"
+ input: "train/gradients/dnn/hidden3/add_grad/Reshape"
+ input: "^train/gradients/dnn/hidden3/add_grad/tuple/group_deps"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/dnn/hidden3/add_grad/Reshape"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden3/add_grad/tuple/control_dependency_1"
+ op: "Identity"
+ input: "train/gradients/dnn/hidden3/add_grad/Reshape_1"
+ input: "^train/gradients/dnn/hidden3/add_grad/tuple/group_deps"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/dnn/hidden3/add_grad/Reshape_1"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden3/MatMul_grad/MatMul"
+ op: "MatMul"
+ input: "train/gradients/dnn/hidden3/add_grad/tuple/control_dependency"
+ input: "dnn/hidden3/weights/read"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "transpose_a"
+ value {
+ b: false
+ }
+ }
+ attr {
+ key: "transpose_b"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden3/MatMul_grad/MatMul_1"
+ op: "MatMul"
+ input: "dnn/hidden2/Relu"
+ input: "train/gradients/dnn/hidden3/add_grad/tuple/control_dependency"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "transpose_a"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "transpose_b"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden3/MatMul_grad/tuple/group_deps"
+ op: "NoOp"
+ input: "^train/gradients/dnn/hidden3/MatMul_grad/MatMul"
+ input: "^train/gradients/dnn/hidden3/MatMul_grad/MatMul_1"
+ }
+ node {
+ name: "train/gradients/dnn/hidden3/MatMul_grad/tuple/control_dependency"
+ op: "Identity"
+ input: "train/gradients/dnn/hidden3/MatMul_grad/MatMul"
+ input: "^train/gradients/dnn/hidden3/MatMul_grad/tuple/group_deps"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/dnn/hidden3/MatMul_grad/MatMul"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden3/MatMul_grad/tuple/control_dependency_1"
+ op: "Identity"
+ input: "train/gradients/dnn/hidden3/MatMul_grad/MatMul_1"
+ input: "^train/gradients/dnn/hidden3/MatMul_grad/tuple/group_deps"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/dnn/hidden3/MatMul_grad/MatMul_1"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden2/Relu_grad/ReluGrad"
+ op: "ReluGrad"
+ input: "train/gradients/dnn/hidden3/MatMul_grad/tuple/control_dependency"
+ input: "dnn/hidden2/Relu"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden2/add_grad/Shape"
+ op: "Shape"
+ input: "dnn/hidden2/MatMul"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "out_type"
+ value {
+ type: DT_INT32
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden2/add_grad/Shape_1"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ int_val: 100
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden2/add_grad/BroadcastGradientArgs"
+ op: "BroadcastGradientArgs"
+ input: "train/gradients/dnn/hidden2/add_grad/Shape"
+ input: "train/gradients/dnn/hidden2/add_grad/Shape_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ }
+ shape {
+ dim {
+ size: -1
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden2/add_grad/Sum"
+ op: "Sum"
+ input: "train/gradients/dnn/hidden2/Relu_grad/ReluGrad"
+ input: "train/gradients/dnn/hidden2/add_grad/BroadcastGradientArgs"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "keep_dims"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden2/add_grad/Reshape"
+ op: "Reshape"
+ input: "train/gradients/dnn/hidden2/add_grad/Sum"
+ input: "train/gradients/dnn/hidden2/add_grad/Shape"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tshape"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden2/add_grad/Sum_1"
+ op: "Sum"
+ input: "train/gradients/dnn/hidden2/Relu_grad/ReluGrad"
+ input: "train/gradients/dnn/hidden2/add_grad/BroadcastGradientArgs:1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "keep_dims"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden2/add_grad/Reshape_1"
+ op: "Reshape"
+ input: "train/gradients/dnn/hidden2/add_grad/Sum_1"
+ input: "train/gradients/dnn/hidden2/add_grad/Shape_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tshape"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden2/add_grad/tuple/group_deps"
+ op: "NoOp"
+ input: "^train/gradients/dnn/hidden2/add_grad/Reshape"
+ input: "^train/gradients/dnn/hidden2/add_grad/Reshape_1"
+ }
+ node {
+ name: "train/gradients/dnn/hidden2/add_grad/tuple/control_dependency"
+ op: "Identity"
+ input: "train/gradients/dnn/hidden2/add_grad/Reshape"
+ input: "^train/gradients/dnn/hidden2/add_grad/tuple/group_deps"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/dnn/hidden2/add_grad/Reshape"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden2/add_grad/tuple/control_dependency_1"
+ op: "Identity"
+ input: "train/gradients/dnn/hidden2/add_grad/Reshape_1"
+ input: "^train/gradients/dnn/hidden2/add_grad/tuple/group_deps"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/dnn/hidden2/add_grad/Reshape_1"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden2/MatMul_grad/MatMul"
+ op: "MatMul"
+ input: "train/gradients/dnn/hidden2/add_grad/tuple/control_dependency"
+ input: "dnn/hidden2/weights/read"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "transpose_a"
+ value {
+ b: false
+ }
+ }
+ attr {
+ key: "transpose_b"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden2/MatMul_grad/MatMul_1"
+ op: "MatMul"
+ input: "dnn/hidden1/Elu"
+ input: "train/gradients/dnn/hidden2/add_grad/tuple/control_dependency"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "transpose_a"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "transpose_b"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden2/MatMul_grad/tuple/group_deps"
+ op: "NoOp"
+ input: "^train/gradients/dnn/hidden2/MatMul_grad/MatMul"
+ input: "^train/gradients/dnn/hidden2/MatMul_grad/MatMul_1"
+ }
+ node {
+ name: "train/gradients/dnn/hidden2/MatMul_grad/tuple/control_dependency"
+ op: "Identity"
+ input: "train/gradients/dnn/hidden2/MatMul_grad/MatMul"
+ input: "^train/gradients/dnn/hidden2/MatMul_grad/tuple/group_deps"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/dnn/hidden2/MatMul_grad/MatMul"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden2/MatMul_grad/tuple/control_dependency_1"
+ op: "Identity"
+ input: "train/gradients/dnn/hidden2/MatMul_grad/MatMul_1"
+ input: "^train/gradients/dnn/hidden2/MatMul_grad/tuple/group_deps"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/dnn/hidden2/MatMul_grad/MatMul_1"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden1/Elu_grad/EluGrad"
+ op: "EluGrad"
+ input: "train/gradients/dnn/hidden2/MatMul_grad/tuple/control_dependency"
+ input: "dnn/hidden1/Elu"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden1/add_grad/Shape"
+ op: "Shape"
+ input: "dnn/hidden1/MatMul"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "out_type"
+ value {
+ type: DT_INT32
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden1/add_grad/Shape_1"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ int_val: 300
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden1/add_grad/BroadcastGradientArgs"
+ op: "BroadcastGradientArgs"
+ input: "train/gradients/dnn/hidden1/add_grad/Shape"
+ input: "train/gradients/dnn/hidden1/add_grad/Shape_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ }
+ shape {
+ dim {
+ size: -1
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden1/add_grad/Sum"
+ op: "Sum"
+ input: "train/gradients/dnn/hidden1/Elu_grad/EluGrad"
+ input: "train/gradients/dnn/hidden1/add_grad/BroadcastGradientArgs"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "keep_dims"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden1/add_grad/Reshape"
+ op: "Reshape"
+ input: "train/gradients/dnn/hidden1/add_grad/Sum"
+ input: "train/gradients/dnn/hidden1/add_grad/Shape"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tshape"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden1/add_grad/Sum_1"
+ op: "Sum"
+ input: "train/gradients/dnn/hidden1/Elu_grad/EluGrad"
+ input: "train/gradients/dnn/hidden1/add_grad/BroadcastGradientArgs:1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "keep_dims"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden1/add_grad/Reshape_1"
+ op: "Reshape"
+ input: "train/gradients/dnn/hidden1/add_grad/Sum_1"
+ input: "train/gradients/dnn/hidden1/add_grad/Shape_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tshape"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden1/add_grad/tuple/group_deps"
+ op: "NoOp"
+ input: "^train/gradients/dnn/hidden1/add_grad/Reshape"
+ input: "^train/gradients/dnn/hidden1/add_grad/Reshape_1"
+ }
+ node {
+ name: "train/gradients/dnn/hidden1/add_grad/tuple/control_dependency"
+ op: "Identity"
+ input: "train/gradients/dnn/hidden1/add_grad/Reshape"
+ input: "^train/gradients/dnn/hidden1/add_grad/tuple/group_deps"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/dnn/hidden1/add_grad/Reshape"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden1/add_grad/tuple/control_dependency_1"
+ op: "Identity"
+ input: "train/gradients/dnn/hidden1/add_grad/Reshape_1"
+ input: "^train/gradients/dnn/hidden1/add_grad/tuple/group_deps"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/dnn/hidden1/add_grad/Reshape_1"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden1/MatMul_grad/MatMul"
+ op: "MatMul"
+ input: "train/gradients/dnn/hidden1/add_grad/tuple/control_dependency"
+ input: "dnn/hidden1/weights/read"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 784
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "transpose_a"
+ value {
+ b: false
+ }
+ }
+ attr {
+ key: "transpose_b"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden1/MatMul_grad/MatMul_1"
+ op: "MatMul"
+ input: "X"
+ input: "train/gradients/dnn/hidden1/add_grad/tuple/control_dependency"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 784
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "transpose_a"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "transpose_b"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden1/MatMul_grad/tuple/group_deps"
+ op: "NoOp"
+ input: "^train/gradients/dnn/hidden1/MatMul_grad/MatMul"
+ input: "^train/gradients/dnn/hidden1/MatMul_grad/MatMul_1"
+ }
+ node {
+ name: "train/gradients/dnn/hidden1/MatMul_grad/tuple/control_dependency"
+ op: "Identity"
+ input: "train/gradients/dnn/hidden1/MatMul_grad/MatMul"
+ input: "^train/gradients/dnn/hidden1/MatMul_grad/tuple/group_deps"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/dnn/hidden1/MatMul_grad/MatMul"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 784
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/gradients/dnn/hidden1/MatMul_grad/tuple/control_dependency_1"
+ op: "Identity"
+ input: "train/gradients/dnn/hidden1/MatMul_grad/MatMul_1"
+ input: "^train/gradients/dnn/hidden1/MatMul_grad/tuple/group_deps"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@train/gradients/dnn/hidden1/MatMul_grad/MatMul_1"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 784
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "train/GradientDescent/learning_rate"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ }
+ float_val: 0.009999999776482582
+ }
+ }
+ }
+ }
+ node {
+ name: "train/GradientDescent/update_dnn/hidden1/weights/ApplyGradientDescent"
+ op: "ApplyGradientDescent"
+ input: "dnn/hidden1/weights"
+ input: "train/GradientDescent/learning_rate"
+ input: "train/gradients/dnn/hidden1/MatMul_grad/tuple/control_dependency_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden1/weights"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 784
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/GradientDescent/update_dnn/hidden1/bias/ApplyGradientDescent"
+ op: "ApplyGradientDescent"
+ input: "dnn/hidden1/bias"
+ input: "train/GradientDescent/learning_rate"
+ input: "train/gradients/dnn/hidden1/add_grad/tuple/control_dependency_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden1/bias"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/GradientDescent/update_dnn/hidden2/weights/ApplyGradientDescent"
+ op: "ApplyGradientDescent"
+ input: "dnn/hidden2/weights"
+ input: "train/GradientDescent/learning_rate"
+ input: "train/gradients/dnn/hidden2/MatMul_grad/tuple/control_dependency_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden2/weights"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/GradientDescent/update_dnn/hidden2/bias/ApplyGradientDescent"
+ op: "ApplyGradientDescent"
+ input: "dnn/hidden2/bias"
+ input: "train/GradientDescent/learning_rate"
+ input: "train/gradients/dnn/hidden2/add_grad/tuple/control_dependency_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden2/bias"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/GradientDescent/update_dnn/hidden3/weights/ApplyGradientDescent"
+ op: "ApplyGradientDescent"
+ input: "dnn/hidden3/weights"
+ input: "train/GradientDescent/learning_rate"
+ input: "train/gradients/dnn/hidden3/MatMul_grad/tuple/control_dependency_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden3/weights"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/GradientDescent/update_dnn/hidden3/bias/ApplyGradientDescent"
+ op: "ApplyGradientDescent"
+ input: "dnn/hidden3/bias"
+ input: "train/GradientDescent/learning_rate"
+ input: "train/gradients/dnn/hidden3/add_grad/tuple/control_dependency_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden3/bias"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/GradientDescent/update_dnn/outputs/weights/ApplyGradientDescent"
+ op: "ApplyGradientDescent"
+ input: "dnn/outputs/weights"
+ input: "train/GradientDescent/learning_rate"
+ input: "train/gradients/dnn/outputs/MatMul_grad/tuple/control_dependency_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/outputs/weights"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/GradientDescent/update_dnn/outputs/bias/ApplyGradientDescent"
+ op: "ApplyGradientDescent"
+ input: "dnn/outputs/bias"
+ input: "train/GradientDescent/learning_rate"
+ input: "train/gradients/dnn/outputs/add_grad/tuple/control_dependency_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/outputs/bias"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "train/GradientDescent"
+ op: "NoOp"
+ input: "^train/GradientDescent/update_dnn/hidden1/weights/ApplyGradientDescent"
+ input: "^train/GradientDescent/update_dnn/hidden1/bias/ApplyGradientDescent"
+ input: "^train/GradientDescent/update_dnn/hidden2/weights/ApplyGradientDescent"
+ input: "^train/GradientDescent/update_dnn/hidden2/bias/ApplyGradientDescent"
+ input: "^train/GradientDescent/update_dnn/hidden3/weights/ApplyGradientDescent"
+ input: "^train/GradientDescent/update_dnn/hidden3/bias/ApplyGradientDescent"
+ input: "^train/GradientDescent/update_dnn/outputs/weights/ApplyGradientDescent"
+ input: "^train/GradientDescent/update_dnn/outputs/bias/ApplyGradientDescent"
+ }
+ node {
+ name: "eval/in_top_k/InTopKV2/k"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT64
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT64
+ tensor_shape {
+ }
+ int64_val: 1
+ }
+ }
+ }
+ }
+ node {
+ name: "eval/in_top_k/InTopKV2"
+ op: "InTopKV2"
+ input: "dnn/outputs/add"
+ input: "y"
+ input: "eval/in_top_k/InTopKV2/k"
+ attr {
+ key: "T"
+ value {
+ type: DT_INT64
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "eval/Cast"
+ op: "Cast"
+ input: "eval/in_top_k/InTopKV2"
+ attr {
+ key: "DstT"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "SrcT"
+ value {
+ type: DT_BOOL
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: -1
+ }
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "eval/Const"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ int_val: 0
+ }
+ }
+ }
+ }
+ node {
+ name: "eval/Mean"
+ op: "Mean"
+ input: "eval/Cast"
+ input: "eval/Const"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "keep_dims"
+ value {
+ b: false
+ }
+ }
+ }
+ node {
+ name: "init"
+ op: "NoOp"
+ input: "^dnn/hidden1/weights/Assign"
+ input: "^dnn/hidden1/bias/Assign"
+ input: "^dnn/hidden2/weights/Assign"
+ input: "^dnn/hidden2/bias/Assign"
+ input: "^dnn/hidden3/weights/Assign"
+ input: "^dnn/hidden3/bias/Assign"
+ input: "^dnn/outputs/weights/Assign"
+ input: "^dnn/outputs/bias/Assign"
+ }
+ node {
+ name: "Accuracy/tags"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ }
+ string_val: "Accuracy"
+ }
+ }
+ }
+ }
+ node {
+ name: "Accuracy"
+ op: "ScalarSummary"
+ input: "Accuracy/tags"
+ input: "eval/Mean"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "save/Const"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ }
+ string_val: "model"
+ }
+ }
+ }
+ }
+ node {
+ name: "save/StringJoin/inputs_1"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ }
+ string_val: "_temp_b37397c1a53b45c6ad354497f210d9d4/part"
+ }
+ }
+ }
+ }
+ node {
+ name: "save/StringJoin"
+ op: "StringJoin"
+ input: "save/Const"
+ input: "save/StringJoin/inputs_1"
+ attr {
+ key: "N"
+ value {
+ i: 2
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "separator"
+ value {
+ s: ""
+ }
+ }
+ }
+ node {
+ name: "save/num_shards"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ }
+ int_val: 1
+ }
+ }
+ }
+ }
+ node {
+ name: "save/ShardedFilename/shard"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ }
+ int_val: 0
+ }
+ }
+ }
+ }
+ node {
+ name: "save/ShardedFilename"
+ op: "ShardedFilename"
+ input: "save/StringJoin"
+ input: "save/ShardedFilename/shard"
+ input: "save/num_shards"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "save/SaveV2/tensor_names"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 8
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 8
+ }
+ }
+ string_val: "dnn/hidden1/bias"
+ string_val: "dnn/hidden1/weights"
+ string_val: "dnn/hidden2/bias"
+ string_val: "dnn/hidden2/weights"
+ string_val: "dnn/hidden3/bias"
+ string_val: "dnn/hidden3/weights"
+ string_val: "dnn/outputs/bias"
+ string_val: "dnn/outputs/weights"
+ }
+ }
+ }
+ }
+ node {
+ name: "save/SaveV2/shape_and_slices"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 8
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 8
+ }
+ }
+ string_val: ""
+ string_val: ""
+ string_val: ""
+ string_val: ""
+ string_val: ""
+ string_val: ""
+ string_val: ""
+ string_val: ""
+ }
+ }
+ }
+ }
+ node {
+ name: "save/SaveV2"
+ op: "SaveV2"
+ input: "save/ShardedFilename"
+ input: "save/SaveV2/tensor_names"
+ input: "save/SaveV2/shape_and_slices"
+ input: "dnn/hidden1/bias"
+ input: "dnn/hidden1/weights"
+ input: "dnn/hidden2/bias"
+ input: "dnn/hidden2/weights"
+ input: "dnn/hidden3/bias"
+ input: "dnn/hidden3/weights"
+ input: "dnn/outputs/bias"
+ input: "dnn/outputs/weights"
+ attr {
+ key: "dtypes"
+ value {
+ list {
+ type: DT_FLOAT
+ type: DT_FLOAT
+ type: DT_FLOAT
+ type: DT_FLOAT
+ type: DT_FLOAT
+ type: DT_FLOAT
+ type: DT_FLOAT
+ type: DT_FLOAT
+ }
+ }
+ }
+ }
+ node {
+ name: "save/control_dependency"
+ op: "Identity"
+ input: "save/ShardedFilename"
+ input: "^save/SaveV2"
+ attr {
+ key: "T"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@save/ShardedFilename"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "save/MergeV2Checkpoints/checkpoint_prefixes"
+ op: "Pack"
+ input: "save/ShardedFilename"
+ input: "^save/control_dependency"
+ attr {
+ key: "N"
+ value {
+ i: 1
+ }
+ }
+ attr {
+ key: "T"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "axis"
+ value {
+ i: 0
+ }
+ }
+ }
+ node {
+ name: "save/MergeV2Checkpoints"
+ op: "MergeV2Checkpoints"
+ input: "save/MergeV2Checkpoints/checkpoint_prefixes"
+ input: "save/Const"
+ attr {
+ key: "delete_old_dirs"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "save/Identity"
+ op: "Identity"
+ input: "save/Const"
+ input: "^save/control_dependency"
+ input: "^save/MergeV2Checkpoints"
+ attr {
+ key: "T"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2/tensor_names"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ string_val: "dnn/hidden1/bias"
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2/shape_and_slices"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ string_val: ""
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2"
+ op: "RestoreV2"
+ input: "save/Const"
+ input: "save/RestoreV2/tensor_names"
+ input: "save/RestoreV2/shape_and_slices"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtypes"
+ value {
+ list {
+ type: DT_FLOAT
+ }
+ }
+ }
+ }
+ node {
+ name: "save/Assign"
+ op: "Assign"
+ input: "dnn/hidden1/bias"
+ input: "save/RestoreV2"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden1/bias"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_1/tensor_names"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ string_val: "dnn/hidden1/weights"
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_1/shape_and_slices"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ string_val: ""
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_1"
+ op: "RestoreV2"
+ input: "save/Const"
+ input: "save/RestoreV2_1/tensor_names"
+ input: "save/RestoreV2_1/shape_and_slices"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtypes"
+ value {
+ list {
+ type: DT_FLOAT
+ }
+ }
+ }
+ }
+ node {
+ name: "save/Assign_1"
+ op: "Assign"
+ input: "dnn/hidden1/weights"
+ input: "save/RestoreV2_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden1/weights"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 784
+ }
+ dim {
+ size: 300
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_2/tensor_names"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ string_val: "dnn/hidden2/bias"
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_2/shape_and_slices"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ string_val: ""
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_2"
+ op: "RestoreV2"
+ input: "save/Const"
+ input: "save/RestoreV2_2/tensor_names"
+ input: "save/RestoreV2_2/shape_and_slices"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtypes"
+ value {
+ list {
+ type: DT_FLOAT
+ }
+ }
+ }
+ }
+ node {
+ name: "save/Assign_2"
+ op: "Assign"
+ input: "dnn/hidden2/bias"
+ input: "save/RestoreV2_2"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden2/bias"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_3/tensor_names"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ string_val: "dnn/hidden2/weights"
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_3/shape_and_slices"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ string_val: ""
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_3"
+ op: "RestoreV2"
+ input: "save/Const"
+ input: "save/RestoreV2_3/tensor_names"
+ input: "save/RestoreV2_3/shape_and_slices"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtypes"
+ value {
+ list {
+ type: DT_FLOAT
+ }
+ }
+ }
+ }
+ node {
+ name: "save/Assign_3"
+ op: "Assign"
+ input: "dnn/hidden2/weights"
+ input: "save/RestoreV2_3"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden2/weights"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 300
+ }
+ dim {
+ size: 100
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_4/tensor_names"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ string_val: "dnn/hidden3/bias"
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_4/shape_and_slices"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ string_val: ""
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_4"
+ op: "RestoreV2"
+ input: "save/Const"
+ input: "save/RestoreV2_4/tensor_names"
+ input: "save/RestoreV2_4/shape_and_slices"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtypes"
+ value {
+ list {
+ type: DT_FLOAT
+ }
+ }
+ }
+ }
+ node {
+ name: "save/Assign_4"
+ op: "Assign"
+ input: "dnn/hidden3/bias"
+ input: "save/RestoreV2_4"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden3/bias"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_5/tensor_names"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ string_val: "dnn/hidden3/weights"
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_5/shape_and_slices"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ string_val: ""
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_5"
+ op: "RestoreV2"
+ input: "save/Const"
+ input: "save/RestoreV2_5/tensor_names"
+ input: "save/RestoreV2_5/shape_and_slices"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtypes"
+ value {
+ list {
+ type: DT_FLOAT
+ }
+ }
+ }
+ }
+ node {
+ name: "save/Assign_5"
+ op: "Assign"
+ input: "dnn/hidden3/weights"
+ input: "save/RestoreV2_5"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/hidden3/weights"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 100
+ }
+ dim {
+ size: 40
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_6/tensor_names"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ string_val: "dnn/outputs/bias"
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_6/shape_and_slices"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ string_val: ""
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_6"
+ op: "RestoreV2"
+ input: "save/Const"
+ input: "save/RestoreV2_6/tensor_names"
+ input: "save/RestoreV2_6/shape_and_slices"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtypes"
+ value {
+ list {
+ type: DT_FLOAT
+ }
+ }
+ }
+ }
+ node {
+ name: "save/Assign_6"
+ op: "Assign"
+ input: "dnn/outputs/bias"
+ input: "save/RestoreV2_6"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/outputs/bias"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_7/tensor_names"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ string_val: "dnn/outputs/weights"
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_7/shape_and_slices"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ string_val: ""
+ }
+ }
+ }
+ }
+ node {
+ name: "save/RestoreV2_7"
+ op: "RestoreV2"
+ input: "save/Const"
+ input: "save/RestoreV2_7/tensor_names"
+ input: "save/RestoreV2_7/shape_and_slices"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ unknown_rank: true
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtypes"
+ value {
+ list {
+ type: DT_FLOAT
+ }
+ }
+ }
+ }
+ node {
+ name: "save/Assign_7"
+ op: "Assign"
+ input: "dnn/outputs/weights"
+ input: "save/RestoreV2_7"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@dnn/outputs/weights"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 40
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "save/restore_shard"
+ op: "NoOp"
+ input: "^save/Assign"
+ input: "^save/Assign_1"
+ input: "^save/Assign_2"
+ input: "^save/Assign_3"
+ input: "^save/Assign_4"
+ input: "^save/Assign_5"
+ input: "^save/Assign_6"
+ input: "^save/Assign_7"
+ }
+ node {
+ name: "save/restore_all"
+ op: "NoOp"
+ input: "^save/restore_shard"
+ }
+ versions {
+ producer: 24
+ }
+ }
+ saver_def {
+ filename_tensor_name: "save/Const:0"
+ save_tensor_name: "save/Identity:0"
+ restore_op_name: "save/restore_all"
+ max_to_keep: 5
+ sharded: true
+ keep_checkpoint_every_n_hours: 10000.0
+ version: V2
+ }
+ collection_def {
+ key: "summaries"
+ value {
+ node_list {
+ value: "Accuracy:0"
+ }
+ }
+ }
+ collection_def {
+ key: "train_op"
+ value {
+ node_list {
+ value: "train/GradientDescent"
+ }
+ }
+ }
+ collection_def {
+ key: "trainable_variables"
+ value {
+ bytes_list {
+ value: "\n\025dnn/hidden1/weights:0\022\032dnn/hidden1/weights/Assign\032\032dnn/hidden1/weights/read:02\036dnn/hidden1/truncated_normal:0"
+ value: "\n\022dnn/hidden1/bias:0\022\027dnn/hidden1/bias/Assign\032\027dnn/hidden1/bias/read:02\023dnn/hidden1/zeros:0"
+ value: "\n\025dnn/hidden2/weights:0\022\032dnn/hidden2/weights/Assign\032\032dnn/hidden2/weights/read:02\036dnn/hidden2/truncated_normal:0"
+ value: "\n\022dnn/hidden2/bias:0\022\027dnn/hidden2/bias/Assign\032\027dnn/hidden2/bias/read:02\023dnn/hidden2/zeros:0"
+ value: "\n\025dnn/hidden3/weights:0\022\032dnn/hidden3/weights/Assign\032\032dnn/hidden3/weights/read:02\036dnn/hidden3/truncated_normal:0"
+ value: "\n\022dnn/hidden3/bias:0\022\027dnn/hidden3/bias/Assign\032\027dnn/hidden3/bias/read:02\023dnn/hidden3/zeros:0"
+ value: "\n\025dnn/outputs/weights:0\022\032dnn/outputs/weights/Assign\032\032dnn/outputs/weights/read:02\036dnn/outputs/truncated_normal:0"
+ value: "\n\022dnn/outputs/bias:0\022\027dnn/outputs/bias/Assign\032\027dnn/outputs/bias/read:02\023dnn/outputs/zeros:0"
+ }
+ }
+ }
+ collection_def {
+ key: "variables"
+ value {
+ bytes_list {
+ value: "\n\025dnn/hidden1/weights:0\022\032dnn/hidden1/weights/Assign\032\032dnn/hidden1/weights/read:02\036dnn/hidden1/truncated_normal:0"
+ value: "\n\022dnn/hidden1/bias:0\022\027dnn/hidden1/bias/Assign\032\027dnn/hidden1/bias/read:02\023dnn/hidden1/zeros:0"
+ value: "\n\025dnn/hidden2/weights:0\022\032dnn/hidden2/weights/Assign\032\032dnn/hidden2/weights/read:02\036dnn/hidden2/truncated_normal:0"
+ value: "\n\022dnn/hidden2/bias:0\022\027dnn/hidden2/bias/Assign\032\027dnn/hidden2/bias/read:02\023dnn/hidden2/zeros:0"
+ value: "\n\025dnn/hidden3/weights:0\022\032dnn/hidden3/weights/Assign\032\032dnn/hidden3/weights/read:02\036dnn/hidden3/truncated_normal:0"
+ value: "\n\022dnn/hidden3/bias:0\022\027dnn/hidden3/bias/Assign\032\027dnn/hidden3/bias/read:02\023dnn/hidden3/zeros:0"
+ value: "\n\025dnn/outputs/weights:0\022\032dnn/outputs/weights/Assign\032\032dnn/outputs/weights/read:02\036dnn/outputs/truncated_normal:0"
+ value: "\n\022dnn/outputs/bias:0\022\027dnn/outputs/bias/Assign\032\027dnn/outputs/bias/read:02\023dnn/outputs/zeros:0"
+ }
+ }
+ }
+ signature_def {
+ key: "serving_default"
+ value {
+ inputs {
+ key: "x"
+ value {
+ name: "X:0"
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 784
+ }
+ }
+ }
+ }
+ outputs {
+ key: "y"
+ value {
+ name: "dnn/outputs/add:0"
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: -1
+ }
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+ method_name: "tensorflow/serving/predict"
+ }
+ }
+}
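The saved_model.pbtxt above ends with a single serving_default signature: input "x" is the placeholder X:0 with shape [-1, 784], and output "y" is dnn/outputs/add:0 with shape [-1, 10]. As a minimal sketch (not part of this change), that signature can be exercised directly through the TensorFlow Java API using only calls that also appear in the new test case further down; the wrapper class name here is made up for illustration:

import java.nio.FloatBuffer;
import java.util.List;

import org.tensorflow.SavedModelBundle;
import org.tensorflow.Session;

public class ServingSignatureSketch {

    public static void main(String[] args) {
        // Load the SavedModel added by this change, tagged "serve".
        SavedModelBundle model = SavedModelBundle.load(
                "src/test/files/integration/tensorflow/3_layer_mnist/saved", "serve");

        // Feed a zero-filled 1x784 image into the X placeholder and fetch the 1x10 logits.
        org.tensorflow.Tensor<?> input =
                org.tensorflow.Tensor.create(new long[]{ 1, 784 }, FloatBuffer.allocate(784));
        Session.Runner runner = model.session().runner();
        runner.feed("X", input);
        List<org.tensorflow.Tensor<?>> results = runner.fetch("dnn/outputs/add").run();
        System.out.println(results.get(0));
    }

}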
diff --git a/searchlib/src/test/files/integration/tensorflow/3_layer_mnist/saved/variables/variables.data-00000-of-00001 b/searchlib/src/test/files/integration/tensorflow/3_layer_mnist/saved/variables/variables.data-00000-of-00001
new file mode 100644
index 00000000000..e286d3bb9de
--- /dev/null
+++ b/searchlib/src/test/files/integration/tensorflow/3_layer_mnist/saved/variables/variables.data-00000-of-00001
Binary files differ
diff --git a/searchlib/src/test/files/integration/tensorflow/3_layer_mnist/saved/variables/variables.index b/searchlib/src/test/files/integration/tensorflow/3_layer_mnist/saved/variables/variables.index
new file mode 100644
index 00000000000..7643ec22a7d
--- /dev/null
+++ b/searchlib/src/test/files/integration/tensorflow/3_layer_mnist/saved/variables/variables.index
Binary files differ
diff --git a/searchlib/src/test/java/com/yahoo/searchlib/rankingexpression/integration/tensorflow/Mnist_SoftmaxTestCase.java b/searchlib/src/test/java/com/yahoo/searchlib/rankingexpression/integration/tensorflow/Mnist_SoftmaxTestCase.java
deleted file mode 100644
index 6536a2f2a06..00000000000
--- a/searchlib/src/test/java/com/yahoo/searchlib/rankingexpression/integration/tensorflow/Mnist_SoftmaxTestCase.java
+++ /dev/null
@@ -1,117 +0,0 @@
-package com.yahoo.searchlib.rankingexpression.integration.tensorflow;
-
-import com.yahoo.searchlib.rankingexpression.RankingExpression;
-import com.yahoo.searchlib.rankingexpression.evaluation.Context;
-import com.yahoo.searchlib.rankingexpression.evaluation.MapContext;
-import com.yahoo.searchlib.rankingexpression.evaluation.TensorValue;
-import com.yahoo.searchlib.rankingexpression.rule.TensorFunctionNode;
-import com.yahoo.tensor.Tensor;
-import com.yahoo.tensor.TensorType;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.tensorflow.SavedModelBundle;
-import org.tensorflow.Session;
-
-import java.nio.FloatBuffer;
-import java.util.List;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-
-/**
- * @author bratseth
- */
-public class Mnist_SoftmaxTestCase {
-
- @Ignore
- @Test
- public void testImporting() {
- String modelDir = "src/test/files/integration/tensorflow/mnist_softmax/saved";
- SavedModelBundle model = SavedModelBundle.load(modelDir, "serve");
- TensorFlowModel result = new TensorFlowImporter().importModel(model);
-
- // Check constants
- assertEquals(2, result.constants().size());
-
- Tensor constant0 = result.constants().get("Variable");
- assertNotNull(constant0);
- assertEquals(new TensorType.Builder().indexed("d0", 784).indexed("d1", 10).build(),
- constant0.type());
- assertEquals(7840, constant0.size());
-
- Tensor constant1 = result.constants().get("Variable_1");
- assertNotNull(constant1);
- assertEquals(new TensorType.Builder().indexed("d0", 10).build(),
- constant1.type());
- assertEquals(10, constant1.size());
-
- // Check signatures
- assertEquals(1, result.signatures().size());
- TensorFlowModel.Signature signature = result.signatures().get("serving_default");
- assertNotNull(signature);
-
- // ... signature inputs
- assertEquals(1, signature.inputs().size());
- TensorType argument0 = signature.inputArgument("x");
- assertNotNull(argument0);
- assertEquals(new TensorType.Builder().indexed("d0").indexed("d1", 784).build(), argument0);
-
- // ... signature outputs
- assertEquals(1, signature.outputs().size());
- RankingExpression output = signature.outputExpression("y");
- assertNotNull(output);
- assertEquals("add", output.getName());
- assertEquals("" +
- "join(rename(matmul(Placeholder, rename(constant(Variable), (d0, d1), (d1, d3)), d1), d3, d1), " +
- "rename(constant(Variable_1), d0, d1), " +
- "f(a,b)(a + b))",
- toNonPrimitiveString(output));
-
- // ... skipped outputs
- assertEquals(0, signature.skippedOutputs().size());
-
- // Test execution
- assertEqualResult(model, result, "Variable/read");
- assertEqualResult(model, result, "Variable_1/read");
- assertEqualResult(model, result, "MatMul");
- assertEqualResult(model, result, "add");
- }
-
- private void assertEqualResult(SavedModelBundle model, TensorFlowModel result, String operationName) {
- Tensor tfResult = tensorFlowExecute(model, operationName);
- Context context = contextFrom(result);
- Tensor placeholder = placeholderArgument();
- context.put("Placeholder", new TensorValue(placeholder));
- Tensor vespaResult = result.expressions().get(operationName).evaluate(context).asTensor();
- assertEquals("Operation '" + operationName + "' produces equal results", vespaResult, tfResult);
- }
-
- private Tensor tensorFlowExecute(SavedModelBundle model, String operationName) {
- Session.Runner runner = model.session().runner();
- org.tensorflow.Tensor<?> placeholder = org.tensorflow.Tensor.create(new long[]{ 1, 784 }, FloatBuffer.allocate(784));
- runner.feed("Placeholder", placeholder);
- List<org.tensorflow.Tensor<?>> results = runner.fetch(operationName).run();
- assertEquals(1, results.size());
- return new TensorConverter().toVespaTensor(results.get(0));
- }
-
- private Context contextFrom(TensorFlowModel result) {
- MapContext context = new MapContext();
- result.constants().forEach((name, tensor) -> context.put("constant(" + name + ")", new TensorValue(tensor)));
- return context;
- }
-
- private String toNonPrimitiveString(RankingExpression expression) {
- // toString on the wrapping expression will map to primitives, which is harder to read
- return ((TensorFunctionNode)expression.getRoot()).function().toString();
- }
-
- private Tensor placeholderArgument() {
- int size = 784;
- Tensor.Builder b = Tensor.Builder.of(new TensorType.Builder().indexed("d0", 1).indexed("d1", size).build());
- for (int i = 0; i < size; i++)
- b.cell(0, 0, i);
- return b.build();
- }
-
-}
diff --git a/searchlib/src/test/java/com/yahoo/searchlib/rankingexpression/integration/tensorflow/TensorflowImportTestCase.java b/searchlib/src/test/java/com/yahoo/searchlib/rankingexpression/integration/tensorflow/TensorflowImportTestCase.java
new file mode 100644
index 00000000000..2aec1988a22
--- /dev/null
+++ b/searchlib/src/test/java/com/yahoo/searchlib/rankingexpression/integration/tensorflow/TensorflowImportTestCase.java
@@ -0,0 +1,298 @@
+package com.yahoo.searchlib.rankingexpression.integration.tensorflow;
+
+import com.yahoo.searchlib.rankingexpression.RankingExpression;
+import com.yahoo.searchlib.rankingexpression.evaluation.Context;
+import com.yahoo.searchlib.rankingexpression.evaluation.MapContext;
+import com.yahoo.searchlib.rankingexpression.evaluation.TensorValue;
+import com.yahoo.searchlib.rankingexpression.rule.TensorFunctionNode;
+import com.yahoo.tensor.Tensor;
+import com.yahoo.tensor.TensorType;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.tensorflow.SavedModelBundle;
+import org.tensorflow.Session;
+
+import java.nio.FloatBuffer;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+/**
+ * @author bratseth
+ */
+public class TensorflowImportTestCase {
+
+ @Ignore
+ @Test
+ public void testMnistSoftmaxImport() {
+ String modelDir = "src/test/files/integration/tensorflow/mnist_softmax/saved";
+ SavedModelBundle model = SavedModelBundle.load(modelDir, "serve");
+ TensorFlowModel result = new TensorFlowImporter().importModel(model);
+
+ // Check constants
+ assertEquals(2, result.constants().size());
+
+ Tensor constant0 = result.constants().get("Variable");
+ assertNotNull(constant0);
+ assertEquals(new TensorType.Builder().indexed("d0", 784).indexed("d1", 10).build(),
+ constant0.type());
+ assertEquals(7840, constant0.size());
+
+ Tensor constant1 = result.constants().get("Variable_1");
+ assertNotNull(constant1);
+ assertEquals(new TensorType.Builder().indexed("d0", 10).build(),
+ constant1.type());
+ assertEquals(10, constant1.size());
+
+ // Check signatures
+ assertEquals(1, result.signatures().size());
+ TensorFlowModel.Signature signature = result.signatures().get("serving_default");
+ assertNotNull(signature);
+
+ // ... signature inputs
+ assertEquals(1, signature.inputs().size());
+ TensorType argument0 = signature.inputArgument("x");
+ assertNotNull(argument0);
+ assertEquals(new TensorType.Builder().indexed("d0").indexed("d1", 784).build(), argument0);
+
+ // ... signature outputs
+ assertEquals(1, signature.outputs().size());
+ RankingExpression output = signature.outputExpression("y");
+ assertNotNull(output);
+ assertEquals("add", output.getName());
+ assertEquals("" +
+ "join(rename(matmul(Placeholder, rename(constant(Variable), (d0, d1), (d1, d3)), d1), d3, d1), " +
+ "rename(constant(Variable_1), d0, d1), " +
+ "f(a,b)(a + b))",
+ toNonPrimitiveString(output));
+
+ // Test execution
+ assertEqualResult(model, result, "Placeholder", "Variable/read");
+ assertEqualResult(model, result, "Placeholder", "Variable_1/read");
+ assertEqualResult(model, result, "Placeholder", "MatMul");
+ assertEqualResult(model, result, "Placeholder", "add");
+ }
+
+ @Ignore
+ @Test
+ public void test3LayerMnistImport() {
+ String modelDir = "src/test/files/integration/tensorflow/3_layer_mnist/saved";
+ SavedModelBundle model = SavedModelBundle.load(modelDir, "serve");
+ TensorFlowModel result = new TensorFlowImporter().importModel(model);
+
+ // Check constants
+ assertEquals(8, result.constants().size());
+
+ Tensor outputBias = result.constants().get("dnn/outputs/bias");
+ assertNotNull(outputBias);
+ assertEquals(new TensorType.Builder().indexed("d0", 10).build(), outputBias.type());
+ assertEquals(10, outputBias.size());
+
+ Tensor outputWeights = result.constants().get("dnn/outputs/weights");
+ assertNotNull(outputWeights);
+ assertEquals(new TensorType.Builder().indexed("d0", 40).indexed("d1", 10).build(), outputWeights.type());
+ assertEquals(400, outputWeights.size());
+
+ Tensor hidden3Bias = result.constants().get("dnn/hidden3/bias");
+ assertNotNull(hidden3Bias);
+ assertEquals(new TensorType.Builder().indexed("d0", 40).build(), hidden3Bias.type());
+ assertEquals(40, hidden3Bias.size());
+
+ Tensor hidden3Weights = result.constants().get("dnn/hidden3/weights");
+ assertNotNull(hidden3Weights);
+ assertEquals(new TensorType.Builder().indexed("d0", 100).indexed("d1", 40).build(), hidden3Weights.type());
+ assertEquals(4000, hidden3Weights.size());
+
+ Tensor hidden2Bias = result.constants().get("dnn/hidden2/bias");
+ assertNotNull(hidden2Bias);
+ assertEquals(new TensorType.Builder().indexed("d0", 100).build(), hidden2Bias.type());
+ assertEquals(100, hidden2Bias.size());
+
+ Tensor hidden2Weights = result.constants().get("dnn/hidden2/weights");
+ assertNotNull(hidden2Weights);
+ assertEquals(new TensorType.Builder().indexed("d0", 300).indexed("d1", 100).build(), hidden2Weights.type());
+ assertEquals(30000, hidden2Weights.size());
+
+ Tensor hidden1Bias = result.constants().get("dnn/hidden1/bias");
+ assertNotNull(hidden1Bias);
+ assertEquals(new TensorType.Builder().indexed("d0", 300).build(), hidden1Bias.type());
+ assertEquals(300, hidden1Bias.size());
+
+ Tensor hidden1Weights = result.constants().get("dnn/hidden1/weights");
+ assertNotNull(hidden1Weights);
+ assertEquals(new TensorType.Builder().indexed("d0", 784).indexed("d1", 300).build(), hidden1Weights.type());
+ assertEquals(235200, hidden1Weights.size());
+
+ // Check signatures
+ assertEquals(1, result.signatures().size());
+ TensorFlowModel.Signature signature = result.signatures().get("serving_default");
+ assertNotNull(signature);
+
+ // ... signature inputs
+ assertEquals(1, signature.inputs().size());
+ TensorType argument0 = signature.inputArgument("x");
+ assertNotNull(argument0);
+ assertEquals(new TensorType.Builder().indexed("d0").indexed("d1", 784).build(), argument0);
+
+ // ... signature outputs
+ assertEquals(1, signature.outputs().size());
+ RankingExpression output = signature.outputExpression("y");
+ assertNotNull(output);
+ assertEquals("dnn/outputs/add", output.getName());
+ assertEquals("" +
+ "join(" +
+ "rename(" +
+ "matmul(" +
+ "map(" +
+ "join(" +
+ "rename(" +
+ "matmul(" +
+ "map(" +
+ "join(" +
+ "rename(" +
+ "matmul(" +
+ "map(" +
+ "join(" +
+ "rename(" +
+ "matmul(" +
+ "X, " +
+ "rename(" +
+ "constant(dnn/hidden1/weights), " +
+ "(d0, d1), " +
+ "(d1, d3)" +
+ "), " +
+ "d1" +
+ "), " +
+ "d3, " +
+ "d1" +
+ "), " +
+ "rename(" +
+ "constant(dnn/hidden1/bias), " +
+ "d0, " +
+ "d1" +
+ "), " +
+ "f(a,b)(a + b)" +
+ "), " +
+ "f(a)(if(a < 0, exp(a)-1, a))" +
+ "), " +
+ "rename(" +
+ "constant(dnn/hidden2/weights), " +
+ "(d0, d1), " +
+ "(d1, d3)" +
+ "), " +
+ "d1" +
+ "), " +
+ "d3, " +
+ "d1" +
+ "), " +
+ "rename(" +
+ "constant(dnn/hidden2/bias), " +
+ "d0, " +
+ "d1" +
+ "), " +
+ "f(a,b)(a + b)" +
+ "), " +
+ "f(a)(max(0, a))" +
+ "), " +
+ "rename(" +
+ "constant(dnn/hidden3/weights), " +
+ "(d0, d1), " +
+ "(d1, d3)" +
+ "), " +
+ "d1" +
+ "), " +
+ "d3, " +
+ "d1" +
+ "), " +
+ "rename(" +
+ "constant(dnn/hidden3/bias), " +
+ "d0, " +
+ "d1" +
+ "), " +
+ "f(a,b)(a + b)" +
+ "), " +
+ "f(a)(1 / (1 + exp(-a)))" +
+ "), " +
+ "rename(" +
+ "constant(dnn/outputs/weights), " +
+ "(d0, d1), " +
+ "(d1, d3)" +
+ "), " +
+ "d1" +
+ "), " +
+ "d3, " +
+ "d1" +
+ "), " +
+ "rename(" +
+ "constant(dnn/outputs/bias), " +
+ "d0, " +
+ "d1" +
+ "), " +
+ "f(a,b)(a + b)" +
+ ")",
+ toNonPrimitiveString(output));
+
+ // Test constants
+ assertEqualResult(model, result, "X", "dnn/hidden1/weights/read");
+ assertEqualResult(model, result, "X", "dnn/hidden1/bias/read");
+ assertEqualResult(model, result, "X", "dnn/hidden2/weights/read");
+ assertEqualResult(model, result, "X", "dnn/hidden2/bias/read");
+ assertEqualResult(model, result, "X", "dnn/hidden3/weights/read");
+ assertEqualResult(model, result, "X", "dnn/hidden3/bias/read");
+ assertEqualResult(model, result, "X", "dnn/outputs/weights/read");
+ assertEqualResult(model, result, "X", "dnn/outputs/bias/read");
+
+ // Test execution
+ assertEqualResult(model, result, "X", "dnn/hidden1/MatMul");
+ assertEqualResult(model, result, "X", "dnn/hidden1/add");
+ assertEqualResult(model, result, "X", "dnn/hidden1/Elu");
+ assertEqualResult(model, result, "X", "dnn/hidden2/MatMul");
+ assertEqualResult(model, result, "X", "dnn/hidden2/add");
+ assertEqualResult(model, result, "X", "dnn/hidden2/Relu");
+ assertEqualResult(model, result, "X", "dnn/hidden3/MatMul");
+ assertEqualResult(model, result, "X", "dnn/hidden3/add");
+ assertEqualResult(model, result, "X", "dnn/hidden3/Sigmoid");
+ assertEqualResult(model, result, "X", "dnn/outputs/MatMul");
+ assertEqualResult(model, result, "X", "dnn/outputs/add");
+ }
+
+
+ private void assertEqualResult(SavedModelBundle model, TensorFlowModel result, String inputName, String operationName) {
+ Tensor tfResult = tensorFlowExecute(model, inputName, operationName);
+ Context context = contextFrom(result);
+ Tensor placeholder = placeholderArgument();
+ context.put(inputName, new TensorValue(placeholder));
+ Tensor vespaResult = result.expressions().get(operationName).evaluate(context).asTensor();
+ assertEquals("Operation '" + operationName + "' produces equal results", vespaResult, tfResult);
+ }
+
+ private Tensor tensorFlowExecute(SavedModelBundle model, String inputName, String operationName) {
+ Session.Runner runner = model.session().runner();
+ org.tensorflow.Tensor<?> placeholder = org.tensorflow.Tensor.create(new long[]{ 1, 784 }, FloatBuffer.allocate(784));
+ runner.feed(inputName, placeholder);
+ List<org.tensorflow.Tensor<?>> results = runner.fetch(operationName).run();
+ assertEquals(1, results.size());
+ return new TensorConverter().toVespaTensor(results.get(0));
+ }
+
+ private Context contextFrom(TensorFlowModel result) {
+ MapContext context = new MapContext();
+ result.constants().forEach((name, tensor) -> context.put("constant(" + name + ")", new TensorValue(tensor)));
+ return context;
+ }
+
+ private String toNonPrimitiveString(RankingExpression expression) {
+ // toString on the wrapping expression will map to primitives, which is harder to read
+ return ((TensorFunctionNode)expression.getRoot()).function().toString();
+ }
+
+ private Tensor placeholderArgument() {
+ int size = 784;
+ Tensor.Builder b = Tensor.Builder.of(new TensorType.Builder().indexed("d0", 1).indexed("d1", size).build());
+ for (int i = 0; i < size; i++)
+ b.cell(0, 0, i);
+ return b.build();
+ }
+
+}
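For readability, the expected expression string asserted in test3LayerMnistImport above is simply the forward pass of the 3-layer MNIST network written out in tensor functions. With the constant shapes verified earlier in the test, it corresponds to the following (plain notation, introduced here only for illustration):

    h1 = elu(x * W1 + b1)        W1: 784 x 300, b1: 300   (dnn/hidden1)
    h2 = relu(h1 * W2 + b2)      W2: 300 x 100, b2: 100   (dnn/hidden2)
    h3 = sigmoid(h2 * W3 + b3)   W3: 100 x 40,  b3: 40    (dnn/hidden3)
    y  = h3 * W4 + b4            W4: 40 x 10,   b4: 10    (dnn/outputs)

The rename(...) wrappers in the expression only align tensor dimension names (d0, d1, d3) between the matmul operands; they do not change any cell values.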
diff --git a/searchlib/src/vespa/searchlib/aggregation/grouping.cpp b/searchlib/src/vespa/searchlib/aggregation/grouping.cpp
index cac39f43666..5c93fb4161c 100644
--- a/searchlib/src/vespa/searchlib/aggregation/grouping.cpp
+++ b/searchlib/src/vespa/searchlib/aggregation/grouping.cpp
@@ -129,8 +129,8 @@ void
Grouping::selectMembers(const vespalib::ObjectPredicate &predicate,
vespalib::ObjectOperation &operation)
{
- for (size_t i(0), m(_levels.size()); i < m; i++) {
- _levels[i].select(predicate, operation);
+ for (GroupingLevel & level : _levels) {
+ level.select(predicate, operation);
}
selectGroups(predicate, operation, _root, _firstLevel, _lastLevel, 0);
}
@@ -317,9 +317,9 @@ void Grouping::cleanupAttributeReferences()
void Grouping::cleanTemporary()
{
- for (GroupingLevelList::iterator it(_levels.begin()), mt(_levels.end()); it != mt; ++it) {
- if (it->getExpression().getRoot()->inherits(FunctionNode::classId)) {
- static_cast<FunctionNode &>(*it->getExpression().getRoot()).reset();
+ for (GroupingLevel & level : _levels) {
+ if (level.getExpression().getRoot()->inherits(FunctionNode::classId)) {
+ static_cast<FunctionNode &>(*level.getExpression().getRoot()).reset();
}
}
}
diff --git a/searchlib/src/vespa/searchlib/aggregation/grouping.h b/searchlib/src/vespa/searchlib/aggregation/grouping.h
index 74defb6035f..c84259578bc 100644
--- a/searchlib/src/vespa/searchlib/aggregation/grouping.h
+++ b/searchlib/src/vespa/searchlib/aggregation/grouping.h
@@ -22,14 +22,14 @@ public:
typedef std::unique_ptr<Grouping> UP;
private:
- uint32_t _id; // client id for this grouping
- bool _valid; // is this grouping object valid?
- bool _all; // if true, group all document, not just hits (streaming only)
- int64_t _topN; // hits to process per search node
- uint32_t _firstLevel; // first processing level this iteration (levels before considered frozen)
- uint32_t _lastLevel; // last processing level this iteration
- GroupingLevelList _levels; // grouping parameters per level
- Group _root; // the grouping tree
+ uint32_t _id; // client id for this grouping
+ bool _valid; // is this grouping object valid?
+ bool _all; // if true, group all document, not just hits (streaming only)
+ int64_t _topN; // hits to process per search node
+ uint32_t _firstLevel; // first processing level this iteration (levels before considered frozen)
+ uint32_t _lastLevel; // last processing level this iteration
+ GroupingLevelList _levels; // grouping parameters per level
+ Group _root; // the grouping tree
const vespalib::Clock *_clock; // An optional clock to be used for timeout handling.
fastos::TimeStamp _timeOfDoom; // Used if clock is specified. This is time when request expires.
diff --git a/storage/src/vespa/storage/visiting/visitor.cpp b/storage/src/vespa/storage/visiting/visitor.cpp
index 0d230745875..2f028b5c9fa 100644
--- a/storage/src/vespa/storage/visiting/visitor.cpp
+++ b/storage/src/vespa/storage/visiting/visitor.cpp
@@ -243,7 +243,7 @@ Visitor::Visitor(StorageComponent& component)
_visitorTarget(),
_state(STATE_NOT_STARTED),
_buckets(),
- _bucketSpace(BucketSpace::placeHolder()),
+ _bucketSpace(BucketSpace::invalid()),
_currentBucket(),
_bucketStates(),
_calledStartingVisitor(false),
diff --git a/storageapi/src/vespa/storageapi/messageapi/storagemessage.h b/storageapi/src/vespa/storageapi/messageapi/storagemessage.h
index 32f6c769e87..1625be88418 100644
--- a/storageapi/src/vespa/storageapi/messageapi/storagemessage.h
+++ b/storageapi/src/vespa/storageapi/messageapi/storagemessage.h
@@ -348,7 +348,7 @@ protected:
StorageMessage(const MessageType& code, Id id);
StorageMessage(const StorageMessage&, Id id);
- static document::Bucket getDummyBucket() { return document::Bucket(document::BucketSpace::placeHolder(), document::BucketId()); }
+ static document::Bucket getDummyBucket() { return document::Bucket(document::BucketSpace::invalid(), document::BucketId()); }
public:
virtual ~StorageMessage();
diff --git a/vespajlib/src/main/java/com/yahoo/tensor/functions/ScalarFunctions.java b/vespajlib/src/main/java/com/yahoo/tensor/functions/ScalarFunctions.java
index f1dadba2a29..4d1abf3978f 100644
--- a/vespajlib/src/main/java/com/yahoo/tensor/functions/ScalarFunctions.java
+++ b/vespajlib/src/main/java/com/yahoo/tensor/functions/ScalarFunctions.java
@@ -27,7 +27,10 @@ public class ScalarFunctions {
public static DoubleBinaryOperator multiply() { return new Multiply(); }
public static DoubleUnaryOperator acos() { return new Acos(); }
+ public static DoubleUnaryOperator elu() { return new Elu(); }
public static DoubleUnaryOperator exp() { return new Exp(); }
+ public static DoubleUnaryOperator relu() { return new Relu(); }
+ public static DoubleUnaryOperator sigmoid() { return new Sigmoid(); }
public static DoubleUnaryOperator sqrt() { return new Sqrt(); }
public static DoubleUnaryOperator square() { return new Square(); }
@@ -81,6 +84,27 @@ public class ScalarFunctions {
public String toString() { return "f(a)(acos(a))"; }
}
+ public static class Elu implements DoubleUnaryOperator {
+ @Override
+ public double applyAsDouble(double operand) { return operand < 0 ? Math.exp(operand) -1 : operand; }
+ @Override
+ public String toString() { return "f(a)(if(a < 0, exp(a)-1, a))"; }
+ }
+
+ public static class Relu implements DoubleUnaryOperator {
+ @Override
+ public double applyAsDouble(double operand) { return Math.max(operand, 0); }
+ @Override
+ public String toString() { return "f(a)(max(0, a))"; }
+ }
+
+ public static class Sigmoid implements DoubleUnaryOperator {
+ @Override
+ public double applyAsDouble(double operand) { return 1.0 / (1.0 + Math.exp(-operand)); }
+ @Override
+ public String toString() { return "f(a)(1 / (1 + exp(-a)))"; }
+ }
+
public static class Sqrt implements DoubleUnaryOperator {
@Override
public double applyAsDouble(double operand) { return Math.sqrt(operand); }
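The three new operators mirror the activations used by the imported MNIST model above (Elu, Relu and Sigmoid on hidden layers 1-3). A small usage sketch, assuming only the factory methods added in this diff (the wrapper class name below is hypothetical):

import java.util.function.DoubleUnaryOperator;

import com.yahoo.tensor.functions.ScalarFunctions;

public class ActivationSketch {

    public static void main(String[] args) {
        DoubleUnaryOperator elu = ScalarFunctions.elu();
        DoubleUnaryOperator relu = ScalarFunctions.relu();
        DoubleUnaryOperator sigmoid = ScalarFunctions.sigmoid();

        double x = -1.5;
        System.out.println(elu.applyAsDouble(x));      // exp(-1.5) - 1, about -0.777
        System.out.println(relu.applyAsDouble(x));     // max(0, -1.5) = 0.0
        System.out.println(sigmoid.applyAsDouble(x));  // 1 / (1 + exp(1.5)), about 0.182
    }

}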
diff --git a/zkfacade/src/main/java/com/yahoo/vespa/curator/Curator.java b/zkfacade/src/main/java/com/yahoo/vespa/curator/Curator.java
index 92c98bc7601..4c932969460 100644
--- a/zkfacade/src/main/java/com/yahoo/vespa/curator/Curator.java
+++ b/zkfacade/src/main/java/com/yahoo/vespa/curator/Curator.java
@@ -77,7 +77,7 @@ public class Curator implements AutoCloseable {
String spec = String.format("%s:%d", server.hostname(), server.port());
- if ((config.zookeeperLocalhostAffinity() || config.system().equals("cd")) && server.hostname().equals(thisServer)) {
+ if (config.zookeeperLocalhostAffinity() && server.hostname().equals(thisServer)) {
// Only connect to localhost server if possible, to save network traffic
// and balance load.
return spec;