diff options
author | Lester Solbakken <lesters@oath.com> | 2019-06-19 15:56:24 +0200 |
---|---|---|
committer | Lester Solbakken <lesters@oath.com> | 2019-06-19 15:56:24 +0200 |
commit | 6ad990b0309d673a279f9a1308f6029d028c11b7 (patch) | |
tree | 77beff42b429d73521e1cc4942c86eb71cc9fbdd /model-integration | |
parent | fb094bd5f31a5b9dee97a95473be8eae9ada9544 (diff) |
Remove outdated comment
Diffstat (limited to 'model-integration')
-rw-r--r-- | model-integration/src/test/models/tensorflow/softmax/softmax.py | 4 |
1 file changed, 1 insertion, 3 deletions
diff --git a/model-integration/src/test/models/tensorflow/softmax/softmax.py b/model-integration/src/test/models/tensorflow/softmax/softmax.py index c636af58043..aab9956f914 100644 --- a/model-integration/src/test/models/tensorflow/softmax/softmax.py +++ b/model-integration/src/test/models/tensorflow/softmax/softmax.py @@ -9,11 +9,9 @@ n_inputs = 5 n_outputs = 3 input = tf.placeholder(tf.float32, shape=(None, n_inputs), name="input") - -W = tf.Variable(tf.random.uniform([n_inputs, n_outputs]), name="weights") # or just add uniform random +W = tf.Variable(tf.random.uniform([n_inputs, n_outputs]), name="weights") b = tf.Variable(tf.random.uniform([n_outputs]), name="bias") Z = tf.matmul(input, W) + b - hidden_layer = tf.nn.relu(Z) output_layer = tf.nn.softmax(hidden_layer, name="output") |