diff options
author | Lester Solbakken <lesters@oath.com> | 2018-03-08 17:20:05 +0100 |
---|---|---|
committer | Lester Solbakken <lesters@oath.com> | 2018-03-08 17:20:05 +0100 |
commit | 28eb8acb97a8b1c1b3f3afc02d3e84003526947c (patch) | |
tree | 6ea8163036adcc97673c01f7b4a47032cab7f166 /searchlib/src/test/files/integration/tensorflow/dropout/dropout.py | |
parent | 71673253d52acb54b42f997758b12f75a2e032bc (diff) |
Add test for tensorflow broadcasting
Diffstat (limited to 'searchlib/src/test/files/integration/tensorflow/dropout/dropout.py')
-rw-r--r-- | searchlib/src/test/files/integration/tensorflow/dropout/dropout.py | 5 |
1 file changed, 4 insertions, 1 deletion
diff --git a/searchlib/src/test/files/integration/tensorflow/dropout/dropout.py b/searchlib/src/test/files/integration/tensorflow/dropout/dropout.py index adbf29b9ab6..06ae4c4e5d5 100644 --- a/searchlib/src/test/files/integration/tensorflow/dropout/dropout.py +++ b/searchlib/src/test/files/integration/tensorflow/dropout/dropout.py @@ -16,8 +16,11 @@ X = tf.placeholder(tf.float32, shape=(None, n_inputs), name="X") y = tf.placeholder(tf.int64, shape=(None), name="y") training = tf.placeholder_with_default(False, shape=(), name='training') +def leaky_relu_with_small_constant(z, name=None): + return tf.maximum(tf.constant(0.01, shape=[1]) * z, z, name=name) + X_drop = tf.layers.dropout(X, dropout_rate, training=training, name="xdrop") -output = tf.layers.dense(X_drop, n_outputs, name="outputs") +output = tf.layers.dense(X_drop, n_outputs, activation=leaky_relu_with_small_constant, name="outputs") init = tf.global_variables_initializer() file_writer = tf.summary.FileWriter(logdir, tf.get_default_graph()) |