From 3f7d82f8edb78807b1624cd7f52c775e31e40dbb Mon Sep 17 00:00:00 2001
From: Terry Taewoong Um
Date: Tue, 17 May 2016 20:28:22 -0400
Subject: [PATCH] Delete sigmoid function at the output layer

Since ```softmax_cross_entropy_with_logits``` already applies the softmax function internally, I deleted the sigmoid at the output layer so that the network returns raw logits.

---
 notebooks/mlp_mnist_simple.ipynb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/notebooks/mlp_mnist_simple.ipynb b/notebooks/mlp_mnist_simple.ipynb
index de3bc53..dddf9e2 100755
--- a/notebooks/mlp_mnist_simple.ipynb
+++ b/notebooks/mlp_mnist_simple.ipynb
@@ -78,7 +78,7 @@
     "def multilayer_perceptron(_X, _weights, _biases):\n",
     "    layer_1 = tf.nn.sigmoid(tf.add(tf.matmul(_X, _weights['h1']), _biases['b1'])) \n",
     "    layer_2 = tf.nn.sigmoid(tf.add(tf.matmul(layer_1, _weights['h2']), _biases['b2']))\n",
-    "    return (tf.sigmoid(tf.matmul(layer_2, _weights['out']) + _biases['out']))\n",
+    "    return (tf.matmul(layer_2, _weights['out']) + _biases['out'])\n",
     " \n",
     "# Store layers weight & bias\n",
     "stddev = 0.1 # <== This greatly affects accuracy!! \n",
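For context, here is a minimal sketch (not part of the patch) of how the patched `multilayer_perceptron` and the loss fit together. It assumes the TensorFlow 1.x graph-style API used by the 2016-era notebook; the layer sizes, `stddev` value, and the names `x`, `y`, `pred`, and `cost` are illustrative assumptions, not taken verbatim from `mlp_mnist_simple.ipynb`.

```python
# Sketch only: TF 1.x graph-style API assumed; sizes/names are illustrative.
import tensorflow as tf

n_input, n_hidden_1, n_hidden_2, n_classes = 784, 256, 256, 10
stddev = 0.1  # weight-init scale (assumed, mirroring the notebook's comment)

weights = {
    'h1':  tf.Variable(tf.random_normal([n_input, n_hidden_1], stddev=stddev)),
    'h2':  tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2], stddev=stddev)),
    'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes], stddev=stddev)),
}
biases = {
    'b1':  tf.Variable(tf.random_normal([n_hidden_1])),
    'b2':  tf.Variable(tf.random_normal([n_hidden_2])),
    'out': tf.Variable(tf.random_normal([n_classes])),
}

def multilayer_perceptron(_X, _weights, _biases):
    layer_1 = tf.nn.sigmoid(tf.add(tf.matmul(_X, _weights['h1']), _biases['b1']))
    layer_2 = tf.nn.sigmoid(tf.add(tf.matmul(layer_1, _weights['h2']), _biases['b2']))
    # Patched line: return raw logits, no sigmoid/softmax on the output layer.
    return tf.matmul(layer_2, _weights['out']) + _biases['out']

x = tf.placeholder(tf.float32, [None, n_input])    # flattened MNIST images
y = tf.placeholder(tf.float32, [None, n_classes])  # one-hot labels

pred = multilayer_perceptron(x, weights, biases)

# softmax_cross_entropy_with_logits applies softmax to `pred` internally,
# which is why the output layer itself must not apply sigmoid/softmax.
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y))
```

If class probabilities are needed at prediction time, `tf.nn.softmax(pred)` (or `tf.argmax(pred, 1)` for the predicted label) can be applied explicitly, since the graph now ends in raw logits.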