From 429a8a53c6d502f5a70bb25f0f6e1ed6535d78d1 Mon Sep 17 00:00:00 2001
From: Nikhil Kilari <36819773+kilarinikhil@users.noreply.github.com>
Date: Tue, 17 Mar 2020 22:25:51 +0530
Subject: [PATCH] Fix a minor mistake in the cross-entropy loss

The loss should be tf.reduce_mean(-tf.reduce_sum(y_true * tf.math.log(y_pred), 1)).
Without the axis argument, tf.reduce_sum collapses the whole batch to a
single scalar, so tf.reduce_mean of that scalar simply returns the sum
itself instead of the per-example mean.
---
 tensorflow_v2/notebooks/2_BasicModels/logistic_regression.ipynb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tensorflow_v2/notebooks/2_BasicModels/logistic_regression.ipynb b/tensorflow_v2/notebooks/2_BasicModels/logistic_regression.ipynb
index c29c42b9..b9b1ccc4 100644
--- a/tensorflow_v2/notebooks/2_BasicModels/logistic_regression.ipynb
+++ b/tensorflow_v2/notebooks/2_BasicModels/logistic_regression.ipynb
@@ -109,7 +109,7 @@
 "    # Clip prediction values to avoid log(0) error.\n",
 "    y_pred = tf.clip_by_value(y_pred, 1e-9, 1.)\n",
 "    # Compute cross-entropy.\n",
-"    return tf.reduce_mean(-tf.reduce_sum(y_true * tf.math.log(y_pred)))\n",
+"    return tf.reduce_mean(-tf.reduce_sum(y_true * tf.math.log(y_pred),1))\n",
 "\n",
 "# Accuracy metric.\n",
 "def accuracy(y_pred, y_true):\n",
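
A minimal standalone sketch of the axis behavior the commit message
describes (not part of the patch; the label and prediction values are
invented for illustration):

    import tensorflow as tf

    # Two one-hot labels and clipped predictions (values invented for illustration).
    y_true = tf.constant([[1., 0.], [0., 1.]])
    y_pred = tf.clip_by_value(tf.constant([[0.8, 0.2], [0.4, 0.6]]), 1e-9, 1.)

    # Without an axis, reduce_sum collapses the whole batch to a scalar,
    # so reduce_mean of that scalar is just the total sum.
    summed = tf.reduce_mean(-tf.reduce_sum(y_true * tf.math.log(y_pred)))

    # With axis 1, reduce_sum yields one cross-entropy value per example,
    # and reduce_mean then averages over the batch.
    averaged = tf.reduce_mean(-tf.reduce_sum(y_true * tf.math.log(y_pred), 1))

    print(summed.numpy())    # ~0.734 (sum over both examples)
    print(averaged.numpy())  # ~0.367 (mean over the batch)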