From a1a4ec43164538b9ac4227944d68ed491f7011be Mon Sep 17 00:00:00 2001
From: Cyril
Date: Mon, 13 Dec 2021 15:13:11 -0500
Subject: [PATCH] Updated TF softmax function for TF 2.0

TF2 changed the softmax cross entropy function: the `dim` argument was
removed, and `axis` is recommended instead.
---
 crowd_layer/crowd_layers.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/crowd_layer/crowd_layers.py b/crowd_layer/crowd_layers.py
index abe7d1b..f213742 100644
--- a/crowd_layer/crowd_layers.py
+++ b/crowd_layer/crowd_layers.py
@@ -142,7 +142,7 @@ def compute_output_shape(self, input_shape):
 class MaskedMultiCrossEntropy(object):
 
 	def loss(self, y_true, y_pred):
-		vec = tf.nn.softmax_cross_entropy_with_logits(logits=y_pred, labels=y_true, dim=1)
+		vec = tf.nn.softmax_cross_entropy_with_logits(logits=y_pred, labels=y_true, axis=1)
 		mask = tf.equal(y_true[:,0,:], -1)
 		zer = tf.zeros_like(vec)
 		loss = tf.where(mask, x=zer, y=vec)
@@ -172,7 +172,7 @@ def loss(self, y_true, y_pred):
 		y_true = tf.transpose(tf.one_hot(tf.cast(y_true, tf.int32), self.num_classes, axis=-1), [0,1,3,2])
 
 		# masked cross-entropy
-		vec = tf.nn.softmax_cross_entropy_with_logits(logits=y_pred, labels=y_true, dim=2)
+		vec = tf.nn.softmax_cross_entropy_with_logits(logits=y_pred, labels=y_true, axis=2)
 		zer = tf.zeros_like(vec)
 		vec = tf.where(mask_missings, x=zer, y=vec)
 		vec = tf.where(mask_padding, x=zer, y=vec)
@@ -358,5 +358,3 @@ def on_epoch_begin(self, epoch, logs=None):
 	def on_epoch_end(self, epoch, logs=None):
 		# run M-step
 		self.model.pi = self.loss.m_step()
-
-