bugfix - losses - DSSIM - Don't reduce mean on output

torzdf 2020-08-21 15:29:27 +01:00
parent 619bd415aa
commit 877b90b60a


@@ -136,7 +136,7 @@ class DSSIMObjective(tf.keras.losses.Loss):
 denom = (K.square(u_true) + K.square(u_pred) + self.c_1) * (
     var_pred + var_true + self.c_2)
 ssim /= denom  # no need for clipping, c_1 + c_2 make the denom non-zero
-return K.mean((1.0 - ssim) / 2.0)
+return (1.0 - ssim) / 2.0
 @staticmethod
 def _preprocess_padding(padding):
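
For context, here is a minimal sketch of the pattern this commit adopts. It is not the faceswap DSSIM code; ToyPerPixelLoss and its squared-error body are hypothetical stand-ins. The relevant mechanic is real, though: subclasses of tf.keras.losses.Loss return unreduced per-element values from call(), and the base class applies any sample weights and its configured reduction in __call__.

import tensorflow as tf
from tensorflow.keras import backend as K


class ToyPerPixelLoss(tf.keras.losses.Loss):
    """Hypothetical loss used only to illustrate returning unreduced values."""

    def call(self, y_true, y_pred):
        # Return the per-pixel loss map without K.mean(); the
        # tf.keras.losses.Loss base class applies sample weights and its
        # `reduction` setting (mean by default) when the loss is called.
        return K.square(y_pred - y_true)


y_true = tf.ones((2, 8, 8, 3))
y_pred = tf.fill((2, 8, 8, 3), 0.5)
loss_fn = ToyPerPixelLoss()
print(float(loss_fn(y_true, y_pred)))  # 0.25 -- Keras reduced the per-pixel map

Returning the full map keeps per-sample weighting and masking possible before reduction, which a value already collapsed to a scalar by K.mean() would prevent.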