Reverted to default AdamOptimizer beta values

Signed-off-by: Jim Martens <github@2martens.de>
This commit is contained in:
2019-04-12 13:44:05 +02:00
parent 25e2110a5b
commit 7ea63a9a49

View File

@@ -84,8 +84,7 @@ def train_simple(dataset: tf.data.Dataset,
'encoder': model.Encoder(zsize),
'decoder': model.Decoder(channels, zsize),
# define optimizers
-'enc_dec_optimizer': tf.train.AdamOptimizer(learning_rate=checkpointables['learning_rate_var'],
-                     beta1=0.5, beta2=0.999),
+'enc_dec_optimizer': tf.train.AdamOptimizer(learning_rate=checkpointables['learning_rate_var']),
# global step counter
'epoch_var': K.variable(-1, dtype=tf.int64),
'global_step': tf.train.get_or_create_global_step(),