Reverted to default AdamOptimizer beta values
Signed-off-by: Jim Martens <github@2martens.de>
This commit is contained in:
@@ -84,8 +84,7 @@ def train_simple(dataset: tf.data.Dataset,
         'encoder': model.Encoder(zsize),
         'decoder': model.Decoder(channels, zsize),
         # define optimizers
-        'enc_dec_optimizer': tf.train.AdamOptimizer(learning_rate=checkpointables['learning_rate_var'],
-                                                    beta1=0.5, beta2=0.999),
+        'enc_dec_optimizer': tf.train.AdamOptimizer(learning_rate=checkpointables['learning_rate_var']),
         # global step counter
         'epoch_var': K.variable(-1, dtype=tf.int64),
         'global_step': tf.train.get_or_create_global_step(),
Reference in New Issue
Block a user