Reverted optimizer to TensorFlow and changed beta1 to 0.9

Signed-off-by: Jim Martens <github@2martens.de>
This commit is contained in:
2019-07-04 12:59:39 +02:00
parent 59a1d85af9
commit 2b93480df9

View File

@@ -471,11 +471,8 @@ def train_keras(train_generator: callable,
     # compile the model
     ssd_model.model.compile(
-        # optimizer=tf.train.AdamOptimizer(learning_rate=learning_rate_var,
-        #                                  beta1=0.5, beta2=0.999),
-        optimizer=tf.keras.optimizers.Adam(lr=learning_rate_var,
-                                           beta_1=0.9, beta_2=0.999,
-                                           epsilon=1e-08, decay=5e-04),
+        optimizer=tf.train.AdamOptimizer(learning_rate=learning_rate_var,
+                                         beta1=0.9, beta2=0.999),
         loss=ssd_loss.compute_loss,
         metrics=[
             "categorical_accuracy"