Reduced latent space to 32 channels

Signed-off-by: Jim Martens <github@2martens.de>
This commit is contained in:
2019-04-17 14:14:48 +02:00
parent 600cb55e2e
commit f4f9d31a7d

View File

@@ -158,12 +158,12 @@ def _auto_encoder_train(args: argparse.Namespace) -> None:
         with train_summary_writer.as_default():
             train.train_simple(coco_data, iteration=args.iteration,
                                weights_prefix=f"{args.weights_path}/category-{category}",
-                               zsize=64, lr=0.0001, verbose=args.verbose,
+                               zsize=32, lr=0.0001, verbose=args.verbose,
                                channels=3, train_epoch=args.num_epochs, batch_size=batch_size)
     else:
         train.train_simple(coco_data, iteration=args.iteration,
                            weights_prefix=f"{args.weights_path}/category-{category}",
-                           zsize=64, lr=0.0001, verbose=args.verbose,
+                           zsize=32, lr=0.0001, verbose=args.verbose,
                            channels=3, train_epoch=args.num_epochs, batch_size=batch_size)