From 27833dae809d1b08463a21e71e2ad2fd3320b73a Mon Sep 17 00:00:00 2001
From: "Daniel J. Hofmann"
Date: Thu, 30 May 2019 19:51:37 +0200
Subject: [PATCH] Removes adam's l2 regularization until fixed in pytorch#4429

---
 config/model-unet.toml | 3 ---
 robosat/tools/train.py | 3 +--
 2 files changed, 1 insertion(+), 5 deletions(-)

diff --git a/config/model-unet.toml b/config/model-unet.toml
index 6effd699..5303f825 100644
--- a/config/model-unet.toml
+++ b/config/model-unet.toml
@@ -27,8 +27,5 @@
   # Learning rate for the optimizer.
   lr = 0.0001

-  # Weight decay l2 penalty for the optimizer
-  decay = 0.0001
-
   # Loss function name (e.g 'Lovasz', 'mIoU' or 'CrossEntropy')
   loss = 'Lovasz'
diff --git a/robosat/tools/train.py b/robosat/tools/train.py
index a5246429..a70e6500 100644
--- a/robosat/tools/train.py
+++ b/robosat/tools/train.py
@@ -78,7 +78,7 @@ def main(args):
     if model["opt"]["loss"] in ("CrossEntropy", "mIoU", "Focal"):
         sys.exit("Error: The loss function used, need dataset weights values")

-    optimizer = Adam(net.parameters(), lr=model["opt"]["lr"], weight_decay=model["opt"]["decay"])
+    optimizer = Adam(net.parameters(), lr=model["opt"]["lr"])

     resume = 0
     if args.checkpoint:
@@ -118,7 +118,6 @@ def map_location(storage, _):
     log.log("Batch Size:\t {}".format(model["common"]["batch_size"]))
     log.log("Image Size:\t {}".format(model["common"]["image_size"]))
     log.log("Learning Rate:\t {}".format(model["opt"]["lr"]))
-    log.log("Weight Decay:\t {}".format(model["opt"]["decay"]))
     log.log("Loss function:\t {}".format(model["opt"]["loss"]))
     if "weight" in locals():
         log.log("Weights :\t {}".format(dataset["weights"]["values"]))
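
For background, pytorch#4429 tracks the fact that Adam's weight_decay is applied as a plain L2 penalty on the gradients, which behaves differently from decoupled weight decay under adaptive optimizers. Below is a minimal sketch, not part of this patch, of how the removed decay option could be reintroduced later, assuming a PyTorch release that ships torch.optim.AdamW (decoupled weight decay) and the model["opt"] config keys used in robosat/tools/train.py:

    # Sketch only, assumes a PyTorch version providing torch.optim.AdamW and
    # that `decay` is added back to the [opt] section of config/model-unet.toml.
    from torch.optim import AdamW

    optimizer = AdamW(
        net.parameters(),
        lr=model["opt"]["lr"],
        weight_decay=model["opt"]["decay"],  # decoupled decay, not an L2 term on the gradients
    )

Until such a replacement lands, dropping the decay setting entirely, as this patch does, avoids training with the problematic L2 formulation.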