Removes Adam's L2 regularization until fixed in pytorch#4429
daniel-j-h committed May 30, 2019
1 parent e31b206 commit 27833da
Showing 2 changed files with 1 addition and 5 deletions.
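Background on why the decay is dropped rather than kept: the referenced pytorch#4429 concerns Adam applying weight_decay as an L2 penalty folded into the gradient, where it then gets rescaled by the per-parameter adaptive terms, instead of as decoupled weight decay applied directly to the weights. Below is a minimal sketch of the two update rules (plain Python, bias correction omitted for brevity; illustrative only, not code from this repository):

def adam_l2_step(w, grad, m, v, lr, wd, beta1=0.9, beta2=0.999, eps=1e-8):
    """Adam with weight_decay folded into the gradient (L2 regularization):
    the decay term passes through the adaptive moment estimates."""
    grad = grad + wd * w                    # L2 penalty enters the gradient
    m = beta1 * m + (1 - beta1) * grad
    v = beta2 * v + (1 - beta2) * grad ** 2
    w = w - lr * m / (v ** 0.5 + eps)       # decay gets rescaled per parameter
    return w, m, v

def adamw_step(w, grad, m, v, lr, wd, beta1=0.9, beta2=0.999, eps=1e-8):
    """Decoupled weight decay (AdamW-style): the decay is applied directly
    to the weights and is not distorted by the adaptive learning rate."""
    m = beta1 * m + (1 - beta1) * grad
    v = beta2 * v + (1 - beta2) * grad ** 2
    w = w - lr * m / (v ** 0.5 + eps) - lr * wd * w
    return w, m, v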
3 changes: 0 additions & 3 deletions config/model-unet.toml
@@ -27,8 +27,5 @@
 # Learning rate for the optimizer.
 lr = 0.0001
 
-# Weight decay l2 penalty for the optimizer
-decay = 0.0001
-
 # Loss function name (e.g 'Lovasz', 'mIoU' or 'CrossEntropy')
 loss = 'Lovasz'
3 changes: 1 addition & 2 deletions robosat/tools/train.py
@@ -78,7 +78,7 @@ def main(args):
if model["opt"]["loss"] in ("CrossEntropy", "mIoU", "Focal"):
sys.exit("Error: The loss function used, need dataset weights values")

optimizer = Adam(net.parameters(), lr=model["opt"]["lr"], weight_decay=model["opt"]["decay"])
optimizer = Adam(net.parameters(), lr=model["opt"]["lr"])

resume = 0
if args.checkpoint:
@@ -118,7 +118,6 @@ def map_location(storage, _):
log.log("Batch Size:\t {}".format(model["common"]["batch_size"]))
log.log("Image Size:\t {}".format(model["common"]["image_size"]))
log.log("Learning Rate:\t {}".format(model["opt"]["lr"]))
log.log("Weight Decay:\t {}".format(model["opt"]["decay"]))
log.log("Loss function:\t {}".format(model["opt"]["loss"]))
if "weight" in locals():
log.log("Weights :\t {}".format(dataset["weights"]["values"]))
