From 8c9e7b56be9fe9a2882d8d56f7bfbe9a1972ecb3 Mon Sep 17 00:00:00 2001
From: Devon Hjelm
Date: Mon, 17 Sep 2018 09:54:13 -0400
Subject: [PATCH 1/5] Update README.md

Reloading no longer an issue
---
 README.md | 1 -
 1 file changed, 1 deletion(-)

diff --git a/README.md b/README.md
index 06e1897..f41d9cf 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,6 @@ Cortex is under heavy development. It's functional, but may not fit your needs y
 
 (some) Outstanding issues:
 
-* Reloading models does not reload hyperparameters (they need to be specified again when reloading)
 * Need custom data iterator functionality within custom models
 * torchtext integration needed
 * Missing unit tests

From 4a4e270c7b6aed2039bf8a8348bfe10579cc2734 Mon Sep 17 00:00:00 2001
From: Dmitriy Serdyuk
Date: Mon, 24 Sep 2018 17:01:29 -0400
Subject: [PATCH 2/5] Fix model_optimizer_options

---
 cortex/_lib/optimizer.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/cortex/_lib/optimizer.py b/cortex/_lib/optimizer.py
index 0e3ed93..c865452 100644
--- a/cortex/_lib/optimizer.py
+++ b/cortex/_lib/optimizer.py
@@ -170,7 +170,8 @@ def setup(model, optimizer='Adam', learning_rate=1.e-4,
     optimizer_options_.update(weight_decay=wd, clipping=cl, lr=eta)
 
     if network_key in model_optimizer_options.keys():
-        optimizer_options_.update(**model_optimizer_options)
+        optimizer_options_.update(
+            **eval(model_optimizer_options[network_key]))
 
     # Create the optimizer
     op = wrap_optimizer(op)

From a95ac80889e7a03b43df599402f4834506683936 Mon Sep 17 00:00:00 2001
From: Dmitriy Serdyuk
Date: Mon, 24 Sep 2018 17:03:27 -0400
Subject: [PATCH 3/5] Format docstrings

---
 cortex/_lib/optimizer.py | 12 +++++-------
 1 file changed, 5 insertions(+), 7 deletions(-)

diff --git a/cortex/_lib/optimizer.py b/cortex/_lib/optimizer.py
index c865452..4dd15b2 100644
--- a/cortex/_lib/optimizer.py
+++ b/cortex/_lib/optimizer.py
@@ -1,6 +1,4 @@
-'''Module for setting up the optimizer.
-
-'''
+"""Module for setting up the optimizer."""
 
 from collections import defaultdict
 import logging
@@ -52,8 +50,8 @@ def step(self, closure=None):
         """Performs a single optimization step.
 
         Arguments:
-            closure (callable, optional): A closure that reevaluates the model
-                and returns the loss.
+            closure (callable, optional): A closure that reevaluates the
+                model and returns the loss.
         """
         loss = super().step(closure=closure)
 
@@ -70,18 +68,18 @@ def step(self, closure=None):
 def setup(model, optimizer='Adam', learning_rate=1.e-4,
           weight_decay={}, clipping={}, optimizer_options={},
           model_optimizer_options={}, scheduler=None, scheduler_options={}):
-    '''Optimizer entrypoint.
+    """Optimizer entrypoint.
 
     Args:
         optimizer: Optimizer type. See `torch.optim` for supported optimizers.
         learning_rate: Learning rate.
         weight_decay: Weight decay.
         clipping: Clipping.
         optimizer_options: Optimizer options.
         model_optimizer_options: Optimizer options for specified model.
         scheduler: Optimizer learning rate scheduler.
         scheduler_options: Options for scheduler.
 
-    '''
+    """
     OPTIMIZERS.clear()
     SCHEDULERS.clear()

From 42a38f7690b5f46609ae15f9fe63575af98f554f Mon Sep 17 00:00:00 2001
From: Dmitriy Serdyuk
Date: Mon, 24 Sep 2018 17:08:54 -0400
Subject: [PATCH 4/5] Simplify if check

---
 cortex/_lib/optimizer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/cortex/_lib/optimizer.py b/cortex/_lib/optimizer.py
index 4dd15b2..0cfbf86 100644
--- a/cortex/_lib/optimizer.py
+++ b/cortex/_lib/optimizer.py
@@ -167,7 +167,7 @@ def setup(model, optimizer='Adam', learning_rate=1.e-4,
     optimizer_options_ = dict((k, v) for k, v in optimizer_options.items())
     optimizer_options_.update(weight_decay=wd, clipping=cl, lr=eta)
 
-    if network_key in model_optimizer_options.keys():
+    if network_key in model_optimizer_options:
         optimizer_options_.update(
             **eval(model_optimizer_options[network_key]))

From 4d5bf2401b1c5139c838ab4a373f4cac072a3572 Mon Sep 17 00:00:00 2001
From: Dmitriy Serdyuk
Date: Mon, 24 Sep 2018 17:31:05 -0400
Subject: [PATCH 5/5] Refactor

---
 cortex/_lib/optimizer.py | 26 +++++++++++---------------
 1 file changed, 11 insertions(+), 15 deletions(-)

diff --git a/cortex/_lib/optimizer.py b/cortex/_lib/optimizer.py
index 0cfbf86..0926335 100644
--- a/cortex/_lib/optimizer.py
+++ b/cortex/_lib/optimizer.py
@@ -146,26 +146,22 @@ def setup(model, optimizer='Adam', learning_rate=1.e-4,
     for p in params:
         p.requires_grad = True
 
-    # Learning rates
-    if isinstance(learning_rate, dict):
-        eta = learning_rate[network_key]
-    else:
-        eta = learning_rate
+    def extract_value(dict_or_value, default=None):
+        if isinstance(dict_or_value, dict):
+            return dict_or_value.get(network_key, default)
+        return dict_or_value
 
+    # Learning rates
+    network_lr = extract_value(learning_rate)
     # Weight decay
-    if isinstance(weight_decay, dict):
-        wd = weight_decay.get(network_key, 0)
-    else:
-        wd = weight_decay
-
-    if isinstance(clipping, dict):
-        cl = clipping.get(network_key, None)
-    else:
-        cl = clipping
+    network_wd = extract_value(weight_decay, 0)
+    # Gradient clipping
+    network_cl = extract_value(clipping)
 
     # Update the optimizer options
     optimizer_options_ = dict((k, v) for k, v in optimizer_options.items())
-    optimizer_options_.update(weight_decay=wd, clipping=cl, lr=eta)
+    optimizer_options_.update(
+        weight_decay=network_wd, clipping=network_cl, lr=network_lr)
 
     if network_key in model_optimizer_options:
         optimizer_options_.update(
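
Taken together, PATCH 2 and PATCH 5 make `setup` resolve every per-network
hyperparameter the same way: a dict argument is looked up by `network_key`
(falling back to a default), a bare scalar applies to all networks, and a
per-network entry in `model_optimizer_options` is a string that gets `eval`'d
into extra keyword arguments. Below is a minimal, self-contained sketch of
that resolution logic; the option values and the `network_key` are
hypothetical, chosen only to illustrate the behavior:

    # Standalone sketch of the per-network option resolution after PATCH 5,
    # including the string-valued model_optimizer_options handling from PATCH 2.
    # All values below are hypothetical examples, not taken from the patches.

    learning_rate = {'discriminator': 1e-4}  # dict: per-network value
    weight_decay = 5e-5                      # scalar: shared by all networks
    clipping = {}                            # dict with no entry for this network
    model_optimizer_options = {'discriminator': "dict(betas=(0.5, 0.999))"}

    network_key = 'discriminator'

    def extract_value(dict_or_value, default=None):
        # A dict maps network names to values; anything else applies everywhere.
        if isinstance(dict_or_value, dict):
            return dict_or_value.get(network_key, default)
        return dict_or_value

    optimizer_options_ = dict(
        weight_decay=extract_value(weight_decay, 0),  # -> 5e-05 (scalar passthrough)
        clipping=extract_value(clipping),             # -> None (missing key, no default)
        lr=extract_value(learning_rate),              # -> 0.0001 (dict lookup)
    )

    # PATCH 2: per-network overrides arrive as strings (e.g. parsed from the
    # command line), so they are eval'd into a dict of keyword arguments.
    if network_key in model_optimizer_options:
        optimizer_options_.update(**eval(model_optimizer_options[network_key]))

    print(optimizer_options_)
    # {'weight_decay': 5e-05, 'clipping': None, 'lr': 0.0001, 'betas': (0.5, 0.999)}

The `extract_value` helper collapses the three near-identical `isinstance`
branches into one place; note that it closes over `network_key`, and that the
`eval` in the PATCH 2 path assumes the option strings come from a trusted
source such as the command line.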