No implicit learning rate scheduling

Jensun Ravichandran
2021-06-04 15:55:06 +02:00
parent b0df61d1c3
commit 42d974e08c
3 changed files with 45 additions and 20 deletions


@@ -1,5 +1,4 @@
 import pytorch_lightning as pl
-from torch.optim.lr_scheduler import ExponentialLR
 
 class AbstractPrototypeModel(pl.LightningModule):
@@ -18,15 +17,16 @@ class AbstractPrototypeModel(pl.LightningModule):
     def configure_optimizers(self):
         optimizer = self.optimizer(self.parameters(), lr=self.hparams.lr)
-        scheduler = ExponentialLR(optimizer,
-                                  gamma=0.99,
-                                  last_epoch=-1,
-                                  verbose=False)
-        sch = {
-            "scheduler": scheduler,
-            "interval": "step",
-        }  # called after each training step
-        return [optimizer], [sch]
+        if self.lr_scheduler is not None:
+            scheduler = self.lr_scheduler(optimizer,
+                                          **self.lr_scheduler_kwargs)
+            sch = {
+                "scheduler": scheduler,
+                "interval": "step",
+            }  # called after each training step
+            return [optimizer], [sch]
+        else:
+            return optimizer
 
 class PrototypeImageModel(pl.LightningModule):
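After this change, configure_optimizers() only attaches a learning-rate scheduler when one was supplied explicitly; without it, the optimizer runs with a fixed learning rate. A minimal usage sketch follows. It assumes the model's constructor stores the lr_scheduler class and lr_scheduler_kwargs that configure_optimizers() reads; MyPrototypeModel is a hypothetical subclass of AbstractPrototypeModel used only for illustration.

# Minimal sketch, not part of this commit: supplying a scheduler explicitly
# now that nothing is scheduled by default.
from torch.optim.lr_scheduler import ExponentialLR

model = MyPrototypeModel(
    hparams,
    lr_scheduler=ExponentialLR,              # scheduler class, instantiated in configure_optimizers()
    lr_scheduler_kwargs={"gamma": 0.99},     # forwarded as **kwargs to the scheduler
)

# Omitting lr_scheduler (leaving it None) means configure_optimizers()
# returns only the optimizer, i.e. no learning-rate scheduling at all.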