No implicit learning rate scheduling
@@ -1,5 +1,4 @@
 import pytorch_lightning as pl
-from torch.optim.lr_scheduler import ExponentialLR
 
 
 class AbstractPrototypeModel(pl.LightningModule):
@@ -18,15 +17,16 @@ class AbstractPrototypeModel(pl.LightningModule):
 
     def configure_optimizers(self):
         optimizer = self.optimizer(self.parameters(), lr=self.hparams.lr)
-        scheduler = ExponentialLR(optimizer,
-                                  gamma=0.99,
-                                  last_epoch=-1,
-                                  verbose=False)
-        sch = {
-            "scheduler": scheduler,
-            "interval": "step",
-        }  # called after each training step
-        return [optimizer], [sch]
+        if self.lr_scheduler is not None:
+            scheduler = self.lr_scheduler(optimizer,
+                                          **self.lr_scheduler_kwargs)
+            sch = {
+                "scheduler": scheduler,
+                "interval": "step",
+            }  # called after each training step
+            return [optimizer], [sch]
+        else:
+            return optimizer
 
 
 class PrototypeImageModel(pl.LightningModule):