Add support for multiple optimizers

Jensun Ravichandran
2021-05-03 13:20:49 +02:00
parent 042b3fcaa2
commit 96aeaa3448
2 changed files with 19 additions and 2 deletions


@@ -1,11 +1,20 @@
 import pytorch_lightning as pl
 import torch
+from torch.optim.lr_scheduler import ExponentialLR
 
 
 class AbstractLightningModel(pl.LightningModule):
     def configure_optimizers(self):
         optimizer = torch.optim.Adam(self.parameters(), lr=self.hparams.lr)
-        return optimizer
+        scheduler = ExponentialLR(optimizer,
+                                  gamma=0.99,
+                                  last_epoch=-1,
+                                  verbose=False)
+        sch = {
+            "scheduler": scheduler,
+            "interval": "step",
+        }  # called after each training step
+        return [optimizer], [sch]
 
 
 class AbstractPrototypeModel(AbstractLightningModel):
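
The change replaces the bare "return optimizer" with Lightning's list-based return format: a list of optimizers together with a list of scheduler configuration dicts, which is the convention that lets a model train with more than one optimizer. Below is a minimal sketch of a subclass using that format with two optimizers; the class, layer, and parameter names are hypothetical and not part of this commit.

import pytorch_lightning as pl
import torch
from torch.optim.lr_scheduler import ExponentialLR


class TwoOptimizerModel(pl.LightningModule):
    """Hypothetical model showing the list-based configure_optimizers format."""

    def __init__(self, lr=1e-3):
        super().__init__()
        self.save_hyperparameters()  # makes lr available as self.hparams.lr
        self.encoder = torch.nn.Linear(10, 2)
        self.prototypes = torch.nn.Parameter(torch.zeros(3, 2))

    def configure_optimizers(self):
        # One optimizer per parameter group, returned as a list,
        # together with a list of scheduler configuration dicts.
        opt_encoder = torch.optim.Adam(self.encoder.parameters(),
                                       lr=self.hparams.lr)
        opt_protos = torch.optim.SGD([self.prototypes], lr=self.hparams.lr)
        sch_encoder = {
            "scheduler": ExponentialLR(opt_encoder, gamma=0.99),
            "interval": "step",  # called after each training step
        }
        return [opt_encoder, opt_protos], [sch_encoder]

When more than one optimizer is returned, PyTorch Lightning of this 1.x generation also passes an optimizer_idx argument to training_step so the step logic can branch per optimizer; that part is omitted from the sketch.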