Change optimizer using kwargs

Jensun Ravichandran
2021-05-11 16:13:00 +02:00
parent b38acd58a8
commit eab1ec72c2
3 changed files with 21 additions and 41 deletions


@@ -3,9 +3,13 @@ import torch
 
 from torch.optim.lr_scheduler import ExponentialLR
 
-class AbstractLightningModel(pl.LightningModule):
+class AbstractPrototypeModel(pl.LightningModule):
+    @property
+    def prototypes(self):
+        return self.proto_layer.components.detach().cpu()
+
     def configure_optimizers(self):
-        optimizer = torch.optim.Adam(self.parameters(), lr=self.hparams.lr)
+        optimizer = self.optimizer(self.parameters(), lr=self.hparams.lr)
         scheduler = ExponentialLR(optimizer,
                                   gamma=0.99,
                                   last_epoch=-1,
@@ -15,9 +19,3 @@ class AbstractLightningModel(pl.LightningModule):
             "interval": "step",
         } # called after each training step
         return [optimizer], [sch]
-
-
-class AbstractPrototypeModel(AbstractLightningModel):
-    @property
-    def prototypes(self):
-        return self.proto_layer.components.detach().cpu()
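
Below is a minimal, self-contained sketch of the pattern this diff moves to: the optimizer class is supplied as a keyword argument, stored on the model, and instantiated in configure_optimizers instead of being hard-coded to torch.optim.Adam. This is not code from this repository; the class name ExamplePrototypeModel, the __init__ signature, and the placeholder layer are illustrative assumptions, and only the configure_optimizers body mirrors the new lines above.

import pytorch_lightning as pl
import torch
from torch.optim.lr_scheduler import ExponentialLR


class ExamplePrototypeModel(pl.LightningModule):
    """Illustrative model that accepts the optimizer class via a kwarg."""
    def __init__(self, hparams, optimizer=torch.optim.Adam):
        super().__init__()
        self.save_hyperparameters(hparams)  # exposes self.hparams.lr
        self.optimizer = optimizer  # optimizer *class*, instantiated later
        self.layer = torch.nn.Linear(2, 3)  # stand-in for a prototype layer

    def configure_optimizers(self):
        # Instantiate whichever optimizer class was passed in, mirroring the
        # `self.optimizer(...)` call introduced by this commit.
        optimizer = self.optimizer(self.parameters(), lr=self.hparams.lr)
        scheduler = ExponentialLR(optimizer, gamma=0.99, last_epoch=-1)
        sch = {
            "scheduler": scheduler,
            "interval": "step",
        }  # step the scheduler after each training step
        return [optimizer], [sch]


# Usage: swap optimizers without touching the model class.
model = ExamplePrototypeModel(hparams=dict(lr=0.01), optimizer=torch.optim.SGD)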