Change optimizer using kwargs
@@ -3,9 +3,13 @@ import torch
 from torch.optim.lr_scheduler import ExponentialLR
 
 
-class AbstractLightningModel(pl.LightningModule):
+class AbstractPrototypeModel(pl.LightningModule):
+    @property
+    def prototypes(self):
+        return self.proto_layer.components.detach().cpu()
+
     def configure_optimizers(self):
-        optimizer = torch.optim.Adam(self.parameters(), lr=self.hparams.lr)
+        optimizer = self.optimizer(self.parameters(), lr=self.hparams.lr)
         scheduler = ExponentialLR(optimizer,
                                   gamma=0.99,
                                   last_epoch=-1,
@@ -15,9 +19,3 @@ class AbstractLightningModel(pl.LightningModule):
             "interval": "step",
         }  # called after each training step
         return [optimizer], [sch]
-
-
-class AbstractPrototypeModel(AbstractLightningModel):
-    @property
-    def prototypes(self):
-        return self.proto_layer.components.detach().cpu()
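With this change the optimizer is no longer hard-coded to torch.optim.Adam; configure_optimizers() instantiates whatever optimizer class the model holds on self.optimizer, so the optimizer can be chosen via a keyword argument at construction time. Below is a minimal sketch of that pattern. ExampleModel, its __init__ signature, and the save_hyperparameters() call are illustrative assumptions and not part of the commit; only the configure_optimizers() body mirrors the diff above.

# Minimal usage sketch (not from the commit): the optimizer class is passed in
# as a keyword argument and only instantiated inside configure_optimizers().
# ExampleModel, its __init__ signature, and save_hyperparameters() are assumed
# here for illustration; configure_optimizers() follows the diff above.
import pytorch_lightning as pl
import torch
from torch.optim.lr_scheduler import ExponentialLR


class ExampleModel(pl.LightningModule):
    def __init__(self, hparams, optimizer=torch.optim.Adam, **kwargs):
        super().__init__()
        self.save_hyperparameters(hparams)
        # Store the optimizer *class*; it is instantiated later with the
        # model parameters and the learning rate from the hyperparameters.
        self.optimizer = optimizer
        self.layer = torch.nn.Linear(2, 1)

    def configure_optimizers(self):
        optimizer = self.optimizer(self.parameters(), lr=self.hparams.lr)
        scheduler = ExponentialLR(optimizer, gamma=0.99, last_epoch=-1)
        sch = {
            "scheduler": scheduler,
            "interval": "step",
        }  # called after each training step
        return [optimizer], [sch]


# Any torch.optim class can now be swapped in without touching the model code.
model = ExampleModel(hparams=dict(lr=0.01), optimizer=torch.optim.SGD)

If the keyword argument defaults to torch.optim.Adam (an assumption here), the previous hard-coded behaviour is preserved while still allowing any other torch.optim class to be passed in.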