No implicit learning rate scheduling

Jensun Ravichandran
2021-06-04 15:55:06 +02:00
parent b0df61d1c3
commit 42d974e08c
3 changed files with 45 additions and 20 deletions

@@ -2,10 +2,10 @@
 import argparse
 
+import prototorch as pt
 import pytorch_lightning as pl
 import torch
-import prototorch as pt
+from torch.optim.lr_scheduler import ExponentialLR
 
 if __name__ == "__main__":
     # Command-line arguments
@@ -29,9 +29,16 @@ if __name__ == "__main__":
)
# Initialize the model
model = pt.models.GLVQ(hparams,
optimizer=torch.optim.Adam,
prototype_initializer=pt.components.SMI(train_ds))
model = pt.models.GLVQ(
hparams,
optimizer=torch.optim.Adam,
prototype_initializer=pt.components.SMI(train_ds),
lr_scheduler=ExponentialLR,
lr_scheduler_kwargs=dict(gamma=0.99, verbose=False),
)
# Compute intermediate input and output sizes
model.example_input_array = torch.zeros(4, 2)
# Callbacks
vis = pt.models.VisGLVQ2D(data=train_ds)
@@ -40,6 +47,8 @@ if __name__ == "__main__":
trainer = pl.Trainer.from_argparse_args(
args,
callbacks=[vis],
weights_summary="full",
accelerator="ddp",
)
# Training loop
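
The commit title describes the behavioral change: the model no longer sets up a learning-rate scheduler on its own. A scheduler runs only when the caller passes one explicitly, as the updated example does with lr_scheduler=ExponentialLR and lr_scheduler_kwargs=dict(gamma=0.99, verbose=False). For context, below is a minimal sketch of how explicitly passed scheduler arguments can be wired into a LightningModule's configure_optimizers hook; the SketchModel class is illustrative only, not the actual prototorch implementation.

import pytorch_lightning as pl
import torch


class SketchModel(pl.LightningModule):
    """Illustrative only: scheduling happens solely when a scheduler
    class is passed in explicitly."""

    def __init__(self, lr=0.01, optimizer=torch.optim.Adam,
                 lr_scheduler=None, lr_scheduler_kwargs=None):
        super().__init__()
        self.lr = lr
        self.optimizer = optimizer
        self.lr_scheduler = lr_scheduler
        self.lr_scheduler_kwargs = lr_scheduler_kwargs or {}
        self.layer = torch.nn.Linear(2, 3)  # dummy trainable parameters

    def configure_optimizers(self):
        optimizer = self.optimizer(self.parameters(), lr=self.lr)
        if self.lr_scheduler is None:
            # No implicit scheduling: without an explicit scheduler,
            # the learning rate stays constant.
            return optimizer
        scheduler = self.lr_scheduler(optimizer, **self.lr_scheduler_kwargs)
        return [optimizer], [scheduler]

With gamma=0.99, Lightning steps the scheduler once per epoch by default, so the learning rate decays by 1% each epoch; leaving lr_scheduler as None keeps the rate constant for the whole run, which is the "no implicit learning rate scheduling" behavior the commit title refers to.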