Use LambdaLayer from ProtoTorch

parent 8f4d66edf1
commit 27eccf44d4
@@ -47,17 +47,25 @@ if __name__ == "__main__":
         prototype_initializer=pt.components.SMI(train_ds),
     )
 
+    # Summary
+    print(model)
+
     # Callbacks
     vis = pt.models.VisGLVQ2D(train_ds)
     proto_scheduler = PrototypeScheduler(train_ds, 10)
 
     # Setup trainer
-    trainer = pl.Trainer.from_argparse_args(args,
-                                            max_epochs=100,
-                                            callbacks=[vis, proto_scheduler],
-                                            terminate_on_nan=True,
-                                            weights_summary=None,
-                                            accelerator='ddp')
+    trainer = pl.Trainer.from_argparse_args(
+        args,
+        max_epochs=100,
+        callbacks=[
+            vis,
+            proto_scheduler,
+        ],
+        terminate_on_nan=True,
+        weights_summary=None,
+        accelerator="ddp",
+    )
 
     # Training loop
    trainer.fit(model, train_loader)
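For context on the pattern above: Trainer.from_argparse_args merges parsed CLI arguments with explicit keyword overrides. A minimal sketch, assuming an older PyTorch Lightning 1.x release where from_argparse_args still exists (it was removed in 2.0) and where flags like terminate_on_nan, weights_summary, and accelerator="ddp" are still valid:

import argparse

import pytorch_lightning as pl

parser = argparse.ArgumentParser()
parser = pl.Trainer.add_argparse_args(parser)  # register all Trainer CLI flags
args = parser.parse_args()

# Explicit keyword arguments take precedence over the parsed CLI values.
trainer = pl.Trainer.from_argparse_args(args, max_epochs=100)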
@@ -9,23 +9,11 @@ from prototorch.functions.distances import (euclidean_distance, omega_distance,
                                             sed)
 from prototorch.functions.helper import get_flat
 from prototorch.functions.losses import glvq_loss, lvq1_loss, lvq21_loss
+from prototorch.modules import LambdaLayer
 
 from .abstract import AbstractPrototypeModel, PrototypeImageModel
 
 
-class FunctionLayer(torch.nn.Module):
-    def __init__(self, distance_fn):
-        super().__init__()
-        self.fn = distance_fn
-        self.name = distance_fn.__name__
-
-    def forward(self, *args, **kwargs):
-        return self.fn(*args, **kwargs)
-
-    def extra_repr(self):
-        return self.name
-
-
 class GLVQ(AbstractPrototypeModel):
     """Generalized Learning Vector Quantization."""
     def __init__(self, hparams, **kwargs):
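For readers unfamiliar with ProtoTorch: LambdaLayer plays the same role as the FunctionLayer deleted above, wrapping a plain callable in a torch.nn.Module so the function shows up in the model summary and repr. A minimal sketch of such a wrapper, mirroring the removed class rather than ProtoTorch's exact implementation (the class name here is illustrative):

import torch


class CallableWrapper(torch.nn.Module):
    """Sketch of a LambdaLayer-style module: wraps any callable as a layer."""
    def __init__(self, fn, name=None):
        super().__init__()
        self.fn = fn
        self.name = name or fn.__name__  # shown in the module repr

    def forward(self, *args, **kwargs):
        # Delegate straight to the wrapped function; the layer has no
        # parameters of its own.
        return self.fn(*args, **kwargs)

    def extra_repr(self):
        return self.name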
@@ -46,9 +34,9 @@ class GLVQ(AbstractPrototypeModel):
             distribution=self.hparams.distribution,
             initializer=self.prototype_initializer(**kwargs))
 
-        self.distance_layer = FunctionLayer(distance_fn)
-        self.transfer_layer = FunctionLayer(tranfer_fn)
-        self.loss = FunctionLayer(glvq_loss)
+        self.distance_layer = LambdaLayer(distance_fn)
+        self.transfer_layer = LambdaLayer(tranfer_fn)
+        self.loss = LambdaLayer(glvq_loss)
 
         self.optimizer = kwargs.get("optimizer", torch.optim.Adam)
 
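Illustrative usage of the new layers, not part of the commit: the sketch below wraps one of the distance functions this module already imports, and assumes euclidean_distance returns a pairwise (num_inputs x num_prototypes) distance matrix.

import torch
from prototorch.functions.distances import euclidean_distance
from prototorch.modules import LambdaLayer

distance_layer = LambdaLayer(euclidean_distance)

x = torch.randn(4, 2)       # four 2-D inputs
protos = torch.randn(3, 2)  # three 2-D prototypes
d = distance_layer(x, protos)
print(d.shape)          # expected: torch.Size([4, 3])
print(distance_layer)   # repr should name the wrapped function, as FunctionLayer did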