test: add unit tests

Jensun Ravichandran
2022-03-30 15:12:33 +02:00
parent 9da47b1dba
commit 7d3f59e54b
6 changed files with 218 additions and 26 deletions


@@ -26,13 +26,11 @@ from .lvq import (
)
from .probabilistic import (
    CELVQ,
    PLVQ,
    RSLVQ,
    SLVQ,
)
from .unsupervised import (
    GrowingNeuralGas,
    HeskesSOM,
    KohonenSOM,
    NeuralGas,
)
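
These re-exports are what make the flat, top-level import style used in the examples (and presumably in the new tests) possible:

from prototorch.models import KNN, NeuralGas, RSLVQ, SLVQ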


@@ -16,7 +16,7 @@ class KNN(SupervisedPrototypeModel):
     """K-Nearest-Neighbors classification algorithm."""

     def __init__(self, hparams, **kwargs):
-        super().__init__(hparams, **kwargs)
+        super().__init__(hparams, skip_proto_layer=True, **kwargs)

         # Default hparams
         self.hparams.setdefault("k", 1)
@@ -28,7 +28,7 @@ class KNN(SupervisedPrototypeModel):
         # Layers
         self.proto_layer = LabeledComponents(
-            distribution=[],
+            distribution=len(data) * [1],
             components_initializer=LiteralCompInitializer(data),
             labels_initializer=LiteralLabelsInitializer(targets))

         self.competition_layer = KNNC(k=self.hparams.k)

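With skip_proto_layer=True the base class no longer builds a prototype layer of its own, and distribution=len(data) * [1] registers every training sample as a prototype, so the model is usable immediately after construction. A minimal usage sketch; passing the training data as an (inputs, targets) tuple via the data keyword and calling predict follow the library's example style and are assumptions here:

import torch
import prototorch as pt

# Two well-separated toy clusters, five samples each.
x = torch.cat([torch.zeros(5, 2), torch.ones(5, 2)])
y = torch.cat([torch.zeros(5), torch.ones(5)]).long()

knn = pt.models.KNN(dict(k=1), data=(x, y))
print(knn.predict(x))  # nearest-prototype labels for all 10 samples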

@@ -67,8 +67,13 @@ class SLVQ(ProbabilisticLVQ):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.conditional_distribution = GaussianPrior(self.hparams.variance)
+
+        # Default hparams
+        self.hparams.setdefault("variance", 1.0)
+        variance = self.hparams.get("variance")
+
+        self.conditional_distribution = GaussianPrior(variance)
         self.loss = LossLayer(nllr_loss)


 class RSLVQ(ProbabilisticLVQ):
@@ -76,8 +81,13 @@ class RSLVQ(ProbabilisticLVQ):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.conditional_distribution = GaussianPrior(self.hparams.variance)
+
+        # Default hparams
+        self.hparams.setdefault("variance", 1.0)
+        variance = self.hparams.get("variance")
+
+        self.conditional_distribution = GaussianPrior(variance)
         self.loss = LossLayer(rslvq_loss)


 class PLVQ(ProbabilisticLVQ, SiameseGMLVQ):
@@ -88,8 +98,12 @@ class PLVQ(ProbabilisticLVQ, SiameseGMLVQ):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.conditional_distribution = RankScaledGaussianPrior(
-            self.hparams.lambd)
+
+        # Default hparams
+        self.hparams.setdefault("lambda", 1.0)
+        lam = self.hparams.get("lambda", 1.0)
+
+        self.conditional_distribution = RankScaledGaussianPrior(lam)
         self.loss = torch.nn.KLDivLoss()
         # FIXME

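The setdefault/get pattern above means "variance" (and "lambda" for PLVQ) no longer has to be supplied by the caller before the prior is constructed. A minimal sketch, assuming the RNCI initializer alias and the prototypes_initializer keyword from the library's examples:

import prototorch as pt

# No "variance" passed; the new default of 1.0 is used for the GaussianPrior.
model = pt.models.SLVQ(
    {"distribution": (3, 2)},  # 3 classes, 2 prototypes per class
    prototypes_initializer=pt.initializers.RNCI(2),
)
print(model.hparams.variance)  # -> 1.0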

@@ -35,7 +35,7 @@ class KohonenSOM(NonGradientMixin, UnsupervisedPrototypeModel):
         # Additional parameters
         x, y = torch.arange(h), torch.arange(w)
-        grid = torch.stack(torch.meshgrid(x, y), dim=-1)
+        grid = torch.stack(torch.meshgrid(x, y, indexing="ij"), dim=-1)
         self.register_buffer("_grid", grid)
         self._sigma = self.hparams.sigma
         self._lr = self.hparams.lr
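
Passing indexing="ij" explicitly keeps the matrix-style grid that torch.meshgrid produced before and silences the deprecation warning PyTorch emits since 1.10 when the argument is omitted. A standalone check of the resulting buffer shape:

import torch

h, w = 3, 4  # grid height and width
x, y = torch.arange(h), torch.arange(w)
grid = torch.stack(torch.meshgrid(x, y, indexing="ij"), dim=-1)
print(grid.shape)  # torch.Size([3, 4, 2]): one (row, col) coordinate per SOM unit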
@@ -88,12 +88,12 @@ class NeuralGas(UnsupervisedPrototypeModel):
         self.save_hyperparameters(hparams)

         # Default hparams
-        self.hparams.setdefault("agelimit", 10)
+        self.hparams.setdefault("age_limit", 10)
         self.hparams.setdefault("lm", 1)

         self.energy_layer = NeuralGasEnergy(lm=self.hparams.lm)
         self.topology_layer = ConnectionTopology(
-            agelimit=self.hparams.agelimit,
+            agelimit=self.hparams.age_limit,
             num_prototypes=self.hparams.num_prototypes,
         )
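
The user-facing hyperparameter is now age_limit, while the ConnectionTopology layer keeps its agelimit argument internally. A hedged construction sketch in the library's example style; the RNCI initializer and the prototypes_initializer keyword are assumptions here:

import prototorch as pt

model = pt.models.NeuralGas(
    dict(num_prototypes=30, lr=0.1, age_limit=20, lm=2),
    prototypes_initializer=pt.initializers.RNCI(2),
)
print(model.hparams.age_limit)  # -> 20 (defaults to 10 if omitted)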