diff --git a/docs/source/library.rst b/docs/source/library.rst
index 4e8331e..0472f05 100644
--- a/docs/source/library.rst
+++ b/docs/source/library.rst
@@ -74,7 +74,7 @@ For a test sample they return a distribution instead of a class assignment.
 The following two algorithms were presented by :cite:t:`seo2003`.
 Every prototype is the center of a Gaussian distribution of its class, generating a mixture model.
 
-.. autoclass:: prototorch.models.probabilistic.LikelihoodRatioLVQ
+.. autoclass:: prototorch.models.probabilistic.SLVQ
    :members:
 
 .. autoclass:: prototorch.models.probabilistic.RSLVQ
diff --git a/prototorch/models/__init__.py b/prototorch/models/__init__.py
index 48c4835..73ac1ee 100644
--- a/prototorch/models/__init__.py
+++ b/prototorch/models/__init__.py
@@ -19,7 +19,7 @@ from .glvq import (
 )
 from .knn import KNN
 from .lvq import LVQ1, LVQ21, MedianLVQ
-from .probabilistic import CELVQ, RSLVQ, LikelihoodRatioLVQ
+from .probabilistic import CELVQ, RSLVQ, SLVQ
 from .unsupervised import GrowingNeuralGas, HeskesSOM, KohonenSOM, NeuralGas
 from .vis import *
 
diff --git a/prototorch/models/probabilistic.py b/prototorch/models/probabilistic.py
index 4f6737f..475fdc0 100644
--- a/prototorch/models/probabilistic.py
+++ b/prototorch/models/probabilistic.py
@@ -61,8 +61,8 @@ class ProbabilisticLVQ(GLVQ):
         return loss
 
 
-class LikelihoodRatioLVQ(ProbabilisticLVQ):
-    """Learning Vector Quantization based on Likelihood Ratios."""
+class SLVQ(ProbabilisticLVQ):
+    """Soft Learning Vector Quantization."""
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.loss = LossLayer(nllr_loss)
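
After this rename the model is exposed as prototorch.models.SLVQ. A minimal training sketch follows; the hyperparameter keys (distribution, lr, variance), the bundled Iris dataset helper, and the SSCI prototype initializer follow common prototorch conventions but are assumptions for illustration, not part of this diff:

    import prototorch as pt
    import pytorch_lightning as pl
    import torch

    # Assumed prototorch helpers (not confirmed by this diff): the bundled
    # Iris dataset and the stratified-selection prototype initializer SSCI.
    train_ds = pt.datasets.Iris(dims=[0, 2])
    train_loader = torch.utils.data.DataLoader(train_ds, batch_size=64)

    # Two prototypes per class; each prototype is the center of a Gaussian,
    # so every class is modeled as a mixture of Gaussians.
    model = pt.models.SLVQ(
        dict(distribution=[2, 2, 2], lr=0.05, variance=1.0),
        prototypes_initializer=pt.initializers.SSCI(train_ds, noise=0.2),
    )

    # The models are PyTorch Lightning modules, so training uses a Trainer.
    trainer = pl.Trainer(max_epochs=50)
    trainer.fit(model, train_loader)

Since SLVQ only plugs nllr_loss into ProbabilisticLVQ, the same call pattern should carry over to RSLVQ, which differs only in its loss layer.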