Update example scripts
parent d7972a69e8
commit 728131e9db
@@ -1,102 +1,36 @@
 """CBC example using the Iris dataset."""
 
-import numpy as np
+import prototorch as pt
 import pytorch_lightning as pl
 import torch
-from matplotlib import pyplot as plt
-from prototorch.components import initializers as cinit
-from prototorch.datasets.abstract import NumpyDataset
-from sklearn.datasets import load_iris
-from torch.utils.data import DataLoader
-
-from prototorch.models.cbc import CBC, euclidean_similarity
-
-
-class VisualizationCallback(pl.Callback):
-    def __init__(
-        self,
-        x_train,
-        y_train,
-        prototype_model=True,
-        title="Prototype Visualization",
-        cmap="viridis",
-    ):
-        super().__init__()
-        self.x_train = x_train
-        self.y_train = y_train
-        self.title = title
-        self.fig = plt.figure(self.title)
-        self.cmap = cmap
-        self.prototype_model = prototype_model
-
-    def on_epoch_end(self, trainer, pl_module):
-        if self.prototype_model:
-            protos = pl_module.components
-            color = pl_module.prototype_labels
-        else:
-            protos = pl_module.components
-            color = "k"
-        ax = self.fig.gca()
-        ax.cla()
-        ax.set_title(self.title)
-        ax.set_xlabel("Data dimension 1")
-        ax.set_ylabel("Data dimension 2")
-        ax.scatter(x_train[:, 0], x_train[:, 1], c=y_train, edgecolor="k")
-        ax.scatter(
-            protos[:, 0],
-            protos[:, 1],
-            c=color,
-            cmap=self.cmap,
-            edgecolor="k",
-            marker="D",
-            s=50,
-        )
-        x = np.vstack((x_train, protos))
-        x_min, x_max = x[:, 0].min() - 1, x[:, 0].max() + 1
-        y_min, y_max = x[:, 1].min() - 1, x[:, 1].max() + 1
-        xx, yy = np.meshgrid(np.arange(x_min, x_max, 1 / 50),
-                             np.arange(y_min, y_max, 1 / 50))
-        mesh_input = np.c_[xx.ravel(), yy.ravel()]
-        y_pred = pl_module.predict(torch.Tensor(mesh_input))
-        y_pred = y_pred.reshape(xx.shape)
-
-        ax.contourf(xx, yy, y_pred, cmap=self.cmap, alpha=0.35)
-        ax.set_xlim(left=x_min + 0, right=x_max - 0)
-        ax.set_ylim(bottom=y_min + 0, top=y_max - 0)
-        plt.pause(0.1)
-
 
 if __name__ == "__main__":
     # Dataset
+    from sklearn.datasets import load_iris
     x_train, y_train = load_iris(return_X_y=True)
     x_train = x_train[:, [0, 2]]
-    train_ds = NumpyDataset(x_train, y_train)
+    train_ds = pt.datasets.NumpyDataset(x_train, y_train)
 
     # Dataloaders
-    train_loader = DataLoader(train_ds, num_workers=0, batch_size=150)
+    train_loader = torch.utils.data.DataLoader(train_ds,
+                                               num_workers=0,
+                                               batch_size=150)
 
     # Hyperparameters
     hparams = dict(
         input_dim=x_train.shape[1],
-        nclasses=len(np.unique(y_train)),
+        nclasses=3,
         num_components=9,
-        component_initializer=cinit.StratifiedMeanInitializer(
-            torch.Tensor(x_train), torch.Tensor(y_train)),
+        component_initializer=pt.components.SMI(train_ds),
         lr=0.01,
     )
 
     # Initialize the model
-    model = CBC(
-        hparams,
-        data=[x_train, y_train],
-        similarity=euclidean_similarity,
-    )
+    model = pt.models.CBC(hparams)
 
     # Callbacks
-    dvis = VisualizationCallback(x_train,
-                                 y_train,
-                                 prototype_model=False,
-                                 title="CBC Iris Example")
+    dvis = pt.models.VisCBC2D(data=(x_train, y_train),
+                              title="CBC Iris Example")
 
     # Setup trainer
     trainer = pl.Trainer(
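Note: the hunk above is cut off at `trainer = pl.Trainer(`. For readability, a minimal sketch of the updated CBC script as it reads after this change, assembled from the added and unchanged lines; the trainer arguments past the cut-off and the final `trainer.fit` call are assumptions based on the sibling examples, not part of the shown diff.

    """CBC example using the Iris dataset."""

    import prototorch as pt
    import pytorch_lightning as pl
    import torch

    if __name__ == "__main__":
        # Dataset
        from sklearn.datasets import load_iris
        x_train, y_train = load_iris(return_X_y=True)
        x_train = x_train[:, [0, 2]]
        train_ds = pt.datasets.NumpyDataset(x_train, y_train)

        # Dataloaders
        train_loader = torch.utils.data.DataLoader(train_ds,
                                                   num_workers=0,
                                                   batch_size=150)

        # Hyperparameters
        hparams = dict(
            input_dim=x_train.shape[1],
            nclasses=3,
            num_components=9,
            component_initializer=pt.components.SMI(train_ds),
            lr=0.01,
        )

        # Initialize the model and the visualization callback
        model = pt.models.CBC(hparams)
        dvis = pt.models.VisCBC2D(data=(x_train, y_train),
                                  title="CBC Iris Example")

        # Trainer setup and fit (assumed continuation; not shown in the hunk)
        trainer = pl.Trainer(max_epochs=50, callbacks=[dvis])
        trainer.fit(model, train_loader)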
@@ -1,40 +1,39 @@
 """GLVQ example using the Iris dataset."""
 
+import prototorch as pt
 import pytorch_lightning as pl
 import torch
-from prototorch.components import initializers as cinit
-from prototorch.datasets.abstract import NumpyDataset
-from sklearn.datasets import load_iris
-from torch.utils.data import DataLoader
-
-from prototorch.models.callbacks.visualization import VisGLVQ2D
-from prototorch.models.glvq import GLVQ
 
 if __name__ == "__main__":
     # Dataset
+    from sklearn.datasets import load_iris
     x_train, y_train = load_iris(return_X_y=True)
     x_train = x_train[:, [0, 2]]
-    train_ds = NumpyDataset(x_train, y_train)
+    train_ds = pt.datasets.NumpyDataset(x_train, y_train)
 
     # Dataloaders
-    train_loader = DataLoader(train_ds, num_workers=0, batch_size=150)
+    train_loader = torch.utils.data.DataLoader(train_ds,
+                                               num_workers=0,
+                                               batch_size=150)
 
     # Hyperparameters
     hparams = dict(
         nclasses=3,
         prototypes_per_class=2,
-        prototype_initializer=cinit.StratifiedMeanInitializer(
-            torch.Tensor(x_train), torch.Tensor(y_train)),
+        prototype_initializer=pt.components.SMI(train_ds),
         lr=0.01,
     )
 
     # Initialize the model
-    model = GLVQ(hparams)
+    model = pt.models.GLVQ(hparams)
 
+    # Callbacks
+    vis = pt.models.VisGLVQ2D(data=(x_train, y_train))
+
     # Setup trainer
     trainer = pl.Trainer(
         max_epochs=50,
-        callbacks=[VisGLVQ2D(x_train, y_train)],
+        callbacks=[vis],
     )
 
     # Training loop
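The recurring change in this and the following hunks is the initializer API: the old two-tensor `cinit.StratifiedMeanInitializer(torch.Tensor(x_train), torch.Tensor(y_train))` becomes `pt.components.SMI(...)`, which is fed the dataset object. A minimal sketch of the new-style GLVQ setup, using only names taken from the added lines:

    import prototorch as pt
    from sklearn.datasets import load_iris

    # Wrap the raw numpy arrays once; the initializer and the visualizer
    # can then consume the dataset object or the raw arrays directly.
    x_train, y_train = load_iris(return_X_y=True)
    x_train = x_train[:, [0, 2]]
    train_ds = pt.datasets.NumpyDataset(x_train, y_train)

    hparams = dict(
        nclasses=3,
        prototypes_per_class=2,
        prototype_initializer=pt.components.SMI(train_ds),
        lr=0.01,
    )
    model = pt.models.GLVQ(hparams)
    vis = pt.models.VisGLVQ2D(data=(x_train, y_train))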
@@ -1,14 +1,8 @@
 """GLVQ example using the spiral dataset."""
 
+import prototorch as pt
 import pytorch_lightning as pl
 import torch
-from prototorch.components import initializers as cinit
-from prototorch.datasets.abstract import NumpyDataset
-from prototorch.datasets.spiral import make_spiral
-from torch.utils.data import DataLoader
-
-from prototorch.models.callbacks.visualization import VisGLVQ2D
-from prototorch.models.glvq import GLVQ
 
 
 class StopOnNaN(pl.Callback):
@@ -23,29 +17,28 @@ class StopOnNaN(pl.Callback):
 
 if __name__ == "__main__":
     # Dataset
-    x_train, y_train = make_spiral(n_samples=600, noise=0.6)
-    train_ds = NumpyDataset(x_train, y_train)
+    train_ds = pt.datasets.Spiral(n_samples=600, noise=0.6)
 
     # Dataloaders
-    train_loader = DataLoader(train_ds, num_workers=0, batch_size=256)
+    train_loader = torch.utils.data.DataLoader(train_ds,
+                                               num_workers=0,
+                                               batch_size=256)
 
     # Hyperparameters
     hparams = dict(
         nclasses=2,
         prototypes_per_class=20,
-        prototype_initializer=cinit.SSI(torch.Tensor(x_train),
-                                        torch.Tensor(y_train),
-                                        noise=1e-7),
+        prototype_initializer=pt.components.SSI(train_ds, noise=1e-7),
         transfer_function="sigmoid_beta",
        transfer_beta=10.0,
         lr=0.01,
     )
 
     # Initialize the model
-    model = GLVQ(hparams)
+    model = pt.models.GLVQ(hparams)
 
     # Callbacks
-    vis = VisGLVQ2D(x_train, y_train, show_last_only=True, block=True)
+    vis = pt.models.VisGLVQ2D(train_ds, show_last_only=True, block=True)
     snan = StopOnNaN(model.proto_layer.components)
 
     # Setup trainer
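Two things change for the spiral script: the data now comes as a ready-made `pt.datasets.Spiral` object instead of `make_spiral` output wrapped in `NumpyDataset`, and the SSI initializer also takes the dataset directly. A minimal sketch of that part, using only the calls from the added lines:

    import prototorch as pt
    import torch

    # Ready-made spiral dataset replaces make_spiral(...) + NumpyDataset(...)
    train_ds = pt.datasets.Spiral(n_samples=600, noise=0.6)
    train_loader = torch.utils.data.DataLoader(train_ds,
                                               num_workers=0,
                                               batch_size=256)

    # SSI now consumes the dataset; the small noise argument is kept
    proto_init = pt.components.SSI(train_ds, noise=1e-7)

    # The visualizer can also be handed the dataset object directly
    vis = pt.models.VisGLVQ2D(train_ds, show_last_only=True, block=True)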
@@ -1,48 +1,37 @@
 """GMLVQ example using all four dimensions of the Iris dataset."""
 
+import prototorch as pt
 import pytorch_lightning as pl
 import torch
-from prototorch.components import initializers as cinit
-from prototorch.datasets.abstract import NumpyDataset
-from sklearn.datasets import load_iris
-from torch.utils.data import DataLoader
-
-from prototorch.models.callbacks.visualization import VisSiameseGLVQ2D
-from prototorch.models.glvq import GMLVQ
 
 if __name__ == "__main__":
     # Dataset
+    from sklearn.datasets import load_iris
     x_train, y_train = load_iris(return_X_y=True)
-    train_ds = NumpyDataset(x_train, y_train)
+    train_ds = pt.datasets.NumpyDataset(x_train, y_train)
 
     # Dataloaders
-    train_loader = DataLoader(train_ds, num_workers=0, batch_size=150)
-
+    train_loader = torch.utils.data.DataLoader(train_ds,
+                                               num_workers=0,
+                                               batch_size=150)
     # Hyperparameters
     hparams = dict(
         nclasses=3,
         prototypes_per_class=1,
-        prototype_initializer=cinit.SMI(torch.Tensor(x_train),
-                                        torch.Tensor(y_train)),
         input_dim=x_train.shape[1],
-        latent_dim=2,
+        latent_dim=x_train.shape[1],
+        prototype_initializer=pt.components.SMI(train_ds),
         lr=0.01,
     )
 
     # Initialize the model
-    model = GMLVQ(hparams)
-
-    # Model summary
-    print(model)
-
-    # Callbacks
-    vis = VisSiameseGLVQ2D(x_train, y_train)
-
-    # Namespace hook for the visualization to work
-    model.backbone = model.omega_layer
+    model = pt.models.GMLVQ(hparams)
 
     # Setup trainer
-    trainer = pl.Trainer(max_epochs=100, callbacks=[vis])
+    trainer = pl.Trainer(max_epochs=100)
 
     # Training loop
     trainer.fit(model, train_loader)
+
+    # Display the Lambda matrix
+    model.show_lambda()
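Beyond the `pt.*` migration, this hunk changes the GMLVQ setup itself: `latent_dim` now equals `input_dim` (a square omega mapping instead of a rank-2 projection), the visualization callback and its namespace hook are dropped, and the learned Lambda matrix is displayed after training. A condensed sketch assembled from the added and unchanged lines:

    import prototorch as pt
    import pytorch_lightning as pl
    import torch
    from sklearn.datasets import load_iris

    x_train, y_train = load_iris(return_X_y=True)
    train_ds = pt.datasets.NumpyDataset(x_train, y_train)
    train_loader = torch.utils.data.DataLoader(train_ds,
                                               num_workers=0,
                                               batch_size=150)

    hparams = dict(
        nclasses=3,
        prototypes_per_class=1,
        input_dim=x_train.shape[1],
        latent_dim=x_train.shape[1],  # was latent_dim=2 before this commit
        prototype_initializer=pt.components.SMI(train_ds),
        lr=0.01,
    )
    model = pt.models.GMLVQ(hparams)

    trainer = pl.Trainer(max_epochs=100)
    trainer.fit(model, train_loader)

    # Display the Lambda (relevance) matrix learned by GMLVQ
    model.show_lambda()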
@@ -1,47 +1,45 @@
-"""Limited Rank MLVQ example using the Tecator dataset."""
+"""Limited Rank Matrix LVQ example using the Tecator dataset."""
 
+import prototorch as pt
 import pytorch_lightning as pl
-from prototorch.components import initializers as cinit
-from prototorch.datasets.tecator import Tecator
-from torch.utils.data import DataLoader
-
-from prototorch.models.callbacks.visualization import VisSiameseGLVQ2D
-from prototorch.models.glvq import GMLVQ
+import torch
 
 if __name__ == "__main__":
     # Dataset
-    train_ds = Tecator(root="./datasets/", train=True)
+    train_ds = pt.datasets.Tecator(root="~/datasets/", train=True)
 
     # Reproducibility
     pl.utilities.seed.seed_everything(seed=42)
 
     # Dataloaders
-    train_loader = DataLoader(train_ds, num_workers=0, batch_size=32)
-
-    # Grab the full dataset to warm-start prototypes
-    x, y = next(iter(DataLoader(train_ds, batch_size=len(train_ds))))
+    train_loader = torch.utils.data.DataLoader(train_ds,
+                                               num_workers=0,
+                                               batch_size=32)
 
     # Hyperparameters
     hparams = dict(
         nclasses=2,
         prototypes_per_class=2,
-        prototype_initializer=cinit.SMI(x, y),
-        input_dim=x.shape[1],
+        input_dim=100,
         latent_dim=2,
-        lr=0.01,
+        prototype_initializer=pt.components.SMI(train_ds),
+        lr=0.001,
     )
 
     # Initialize the model
-    model = GMLVQ(hparams)
+    model = pt.models.GMLVQ(hparams)
 
     # Model summary
     print(model)
 
     # Callbacks
-    vis = VisSiameseGLVQ2D(x, y)
+    vis = pt.models.VisSiameseGLVQ2D(train_ds, border=0.1)
 
     # Namespace hook for the visualization to work
     model.backbone = model.omega_layer
 
     # Setup trainer
-    trainer = pl.Trainer(max_epochs=100, callbacks=[vis])
+    trainer = pl.Trainer(max_epochs=200, callbacks=[vis])
 
     # Training loop
     trainer.fit(model, train_loader)
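The Tecator script no longer pulls the whole dataset through a throw-away DataLoader just to hand `(x, y)` tensors to the initializer and the visualizer; both now take the dataset object. A minimal sketch of the updated flow, with the calls taken from the added and unchanged lines:

    import prototorch as pt
    import pytorch_lightning as pl
    import torch

    train_ds = pt.datasets.Tecator(root="~/datasets/", train=True)
    pl.utilities.seed.seed_everything(seed=42)
    train_loader = torch.utils.data.DataLoader(train_ds,
                                               num_workers=0,
                                               batch_size=32)

    hparams = dict(
        nclasses=2,
        prototypes_per_class=2,
        input_dim=100,
        latent_dim=2,  # low-rank omega: 100-dimensional spectra mapped to 2D
        prototype_initializer=pt.components.SMI(train_ds),
        lr=0.001,
    )
    model = pt.models.GMLVQ(hparams)

    # Visualization consumes the dataset; the backbone hook from the script is kept
    vis = pt.models.VisSiameseGLVQ2D(train_ds, border=0.1)
    model.backbone = model.omega_layer

    trainer = pl.Trainer(max_epochs=200, callbacks=[vis])
    trainer.fit(model, train_loader)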
@@ -1,50 +1,40 @@
 """Neural Gas example using the Iris dataset."""
 
+import prototorch as pt
 import pytorch_lightning as pl
-from prototorch.datasets.abstract import NumpyDataset
-from sklearn.datasets import load_iris
-from sklearn.preprocessing import StandardScaler
-from torch.utils.data import DataLoader
-
-from prototorch.models.callbacks.visualization import VisNG2D
-from prototorch.models.neural_gas import NeuralGas
+import torch
 
 if __name__ == "__main__":
-    # Dataset
+    # Prepare and pre-process the dataset
+    from sklearn.datasets import load_iris
+    from sklearn.preprocessing import StandardScaler
     x_train, y_train = load_iris(return_X_y=True)
     x_train = x_train[:, [0, 2]]
     scaler = StandardScaler()
     scaler.fit(x_train)
     x_train = scaler.transform(x_train)
 
-    train_ds = NumpyDataset(x_train, y_train)
+    train_ds = pt.datasets.NumpyDataset(x_train, y_train)
 
     # Dataloaders
-    train_loader = DataLoader(train_ds, num_workers=0, batch_size=150)
+    train_loader = torch.utils.data.DataLoader(train_ds,
                                                num_workers=0,
                                                batch_size=150)
 
     # Hyperparameters
-    hparams = dict(
-        input_dim=x_train.shape[1],
-        num_prototypes=30,
-        lr=0.01,
-    )
+    hparams = dict(num_prototypes=30, lr=0.03)
 
     # Initialize the model
-    model = NeuralGas(hparams)
+    model = pt.models.NeuralGas(hparams)
 
     # Model summary
     print(model)
 
     # Callbacks
-    vis = VisNG2D(x_train, y_train)
+    vis = pt.models.VisNG2D(data=train_ds)
 
     # Setup trainer
-    trainer = pl.Trainer(
-        max_epochs=100,
-        callbacks=[
-            vis,
-        ],
-    )
+    trainer = pl.Trainer(max_epochs=200, callbacks=[vis])
 
     # Training loop
     trainer.fit(model, train_loader)
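For Neural Gas the hyperparameters collapse to a single line: `input_dim` is no longer passed explicitly (apparently it no longer needs to be spelled out here) and the learning rate goes from 0.01 to 0.03. A minimal sketch of the updated script, assembled from the added and unchanged lines:

    import prototorch as pt
    import pytorch_lightning as pl
    import torch
    from sklearn.datasets import load_iris
    from sklearn.preprocessing import StandardScaler

    # Prepare and pre-process the dataset
    x_train, y_train = load_iris(return_X_y=True)
    x_train = x_train[:, [0, 2]]
    scaler = StandardScaler()
    scaler.fit(x_train)
    x_train = scaler.transform(x_train)
    train_ds = pt.datasets.NumpyDataset(x_train, y_train)
    train_loader = torch.utils.data.DataLoader(train_ds,
                                               num_workers=0,
                                               batch_size=150)

    # Hyperparameters: input_dim dropped, learning rate raised
    hparams = dict(num_prototypes=30, lr=0.03)
    model = pt.models.NeuralGas(hparams)

    vis = pt.models.VisNG2D(data=train_ds)
    trainer = pl.Trainer(max_epochs=200, callbacks=[vis])
    trainer.fit(model, train_loader)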
@@ -1,13 +1,8 @@
 """Siamese GLVQ example using all four dimensions of the Iris dataset."""
 
+import prototorch as pt
 import pytorch_lightning as pl
 import torch
-from prototorch.components import initializers as cinit
-from prototorch.datasets.abstract import NumpyDataset
-from prototorch.models.callbacks.visualization import VisSiameseGLVQ2D
-from prototorch.models.glvq import SiameseGLVQ
-from sklearn.datasets import load_iris
-from torch.utils.data import DataLoader
 
 
 class Backbone(torch.nn.Module):
@@ -29,27 +24,29 @@ class Backbone(torch.nn.Module):
 
 if __name__ == "__main__":
     # Dataset
+    from sklearn.datasets import load_iris
     x_train, y_train = load_iris(return_X_y=True)
-    train_ds = NumpyDataset(x_train, y_train)
+    train_ds = pt.datasets.NumpyDataset(x_train, y_train)
 
     # Reproducibility
     pl.utilities.seed.seed_everything(seed=2)
 
     # Dataloaders
-    train_loader = DataLoader(train_ds, num_workers=0, batch_size=150)
+    train_loader = torch.utils.data.DataLoader(train_ds,
                                                num_workers=0,
                                                batch_size=150)
 
     # Hyperparameters
     hparams = dict(
         nclasses=3,
         prototypes_per_class=2,
-        prototype_initializer=cinit.SMI(torch.Tensor(x_train),
                                        torch.Tensor(y_train)),
+        prototype_initializer=pt.components.SMI((x_train, y_train)),
         proto_lr=0.001,
         bb_lr=0.001,
     )
 
     # Initialize the model
-    model = SiameseGLVQ(
+    model = pt.models.SiameseGLVQ(
         hparams,
         backbone_module=Backbone,
     )
@@ -58,7 +55,7 @@ if __name__ == "__main__":
     print(model)
 
     # Callbacks
-    vis = VisSiameseGLVQ2D(x_train, y_train, border=0.1)
+    vis = pt.models.VisSiameseGLVQ2D(data=(x_train, y_train), border=0.1)
 
     # Setup trainer
     trainer = pl.Trainer(max_epochs=100, callbacks=[vis])
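One detail worth flagging: unlike the other scripts, the Siamese example feeds `pt.components.SMI` a plain `(x_train, y_train)` tuple rather than the wrapped dataset, so the initializer apparently accepts either form. A minimal sketch of the new-style hyperparameters and model call (the `Backbone` module is the one defined earlier in the script and is not repeated here):

    import prototorch as pt
    from sklearn.datasets import load_iris

    x_train, y_train = load_iris(return_X_y=True)
    train_ds = pt.datasets.NumpyDataset(x_train, y_train)

    # Both call styles occur in this commit:
    init_from_dataset = pt.components.SMI(train_ds)          # GLVQ/GMLVQ examples
    init_from_tuple = pt.components.SMI((x_train, y_train))  # this Siamese example

    hparams = dict(
        nclasses=3,
        prototypes_per_class=2,
        prototype_initializer=init_from_tuple,
        proto_lr=0.001,
        bb_lr=0.001,
    )
    # model = pt.models.SiameseGLVQ(hparams, backbone_module=Backbone)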