import torch
import torchmetrics

from prototorch.core.competitions import CBCC
from prototorch.core.components import ReasoningComponents
from prototorch.core.initializers import RandomReasoningsInitializer
from prototorch.core.losses import MarginLoss
from prototorch.core.similarities import euclidean_similarity
from prototorch.nn.wrappers import LambdaLayer

from .glvq import SiameseGLVQ
from .mixin import ImagePrototypesMixin


class CBC(SiameseGLVQ):
    """Classification-By-Components."""

    def __init__(self, hparams, **kwargs):
        super().__init__(hparams, **kwargs)

        similarity_fn = kwargs.get("similarity_fn", euclidean_similarity)
        components_initializer = kwargs.get("components_initializer", None)
        reasonings_initializer = kwargs.get("reasonings_initializer",
                                            RandomReasoningsInitializer())
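        # Components plus per-class reasoning weights, allocated according to
        # `hparams.distribution`.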
        self.components_layer = ReasoningComponents(
            self.hparams.distribution,
            components_initializer=components_initializer,
            reasonings_initializer=reasonings_initializer,
        )
        self.similarity_layer = LambdaLayer(similarity_fn)
        self.competition_layer = CBCC()

        # Namespace hook
        self.proto_layer = self.components_layer

        self.loss = MarginLoss(self.hparams.margin)

    def forward(self, x):
        components, reasonings = self.components_layer()
        latent_x = self.backbone(x)
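        # Gradients reach the backbone through the component pass only when
        # `both_path_gradients` is set; the backbone is re-enabled afterwards.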
        self.backbone.requires_grad_(self.both_path_gradients)
        latent_components = self.backbone(components)
        self.backbone.requires_grad_(True)
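        # Component-wise detection probabilities, combined with the reasoning
        # weights by the competition layer to yield class probabilities.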
        detections = self.similarity_layer(latent_x, latent_components)
        probs = self.competition_layer(detections, reasonings)
        return probs

    def shared_step(self, batch, batch_idx, optimizer_idx=None):
        x, y = batch
        y_pred = self(x)
        num_classes = self.num_classes
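        # The margin loss compares predicted class probabilities against
        # one-hot encoded targets.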
        y_true = torch.nn.functional.one_hot(y.long(), num_classes=num_classes)
        loss = self.loss(y_pred, y_true).mean()
        return y_pred, loss

    def training_step(self, batch, batch_idx, optimizer_idx=None):
        y_pred, train_loss = self.shared_step(batch, batch_idx, optimizer_idx)
        preds = torch.argmax(y_pred, dim=1)
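        # Log epoch-level training accuracy to the progress bar and the logger.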
        accuracy = torchmetrics.functional.accuracy(preds.int(),
                                                    batch[1].int())
        self.log("train_acc",
                 accuracy,
                 on_step=False,
                 on_epoch=True,
                 prog_bar=True,
                 logger=True)
        return train_loss

    def predict(self, x):
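        # Inference helper: returns hard class labels without tracking gradients.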
        with torch.no_grad():
            y_pred = self(x)
            y_pred = torch.argmax(y_pred, dim=1)
        return y_pred


class ImageCBC(ImagePrototypesMixin, CBC):
    """CBC model that constrains the components to the range [0, 1] by
    clamping after updates.
    """
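

# Minimal usage sketch (illustrative only; the hparams values, the SMCI
# components initializer, the `train_ds` dataset, and the random input are
# assumptions, not prescribed by this module):
#
#   from prototorch.core.initializers import SMCI
#
#   hparams = dict(distribution=[2, 2, 2], margin=0.1, lr=0.01)
#   model = CBC(hparams, components_initializer=SMCI(train_ds))
#   probs = model(torch.rand(8, 2))  # class probabilities, one column per class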