feat: add binnam_xor.py

Jensun Ravichandran
2021-07-15 18:19:28 +02:00
parent 823b05e390
commit cb7fb91c95
3 changed files with 121 additions and 3 deletions


@@ -16,7 +16,14 @@ class BinaryNAM(ProtoTorchBolt):
     def __init__(self, hparams: dict, extractors: torch.nn.ModuleList,
                  **kwargs):
         super().__init__(hparams, **kwargs)
+
+        # Default hparams
+        self.hparams.setdefault("threshold", 0.5)
+
         self.extractors = extractors
+        self.linear = torch.nn.Linear(in_features=len(extractors),
+                                      out_features=1,
+                                      bias=True)
 
     def extract(self, x):
         """Apply the local extractors batch-wise on features."""
@@ -26,12 +33,13 @@ class BinaryNAM(ProtoTorchBolt):
         return out
 
     def forward(self, x):
-        x = self.extract(x).sum(1)
-        return torch.nn.functional.sigmoid(x)
+        x = self.extract(x)
+        x = self.linear(x)
+        return torch.sigmoid(x)
 
     def training_step(self, batch, batch_idx, optimizer_idx=None):
         x, y = batch
-        preds = self(x)
+        preds = self(x).squeeze()
         train_loss = torch.nn.functional.binary_cross_entropy(preds, y.float())
         self.log("train_loss", train_loss)
         accuracy = torchmetrics.functional.accuracy(preds.int(), y.int())
@@ -42,3 +50,9 @@ class BinaryNAM(ProtoTorchBolt):
                  prog_bar=True,
                  logger=True)
         return train_loss
+
+    def predict(self, x):
+        out = self(x)
+        pred = torch.zeros_like(out, device=self.device)
+        pred[out > self.hparams.threshold] = 1
+        return pred
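
The new binnam_xor.py itself is not shown in this view. As a rough, standalone illustration of what the changed pieces do (per-feature extractors, the new linear combination, the sigmoid forward pass, and threshold-based predict()), here is a minimal sketch in plain PyTorch without Lightning or ProtoTorch; the class name, extractor sizes, and the toy XOR tensors are assumptions for illustration, not code from this commit.

# Minimal sketch only: mirrors the forward/predict logic of BinaryNAM in plain
# PyTorch. Names, extractor sizes, and the toy data are illustrative assumptions.
import torch


class TinyBinaryNAM(torch.nn.Module):
    """One small extractor per input feature, combined by a linear layer."""

    def __init__(self, num_features: int, threshold: float = 0.5):
        super().__init__()
        self.threshold = threshold
        # One univariate extractor per feature, as in a neural additive model.
        self.extractors = torch.nn.ModuleList([
            torch.nn.Sequential(
                torch.nn.Linear(1, 8),
                torch.nn.ReLU(),
                torch.nn.Linear(8, 1),
            ) for _ in range(num_features)
        ])
        self.linear = torch.nn.Linear(in_features=num_features,
                                      out_features=1,
                                      bias=True)

    def forward(self, x):
        # Apply each extractor to its own feature column, then combine linearly.
        feats = [e(x[:, i:i + 1]) for i, e in enumerate(self.extractors)]
        x = torch.cat(feats, dim=1)
        return torch.sigmoid(self.linear(x))

    @torch.no_grad()
    def predict(self, x):
        # Hard 0/1 labels via the threshold, like the predict() added above.
        out = self(x)
        return (out > self.threshold).float()


if __name__ == "__main__":
    # Toy XOR data. Note: a strictly additive model over single features cannot
    # represent XOR exactly, so this mainly exercises the API.
    x = torch.tensor([[0., 0.], [0., 1.], [1., 0.], [1., 1.]])
    y = torch.tensor([0., 1., 1., 0.])

    model = TinyBinaryNAM(num_features=2)
    optimizer = torch.optim.Adam(model.parameters(), lr=0.05)
    for _ in range(1000):
        optimizer.zero_grad()
        loss = torch.nn.functional.binary_cross_entropy(model(x).squeeze(), y)
        loss.backward()
        optimizer.step()

    print(model.predict(x).squeeze())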


@@ -117,6 +117,24 @@ class Vis2DAbstract(pl.Callback):
         plt.close()
 
+
+class Vis2D(Vis2DAbstract):
+
+    def on_epoch_end(self, trainer, pl_module):
+        if not self.precheck(trainer):
+            return True
+
+        x_train, y_train = self.x_train, self.y_train
+        ax = self.setup_ax(xlabel="Data dimension 1",
+                           ylabel="Data dimension 2")
+        self.plot_data(ax, x_train, y_train)
+        mesh_input, xx, yy = mesh2d(x_train, self.border, self.resolution)
+        mesh_input = torch.from_numpy(mesh_input).type_as(x_train)
+        y_pred = pl_module.predict(mesh_input)
+        y_pred = y_pred.cpu().reshape(xx.shape)
+        ax.contourf(xx, yy, y_pred, cmap=self.cmap, alpha=0.35)
+
+        self.log_and_display(trainer, pl_module)
+
 
 class VisGLVQ2D(Vis2DAbstract):
 
     def on_epoch_end(self, trainer, pl_module):
         if not self.precheck(trainer):
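
The Vis2D callback added above boils down to a standard grid-predict-contourf pattern. A self-contained sketch of that pattern follows; mesh2d is a ProtoTorch helper, so numpy.meshgrid stands in for it here, and the function and argument names are assumptions, not part of this commit.

# Sketch of the decision-boundary plot that Vis2D produces: build a 2D grid
# around the training data, run predict() on it, and draw a filled contour.
# numpy.meshgrid replaces the mesh2d helper; all names here are illustrative.
import matplotlib.pyplot as plt
import numpy as np
import torch


def plot_decision_boundary(model, x_train, y_train, border=0.1, resolution=100):
    # x_train: (N, 2) numpy array, y_train: (N,) labels,
    # model: anything exposing predict(), like the BinaryNAM change above.
    x_min, x_max = x_train[:, 0].min() - border, x_train[:, 0].max() + border
    y_min, y_max = x_train[:, 1].min() - border, x_train[:, 1].max() + border
    xx, yy = np.meshgrid(np.linspace(x_min, x_max, resolution),
                         np.linspace(y_min, y_max, resolution))
    mesh_input = np.c_[xx.ravel(), yy.ravel()].astype(np.float32)

    with torch.no_grad():
        y_pred = model.predict(torch.from_numpy(mesh_input))
    y_pred = y_pred.cpu().numpy().reshape(xx.shape)

    fig, ax = plt.subplots()
    ax.contourf(xx, yy, y_pred, alpha=0.35)
    ax.scatter(x_train[:, 0], x_train[:, 1], c=y_train, edgecolors="k")
    ax.set_xlabel("Data dimension 1")
    ax.set_ylabel("Data dimension 2")
    return fig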