Use 'num_' in all variable names

Alexander Engelsberger
2021-05-25 15:41:10 +02:00
parent e7e6bf9173
commit 72e064338c
8 changed files with 24 additions and 23 deletions

View File

@@ -24,10 +24,10 @@ if __name__ == "__main__":
batch_size=150)
# Hyperparameters
- nclasses = 3
+ num_classes = 3
prototypes_per_class = 2
hparams = dict(
- distribution=(nclasses, prototypes_per_class),
+ distribution=(num_classes, prototypes_per_class),
lr=0.01,
)
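
The hunk above is representative of the change in every example script: the hyperparameter variable formerly called nclasses is now num_classes. A minimal sketch of how the renamed block reads after this commit; the Iris dataset and DataLoader setup around it are assumptions added for context and are not part of the diff:

import torch
import prototorch as pt

if __name__ == "__main__":
    # Assumed setup (not shown in the diff): dataset and dataloader
    train_ds = pt.datasets.Iris()
    train_loader = torch.utils.data.DataLoader(train_ds,
                                               num_workers=0,
                                               batch_size=150)

    # Hyperparameters -- `nclasses` is renamed to `num_classes`
    num_classes = 3
    prototypes_per_class = 2
    hparams = dict(
        distribution=(num_classes, prototypes_per_class),
        lr=0.01,
    )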

View File

@@ -13,7 +13,7 @@ if __name__ == "__main__":
args = parser.parse_args()
# Dataset
- train_ds = pt.datasets.Spiral(n_samples=600, noise=0.6)
+ train_ds = pt.datasets.Spiral(num_samples=600, noise=0.6)
# Dataloaders
train_loader = torch.utils.data.DataLoader(train_ds,
@@ -21,10 +21,10 @@ if __name__ == "__main__":
batch_size=256)
# Hyperparameters
- nclasses = 2
+ num_classes = 2
prototypes_per_class = 20
hparams = dict(
- distribution=(nclasses, prototypes_per_class),
+ distribution=(num_classes, prototypes_per_class),
transfer_function="sigmoid_beta",
transfer_beta=10.0,
lr=0.01,
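
Besides the num_classes rename, this example also switches the dataset keyword from n_samples to num_samples. A minimal sketch of the Spiral setup after the change, built only from values that appear in the diff:

import torch
import prototorch as pt

# `n_samples` is renamed to `num_samples`
train_ds = pt.datasets.Spiral(num_samples=600, noise=0.6)
train_loader = torch.utils.data.DataLoader(train_ds, batch_size=256)

# `nclasses` is renamed to `num_classes`
num_classes = 2
prototypes_per_class = 20
hparams = dict(
    distribution=(num_classes, prototypes_per_class),
    transfer_function="sigmoid_beta",
    transfer_beta=10.0,
    lr=0.01,
)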

View File

@@ -22,10 +22,10 @@ if __name__ == "__main__":
num_workers=0,
batch_size=150)
# Hyperparameters
- nclasses = 3
+ num_classes = 3
prototypes_per_class = 1
hparams = dict(
- distribution=(nclasses, prototypes_per_class),
+ distribution=(num_classes, prototypes_per_class),
input_dim=x_train.shape[1],
latent_dim=x_train.shape[1],
proto_lr=0.01,

View File

@@ -41,12 +41,12 @@ if __name__ == "__main__":
batch_size=256)
# Hyperparameters
- nclasses = 10
+ num_classes = 10
prototypes_per_class = 2
hparams = dict(
input_dim=28 * 28,
latent_dim=28 * 28,
- distribution=(nclasses, prototypes_per_class),
+ distribution=(num_classes, prototypes_per_class),
proto_lr=0.01,
bb_lr=0.01,
)
@@ -61,7 +61,7 @@ if __name__ == "__main__":
# Callbacks
vis = pt.models.VisImgComp(
data=train_ds,
- nrow=5,
+ num_columns=5,
show=False,
tensorboard=True,
random_data=20,
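
The keyword of the image visualization callback changes as well, from nrow to num_columns. A minimal sketch of the callback construction after this commit; the MNIST dataset setup is an assumption for self-containment and is not shown in the diff:

import prototorch as pt
from torchvision import transforms
from torchvision.datasets import MNIST

# Assumed dataset setup (not part of the diff): MNIST as tensors
train_ds = MNIST("~/datasets",
                 train=True,
                 download=True,
                 transform=transforms.ToTensor())

# `nrow` is renamed to `num_columns`
vis = pt.models.VisImgComp(
    data=train_ds,
    num_columns=5,
    show=False,
    tensorboard=True,
    random_data=20,
)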

View File

@@ -24,10 +24,10 @@ if __name__ == "__main__":
test_loader = torch.utils.data.DataLoader(test_ds, batch_size=32)
# Hyperparameters
- nclasses = 2
+ num_classes = 2
prototypes_per_class = 2
hparams = dict(
- distribution=(nclasses, prototypes_per_class),
+ distribution=(num_classes, prototypes_per_class),
input_dim=100,
latent_dim=2,
proto_lr=0.001,