From 7b93cd4ad595d14961c6bd5ca6c2061d2f9bc445 Mon Sep 17 00:00:00 2001
From: Alexander Engelsberger
Date: Mon, 30 Aug 2021 17:15:40 +0200
Subject: [PATCH] feat(compatibility): Python3.6 compatibility

---
 .travis.yml                    |  6 +++++-
 prototorch/models/__init__.py  |  2 --
 prototorch/models/abstract.py  |  6 +-----
 prototorch/models/callbacks.py |  2 +-
 prototorch/models/glvq.py      |  3 ++-
 prototorch/models/lvq.py       |  4 ++--
 prototorch/models/vis.py       |  2 --
 setup.py                       |  5 ++++-
 tests/test_examples.sh         | 20 ++++++++++++++++++--
 9 files changed, 33 insertions(+), 17 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 518b106..6474a58 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,7 +1,11 @@
 dist: bionic
 sudo: false
 language: python
-python: 3.9
+python:
+  - 3.9
+  - 3.8
+  - 3.7
+  - 3.6
 cache:
   directories:
     - "$HOME/.cache/pip"
diff --git a/prototorch/models/__init__.py b/prototorch/models/__init__.py
index 728c922..466b2da 100644
--- a/prototorch/models/__init__.py
+++ b/prototorch/models/__init__.py
@@ -1,7 +1,5 @@
 """`models` plugin for the `prototorch` package."""
 
-from importlib.metadata import PackageNotFoundError, version
-
 from .callbacks import PrototypeConvergence, PruneLoserPrototypes
 from .cbc import CBC, ImageCBC
 from .glvq import (
diff --git a/prototorch/models/abstract.py b/prototorch/models/abstract.py
index d08dbec..34e5b76 100644
--- a/prototorch/models/abstract.py
+++ b/prototorch/models/abstract.py
@@ -1,7 +1,5 @@
 """Abstract classes to be inherited by prototorch models."""
 
-from typing import Final, final
-
 import pytorch_lightning as pl
 import torch
 import torchmetrics
@@ -43,7 +41,6 @@ class ProtoTorchBolt(pl.LightningModule):
         else:
             return optimizer
 
-    @final
     def reconfigure_optimizers(self):
         self.trainer.accelerator.setup_optimizers(self.trainer)
 
@@ -175,7 +172,7 @@ class NonGradientMixin(ProtoTorchMixin):
     """Mixin for custom non-gradient optimization."""
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.automatic_optimization: Final = False
+        self.automatic_optimization = False
 
     def training_step(self, train_batch, batch_idx, optimizer_idx=None):
         raise NotImplementedError
@@ -183,7 +180,6 @@ class NonGradientMixin(ProtoTorchMixin):
 
 class ImagePrototypesMixin(ProtoTorchMixin):
     """Mixin for models with image prototypes."""
-    @final
     def on_train_batch_end(self, outputs, batch, batch_idx, dataloader_idx):
         """Constrain the components to the range [0, 1] by clamping after updates."""
         self.proto_layer.components.data.clamp_(0.0, 1.0)
diff --git a/prototorch/models/callbacks.py b/prototorch/models/callbacks.py
index ed3a141..095f41d 100644
--- a/prototorch/models/callbacks.py
+++ b/prototorch/models/callbacks.py
@@ -55,7 +55,7 @@ class PruneLoserPrototypes(pl.Callback):
         distribution = dict(zip(labels.tolist(), counts.tolist()))
         if self.verbose:
             print(f"Re-adding pruned prototypes...")
-            print(f"{distribution=}")
+            print(f"distribution={distribution}")
         pl_module.add_prototypes(
             distribution=distribution,
             components_initializer=self.prototypes_initializer)
diff --git a/prototorch/models/glvq.py b/prototorch/models/glvq.py
index 6834399..ce4e1d5 100644
--- a/prototorch/models/glvq.py
+++ b/prototorch/models/glvq.py
@@ -112,7 +112,8 @@ class SiameseGLVQ(GLVQ):
         proto_opt = self.optimizer(self.proto_layer.parameters(),
                                    lr=self.hparams.proto_lr)
         # Only add a backbone optimizer if backbone has trainable parameters
-        if (bb_params := list(self.backbone.parameters())):
+        bb_params = list(self.backbone.parameters())
+        if (bb_params):
             bb_opt = self.optimizer(bb_params, lr=self.hparams.bb_lr)
             optimizers = [proto_opt, bb_opt]
         else:
diff --git a/prototorch/models/lvq.py b/prototorch/models/lvq.py
index b06a8dc..f398f93 100644
--- a/prototorch/models/lvq.py
+++ b/prototorch/models/lvq.py
@@ -28,8 +28,8 @@ class LVQ1(NonGradientMixin, GLVQ):
         self.proto_layer.load_state_dict({"_components": updated_protos},
                                          strict=False)
 
-        print(f"{dis=}")
-        print(f"{y=}")
+        print(f"dis={dis}")
+        print(f"y={y}")
 
         # Logging
         self.log_acc(dis, y, tag="train_acc")
diff --git a/prototorch/models/vis.py b/prototorch/models/vis.py
index 4f6b696..0f39f1a 100644
--- a/prototorch/models/vis.py
+++ b/prototorch/models/vis.py
@@ -251,8 +251,6 @@ class VisImgComp(Vis2DAbstract):
                                    size=self.embedding_data,
                                    replace=False)
             data = self.x_train[ind]
-            # print(f"{data.shape=}")
-            # print(f"{self.y_train[ind].shape=}")
             tb.add_embedding(data.view(len(ind), -1),
                              label_img=data,
                              global_step=None,
diff --git a/setup.py b/setup.py
index 12f461d..dd34adc 100644
--- a/setup.py
+++ b/setup.py
@@ -63,7 +63,7 @@ setup(
     url=PROJECT_URL,
     download_url=DOWNLOAD_URL,
     license="MIT",
-    python_requires=">=3.9",
+    python_requires=">=3.6",
     install_requires=INSTALL_REQUIRES,
     extras_require={
         "dev": DEV,
@@ -80,6 +80,9 @@ setup(
         "License :: OSI Approved :: MIT License",
         "Natural Language :: English",
         "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.7",
+        "Programming Language :: Python :: 3.6",
         "Operating System :: OS Independent",
         "Topic :: Scientific/Engineering :: Artificial Intelligence",
         "Topic :: Software Development :: Libraries",
diff --git a/tests/test_examples.sh b/tests/test_examples.sh
index 68f105b..fbac343 100755
--- a/tests/test_examples.sh
+++ b/tests/test_examples.sh
@@ -1,11 +1,27 @@
 #! /bin/bash
+
+# Read Flags
+gpu=0
+while [ -n "$1" ]; do
+    case "$1" in
+        --gpu) gpu=1;;
+        -g) gpu=1;;
+        *) path=$1;;
+    esac
+    shift
+done
+
+python --version
+echo "Using GPU: " $gpu
+
+# Loop
 failed=0
-for example in $(find $1 -maxdepth 1 -name "*.py")
+for example in $(find $path -maxdepth 1 -name "*.py")
 do
     echo -n "$x" $example '... '
-    export DISPLAY= && python $example --fast_dev_run 1 --gpus 0 &> run_log.txt
+    export DISPLAY= && python $example --fast_dev_run 1 --gpus $gpu &> run_log.txt
     if [[ $? -ne 0 ]]; then
         echo "FAILED!!"
         cat run_log.txt