Fix a bunch of Codacy code-style issues

blackfly 2020-04-11 15:47:26 +02:00
parent 8f3a43f62a
commit 8c629c0cb1
5 changed files with 23 additions and 14 deletions

View File

@@ -1,9 +1,11 @@
-# Release 0.1.1-dev0
+# ProtoTorch Releases
-## Includes
+## Release 0.1.1-dev0
+### Includes
 - Bugfixes.
 - 100% test coverage.
-# Release 0.1.0-dev0
+## Release 0.1.0-dev0
 Initial public release of ProtoTorch.

View File

@@ -21,6 +21,7 @@ x_train = scaler.transform(x_train)
 # Define the GLVQ model
 class Model(torch.nn.Module):
     def __init__(self, **kwargs):
+        """GLVQ model."""
         super().__init__()
         self.p1 = Prototypes1D(input_dim=2,
                                prototypes_per_class=1,

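The hunk above shows only the top of the example model. For orientation, here is a minimal, self-contained sketch of the same GLVQ pattern in plain PyTorch (not part of the commit); `GLVQModel`, `nclasses`, and `prototype_labels` are illustrative names, not the `Prototypes1D` API used in the example file.

```python
import torch


class GLVQModel(torch.nn.Module):
    """Sketch of a GLVQ-style model: one trainable prototype per class."""

    def __init__(self, input_dim=2, nclasses=3):
        super().__init__()
        # Stand-in for Prototypes1D: prototypes as a plain trainable parameter.
        self.prototypes = torch.nn.Parameter(torch.randn(nclasses, input_dim))
        self.prototype_labels = torch.arange(nclasses)

    def forward(self, x):
        # Pairwise Euclidean distances between inputs and prototypes.
        distances = torch.cdist(x, self.prototypes)
        return distances, self.prototype_labels


model = GLVQModel()
dists, plabels = model(torch.randn(5, 2))  # dists has shape (5, 3)
```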
View File

@@ -16,16 +16,17 @@ def register_activation(f):
 @register_activation
 # @torch.jit.script
 def identity(x, beta=torch.tensor([0])):
-    """:math:`f(x) = x`"""
+    """:math:`f(x) = x`."""
     return x
 @register_activation
 # @torch.jit.script
 def sigmoid_beta(x, beta=torch.tensor([10])):
-    """:math:`f(x) = \\frac{1}{1 + e^{-\\beta x}}`
+    r""":math:`f(x) = \\frac{1}{1 + e^{-\\beta x}}`.
     Keyword Arguments:
     __________________
     beta (float): Parameter :math:`\\beta`
     """
     out = torch.reciprocal(1.0 + torch.exp(-int(beta.item()) * x))
@@ -35,9 +36,10 @@ def sigmoid_beta(x, beta=torch.tensor([10])):
 @register_activation
 # @torch.jit.script
 def swish_beta(x, beta=torch.tensor([10])):
-    """:math:`f(x) = \\frac{x}{1 + e^{-\\beta x}}`
+    r""":math:`f(x) = \\frac{x}{1 + e^{-\\beta x}}`.
     Keyword Arguments:
     __________________
     beta (float): Parameter :math:`\\beta`
     """
     out = x * sigmoid_beta(x, beta=beta)

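The docstrings above give the formulas behind `sigmoid_beta` and `swish_beta`. As a quick plain-torch reference (not the committed code, which takes `beta` as a tensor and casts it via `int(beta.item())`), the same formulas with an ordinary float `beta`:

```python
import torch


def sigmoid_beta_ref(x, beta=10.0):
    # f(x) = 1 / (1 + exp(-beta * x))
    return torch.reciprocal(1.0 + torch.exp(-beta * x))


def swish_beta_ref(x, beta=10.0):
    # f(x) = x / (1 + exp(-beta * x)) = x * sigmoid_beta(x)
    return x * sigmoid_beta_ref(x, beta=beta)


x = torch.linspace(-1.0, 1.0, steps=5)
print(sigmoid_beta_ref(x))
print(swish_beta_ref(x))
```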
View File

@@ -4,7 +4,8 @@ import torch
 def squared_euclidean_distance(x, y):
-    """Compute the squared Euclidean distance between :math:`x` and :math:`y`.
+    """
+    Compute the squared Euclidean distance between :math:`x` and :math:`y`.
     Expected dimension of x is 2.
     Expected dimension of y is 2.
@@ -17,7 +18,8 @@ def squared_euclidean_distance(x, y):
 def euclidean_distance(x, y):
-    """Compute the Euclidean distance between :math:`x` and :math:`y`.
+    """
+    Compute the Euclidean distance between :math:`x` and :math:`y`.
     Expected dimension of x is 2.
     Expected dimension of y is 2.
@@ -28,7 +30,8 @@ def euclidean_distance(x, y):
 def lpnorm_distance(x, y, p):
-    """Compute :math:`{\\langle x, y \\rangle}_p`.
+    """
+    Compute :math:`{\\langle x, y \\rangle}_p`.
     Expected dimension of x is 2.
     Expected dimension of y is 2.
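All three functions above expect 2-D inputs and return a pairwise distance matrix. A plain-torch illustration of that contract (assumed shapes, not the library code):

```python
import torch

x = torch.randn(4, 3)  # 4 samples, 3 features
y = torch.randn(6, 3)  # 6 prototypes, 3 features

# Pairwise distance matrices of shape (4, 6).
euclidean = torch.cdist(x, y, p=2)
squared_euclidean = euclidean ** 2
lp3 = torch.cdist(x, y, p=3)

# Squared Euclidean distance via the expansion ||x - y||^2 = ||x||^2 - 2 x.y + ||y||^2.
expansion = x.pow(2).sum(1, keepdim=True) - 2 * x @ y.T + y.pow(2).sum(1)
assert torch.allclose(squared_euclidean, expansion, atol=1e-4)
```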
@@ -38,7 +41,8 @@ def lpnorm_distance(x, y, p):
 def omega_distance(x, y, omega):
-    """Omega distance.
+    """
+    Omega distance.
     Compute :math:`{\\langle \\Omega x, \\Omega y \\rangle}_p`
@@ -53,7 +57,8 @@ def omega_distance(x, y, omega):
 def lomega_distance(x, y, omegas):
-    """Localized Omega distance.
+    """
+    Localized Omega distance.
     Compute :math:`{\\langle \\Omega_k x, \\Omega_k y_k \\rangle}_p`

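The omega variants measure distance after projecting the data through a learned matrix (one global `omega`, or one `omega` per prototype in the localized case). A hedged sketch of that idea; the exact shape conventions and whether the result is squared may differ from the library's implementation:

```python
import torch

x = torch.randn(4, 3)      # 4 samples, 3 features
y = torch.randn(6, 3)      # 6 prototypes, 3 features
omega = torch.randn(3, 2)  # assumed layout: (input_dim, latent_dim)

# Global omega distance: project both sides, then take pairwise squared distances.
omega_dists = torch.cdist(x @ omega, y @ omega) ** 2  # shape (4, 6)

# Localized variant: one omega per prototype.
omegas = torch.randn(6, 3, 2)
proj_y = torch.einsum('kd,kdl->kl', y, omegas)    # each prototype through its own omega
proj_x = torch.einsum('nd,kdl->nkl', x, omegas)   # every sample through every omega
lomega_dists = ((proj_x - proj_y.unsqueeze(0)) ** 2).sum(-1)  # shape (4, 6)
```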
View File

@@ -7,7 +7,6 @@ from prototorch.functions.losses import glvq_loss
 class GLVQLoss(torch.nn.Module):
     """GLVQ Loss."""
     def __init__(self, margin=0.0, squashing='identity', beta=10, **kwargs):
         super().__init__(**kwargs)
         self.margin = margin
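`GLVQLoss` wraps `glvq_loss`, whose body is not shown in this diff. As a rough illustration of the standard GLVQ cost it is based on, here is a sketch under assumed shapes (not the library's implementation); the module above presumably applies its `squashing` activation (e.g. `sigmoid_beta` with the given `beta`) to this quantity plus `margin`:

```python
import torch


def glvq_cost(distances, plabels, targets):
    """mu = (d_correct - d_incorrect) / (d_correct + d_incorrect), per sample.

    distances: (batch, n_prototypes) distance matrix
    plabels:   (n_prototypes,) prototype class labels
    targets:   (batch,) ground-truth class labels
    """
    matching = plabels.unsqueeze(0) == targets.unsqueeze(1)  # (batch, n_prototypes)
    inf = torch.full_like(distances, float('inf'))
    d_correct = torch.where(matching, distances, inf).min(dim=1).values
    d_incorrect = torch.where(~matching, distances, inf).min(dim=1).values
    return (d_correct - d_incorrect) / (d_correct + d_incorrect)


distances = torch.rand(5, 3)
plabels = torch.arange(3)
targets = torch.randint(0, 3, (5,))
mu = glvq_cost(distances, plabels, targets)  # shape (5,)
```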