Minor cosmetic changes
This commit is contained in:
parent
e54bf07030
commit
f80d9648c3
@ -3,8 +3,8 @@
|
||||
## Release 0.1.1-dev0
|
||||
|
||||
### Includes
|
||||
- Bugfixes.
|
||||
- 100% test coverage.
|
||||
- Minor bugfixes.
|
||||
- 100% line coverage.
|
||||
|
||||
## Release 0.1.0-dev0
|
||||
|
||||
|
@ -16,18 +16,24 @@ def register_activation(f):
|
||||
@register_activation
# @torch.jit.script
def identity(x, beta=torch.tensor([0])):
    r"""Pass the input through unchanged.

    Definition:
        :math:`f(x) = x`

    Keyword Arguments:
        beta (`torch.tensor`): Ignored; accepted only so that all
            registered activations share the same call signature.
    """
    return x
|
||||
|
||||
|
||||
@register_activation
# @torch.jit.script
def sigmoid_beta(x, beta=torch.tensor([10])):
    r"""Sigmoid activation function with scaling.

    Definition:
        :math:`f(x) = \frac{1}{1 + e^{-\beta x}}`

    Keyword Arguments:
        beta (`torch.tensor`): Scaling parameter :math:`\beta`.
    """
    # Multiply by the tensor directly: the previous `int(beta.item())`
    # cast truncated fractional scaling factors (e.g. 0.5 -> 0), which
    # silently degenerated the sigmoid to a constant 0.5.
    out = torch.reciprocal(1.0 + torch.exp(-beta * x))
    return out
|
||||
@ -36,11 +42,13 @@ def sigmoid_beta(x, beta=torch.tensor([10])):
|
||||
@register_activation
# @torch.jit.script
def swish_beta(x, beta=torch.tensor([10])):
    r"""Swish activation function with scaling.

    Definition:
        :math:`f(x) = \frac{x}{1 + e^{-\beta x}}`

    Keyword Arguments:
        beta (`torch.tensor`): Scaling parameter :math:`\beta`,
            forwarded to :func:`sigmoid_beta`.
    """
    # Swish is the input gated by its own scaled sigmoid.
    return x * sigmoid_beta(x, beta=beta)
|
||||
@ -49,8 +57,6 @@ def swish_beta(x, beta=torch.tensor([10])):
|
||||
def get_activation(funcname):
    """Resolve an activation function by name, or pass a callable through.

    Arguments:
        funcname (str or callable): Name of a registered activation, or
            an activation callable (returned as-is).

    Returns:
        callable: The resolved activation function.

    Raises:
        NameError: If `funcname` is neither callable nor a registered
            activation name.
    """
    if callable(funcname):
        return funcname
    try:
        # Single dict lookup (EAFP) instead of membership test + .get().
        return ACTIVATIONS[funcname]
    except KeyError:
        raise NameError(f'Activation {funcname} was not found.') from None
|
||||
|
@ -30,8 +30,8 @@ def euclidean_distance(x, y):
|
||||
|
||||
|
||||
def lpnorm_distance(x, y, p):
|
||||
"""
|
||||
Compute :math:`{\\langle x, y \\rangle}_p`.
|
||||
r"""
|
||||
Compute :math:`{\langle x, y \rangle}_p`.
|
||||
|
||||
Expected dimension of x is 2.
|
||||
Expected dimension of y is 2.
|
||||
@ -41,10 +41,10 @@ def lpnorm_distance(x, y, p):
|
||||
|
||||
|
||||
def omega_distance(x, y, omega):
|
||||
"""
|
||||
r"""
|
||||
Omega distance.
|
||||
|
||||
Compute :math:`{\\langle \\Omega x, \\Omega y \\rangle}_p`
|
||||
Compute :math:`{\langle \Omega x, \Omega y \rangle}_p`
|
||||
|
||||
Expected dimension of x is 2.
|
||||
Expected dimension of y is 2.
|
||||
@ -57,10 +57,10 @@ def omega_distance(x, y, omega):
|
||||
|
||||
|
||||
def lomega_distance(x, y, omegas):
|
||||
"""
|
||||
r"""
|
||||
Localized Omega distance.
|
||||
|
||||
Compute :math:`{\\langle \\Omega_k x, \\Omega_k y_k \\rangle}_p`
|
||||
Compute :math:`{\langle \Omega_k x, \Omega_k y_k \rangle}_p`
|
||||
|
||||
Expected dimension of x is 2.
|
||||
Expected dimension of y is 2.
|
||||
|
@ -86,8 +86,6 @@ def stratified_random(x_train, y_train, prototype_distribution):
|
||||
def get_initializer(funcname):
    """Resolve a prototype initializer by name, or pass a callable through.

    Arguments:
        funcname (str or callable): Name of a registered initializer, or
            an initializer callable (returned as-is).

    Returns:
        callable: The resolved initializer function.

    Raises:
        NameError: If `funcname` is neither callable nor a registered
            initializer name.
    """
    if callable(funcname):
        return funcname
    try:
        # Single dict lookup (EAFP) instead of membership test + .get().
        return INITIALIZERS[funcname]
    except KeyError:
        raise NameError(f'Initializer {funcname} was not found.') from None
|
||||
|
Loading…
Reference in New Issue
Block a user