Minor cosmetic changes

parent e54bf07030
commit f80d9648c3
@@ -3,8 +3,8 @@
 ## Release 0.1.1-dev0
 
 ### Includes
-- Bugfixes.
-- 100% test coverage.
+- Minor bugfixes.
+- 100% line coverage.
 
 ## Release 0.1.0-dev0
 
@@ -16,18 +16,24 @@ def register_activation(f):
 @register_activation
 # @torch.jit.script
 def identity(x, beta=torch.tensor([0])):
-    """:math:`f(x) = x`."""
+    """Identity activation function.
+
+    Definition:
+        :math:`f(x) = x`
+    """
     return x
 
 
 @register_activation
 # @torch.jit.script
 def sigmoid_beta(x, beta=torch.tensor([10])):
-    r""":math:`f(x) = \\frac{1}{1 + e^{-\\beta x}}`.
+    r"""Sigmoid activation function with scaling.
 
+    Definition:
+        :math:`f(x) = \frac{1}{1 + e^{-\beta x}}`
+
     Keyword Arguments:
-    __________________
-    beta (float): Parameter :math:`\\beta`
+        beta (`torch.tensor`): Scaling parameter :math:`\beta`
     """
     out = torch.reciprocal(1.0 + torch.exp(-int(beta.item()) * x))
     return out
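Aside: the diffed `sigmoid_beta` can be exercised standalone. A minimal sketch, assuming only `torch` (the module's `register_activation` decorator is omitted here); note that the `int(beta.item())` cast in the code above truncates fractional values of β:

```python
import torch

def sigmoid_beta(x, beta=torch.tensor([10])):
    # f(x) = 1 / (1 + exp(-beta * x)); the int() cast mirrors the diffed code
    return torch.reciprocal(1.0 + torch.exp(-int(beta.item()) * x))

x = torch.linspace(-1.0, 1.0, 5)
print(sigmoid_beta(x))                          # steep transition, beta = 10
print(sigmoid_beta(x, beta=torch.tensor([1])))  # ordinary logistic sigmoid
```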
@@ -36,11 +42,13 @@ def sigmoid_beta(x, beta=torch.tensor([10])):
 @register_activation
 # @torch.jit.script
 def swish_beta(x, beta=torch.tensor([10])):
-    r""":math:`f(x) = \\frac{x}{1 + e^{-\\beta x}}`.
+    r"""Swish activation function with scaling.
 
+    Definition:
+        :math:`f(x) = \frac{x}{1 + e^{-\beta x}}`
+
     Keyword Arguments:
-    __________________
-    beta (float): Parameter :math:`\\beta`
+        beta (`torch.tensor`): Scaling parameter :math:`\beta`
     """
     out = x * sigmoid_beta(x, beta=beta)
     return out
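As the new docstring states, swish is the input scaled by the β-sigmoid. A quick numeric check, reusing the `sigmoid_beta` sketch from above:

```python
import torch

def sigmoid_beta(x, beta=torch.tensor([10])):
    return torch.reciprocal(1.0 + torch.exp(-int(beta.item()) * x))

def swish_beta(x, beta=torch.tensor([10])):
    # f(x) = x / (1 + exp(-beta * x)) = x * sigmoid_beta(x, beta)
    return x * sigmoid_beta(x, beta=beta)

x = torch.linspace(-2.0, 2.0, 5)
assert torch.allclose(swish_beta(x), x * sigmoid_beta(x))
print(swish_beta(x))
```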
@@ -49,8 +57,6 @@ def swish_beta(x, beta=torch.tensor([10])):
 def get_activation(funcname):
     if callable(funcname):
         return funcname
-    else:
-        if funcname in ACTIVATIONS:
-            return ACTIVATIONS.get(funcname)
-        else:
-            raise NameError(f'Activation {funcname} was not found.')
+    if funcname in ACTIVATIONS:
+        return ACTIVATIONS.get(funcname)
+    raise NameError(f'Activation {funcname} was not found.')
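The refactor above flattens the nested `else` branches without changing behavior: early returns make the fall-through `raise` unambiguous. A self-contained sketch of the decorator-registry pattern these functions imply; the body of `register_activation` is not shown in the diff, so the version here is an assumption:

```python
ACTIVATIONS = {}

def register_activation(f):
    # Record the function under its own name so it can be looked up later.
    ACTIVATIONS[f.__name__] = f
    return f

def get_activation(funcname):
    if callable(funcname):
        return funcname
    if funcname in ACTIVATIONS:
        return ACTIVATIONS.get(funcname)
    raise NameError(f'Activation {funcname} was not found.')

@register_activation
def identity(x):
    return x

print(get_activation('identity') is identity)  # True
print(get_activation(identity) is identity)    # True (callables pass through)
```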
@@ -30,8 +30,8 @@ def euclidean_distance(x, y):
 
 
 def lpnorm_distance(x, y, p):
-    """
-    Compute :math:`{\\langle x, y \\rangle}_p`.
+    r"""
+    Compute :math:`{\langle x, y \rangle}_p`.
 
     Expected dimension of x is 2.
     Expected dimension of y is 2.
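The diff touches only `lpnorm_distance`'s docstring; its body is not shown. One plausible realization of the documented contract (rows of `x` and `y` are points, output is a pairwise distance matrix), sketched with `torch.cdist`:

```python
import torch

def lpnorm_distance(x, y, p):
    # Pairwise p-norm distances between the rows of x (n, d) and y (m, d),
    # yielding an (n, m) matrix. A sketch, not the library's actual code.
    return torch.cdist(x, y, p=p)

x = torch.randn(4, 3)
y = torch.randn(5, 3)
print(lpnorm_distance(x, y, p=2).shape)  # torch.Size([4, 5])
```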
@@ -41,10 +41,10 @@ def lpnorm_distance(x, y, p):
 
 
 def omega_distance(x, y, omega):
-    """
+    r"""
     Omega distance.
 
-    Compute :math:`{\\langle \\Omega x, \\Omega y \\rangle}_p`
+    Compute :math:`{\langle \Omega x, \Omega y \rangle}_p`
 
     Expected dimension of x is 2.
     Expected dimension of y is 2.
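Likewise for `omega_distance`: the implementation is not part of the diff. A hedged sketch of what the docstring describes, projecting both inputs through a matrix `omega` before measuring distances:

```python
import torch

def omega_distance(x, y, omega):
    # Map both inputs through Omega, then measure pairwise distances in the
    # projected space; a plausible reading of the docstring above.
    return torch.cdist(x @ omega, y @ omega, p=2)

x = torch.randn(4, 3)
y = torch.randn(5, 3)
omega = torch.randn(3, 2)  # learned projection in practice; random for demo
print(omega_distance(x, y, omega).shape)  # torch.Size([4, 5])
```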
@@ -57,10 +57,10 @@ def omega_distance(x, y, omega):
 
 
 def lomega_distance(x, y, omegas):
-    """
+    r"""
     Localized Omega distance.
 
-    Compute :math:`{\\langle \\Omega_k x, \\Omega_k y_k \\rangle}_p`
+    Compute :math:`{\langle \Omega_k x, \Omega_k y_k \rangle}_p`
 
     Expected dimension of x is 2.
     Expected dimension of y is 2.
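For `lomega_distance`, the docstring's :math:`\Omega_k` suggests one projection matrix per prototype. A sketch under that assumption (the shapes are illustrative, not the library's API):

```python
import torch

def lomega_distance(x, y, omegas):
    # One Omega per prototype: omegas is (m, d, q), prototypes y are (m, d).
    # Project every data point with each Omega_k and compare against the
    # matching projected prototype y_k; a sketch, not the library's code.
    proj_x = torch.einsum('nd,mdq->mnq', x, omegas)   # (m, n, q)
    proj_y = torch.einsum('md,mdq->mq', y, omegas)    # (m, q)
    diff = proj_x - proj_y.unsqueeze(1)               # (m, n, q)
    return torch.linalg.vector_norm(diff, dim=-1).T   # (n, m)

x = torch.randn(4, 3)
y = torch.randn(5, 3)
omegas = torch.randn(5, 3, 2)
print(lomega_distance(x, y, omegas).shape)  # torch.Size([4, 5])
```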
@@ -86,8 +86,6 @@ def stratified_random(x_train, y_train, prototype_distribution):
 def get_initializer(funcname):
     if callable(funcname):
         return funcname
-    else:
-        if funcname in INITIALIZERS:
-            return INITIALIZERS.get(funcname)
-        else:
-            raise NameError(f'Initializer {funcname} was not found.')
+    if funcname in INITIALIZERS:
+        return INITIALIZERS.get(funcname)
+    raise NameError(f'Initializer {funcname} was not found.')
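`get_initializer` gets the same flattening as `get_activation`. A short usage sketch with a toy `INITIALIZERS` registry (the real registry is populated elsewhere in the module):

```python
import torch

INITIALIZERS = {'zeros': torch.zeros}  # toy registry for the demo

def get_initializer(funcname):
    if callable(funcname):
        return funcname
    if funcname in INITIALIZERS:
        return INITIALIZERS.get(funcname)
    raise NameError(f'Initializer {funcname} was not found.')

print(get_initializer('zeros')((2, 3)).shape)  # torch.Size([2, 3])
try:
    get_initializer('nope')
except NameError as err:
    print(err)  # Initializer nope was not found.
```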