More cosmetic changes
parent f80d9648c3
commit 4158586cb9
@@ -3,8 +3,8 @@
 ## Release 0.1.1-dev0

 ### Includes
-- Minor bugfixes.
-- 100% line coverage.
+- Minor bugfixes.
+- 100% line coverage.

 ## Release 0.1.0-dev0

@@ -8,9 +8,10 @@ ACTIVATIONS = dict()
 # def register_activation(scriptf):
 #     ACTIVATIONS[scriptf.name] = scriptf
 #     return scriptf
-def register_activation(f):
-    ACTIVATIONS[f.__name__] = f
-    return f
+def register_activation(function):
+    """Add the activation function to the registry."""
+    ACTIVATIONS[function.__name__] = function
+    return function


 @register_activation
@@ -55,6 +56,7 @@ def swish_beta(x, beta=torch.tensor([10])):


 def get_activation(funcname):
+    """Deserialize the activation function."""
     if callable(funcname):
         return funcname
     if funcname in ACTIVATIONS:
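For context on the registry pattern these two hunks touch: the decorator stores each function under its `__name__`, and `get_activation` later resolves a string back to a callable (or passes a callable straight through). A minimal, self-contained sketch of that round trip; the `identity` activation and the final `NameError` fallback are illustrative assumptions, not part of this diff:

```python
import torch

ACTIVATIONS = dict()


def register_activation(function):
    """Add the activation function to the registry."""
    ACTIVATIONS[function.__name__] = function
    return function


@register_activation
def identity(x):
    """Illustrative activation, not from this commit."""
    return x


def get_activation(funcname):
    """Deserialize the activation function."""
    if callable(funcname):
        return funcname  # already a function: pass it through
    if funcname in ACTIVATIONS:
        return ACTIVATIONS[funcname]  # resolve the registered name
    raise NameError(f"Could not deserialize activation {funcname}.")  # assumed fallback


act = get_activation("identity")
print(act(torch.tensor([1.0, -2.0])))  # tensor([ 1., -2.])
```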
@@ -4,8 +4,7 @@ import torch


 def squared_euclidean_distance(x, y):
-    """
-    Compute the squared Euclidean distance between :math:`x` and :math:`y`.
+    """Compute the squared Euclidean distance between :math:`x` and :math:`y`.

     Expected dimension of x is 2.
     Expected dimension of y is 2.
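This hunk only reflows the docstring, but as a reminder of what the function computes: a pairwise squared Euclidean distance between two 2-D tensors can be written with broadcasting. The body below is a sketch under that reading, not the file's actual implementation:

```python
import torch


def squared_euclidean_distance(x, y):
    """Sketch: pairwise squared Euclidean distances between rows of x and y.

    Assumes x has shape (n, d) and y has shape (m, d); returns (n, m).
    """
    differences = x.unsqueeze(1) - y.unsqueeze(0)  # broadcast to (n, m, d)
    return torch.sum(differences**2, dim=2)


x = torch.rand(4, 3)
y = torch.rand(5, 3)
print(squared_euclidean_distance(x, y).shape)  # torch.Size([4, 5])
```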
@@ -18,8 +17,7 @@ def squared_euclidean_distance(x, y):


 def euclidean_distance(x, y):
-    """
-    Compute the Euclidean distance between :math:`x` and :math:`y`.
+    """Compute the Euclidean distance between :math:`x` and :math:`y`.

     Expected dimension of x is 2.
     Expected dimension of y is 2.
@@ -30,8 +28,7 @@ def euclidean_distance(x, y):


 def lpnorm_distance(x, y, p):
-    r"""
-    Compute :math:`{\langle x, y \rangle}_p`.
+    r"""Compute :math:`{\langle x, y \rangle}_p`.

     Expected dimension of x is 2.
     Expected dimension of y is 2.
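For the p-norm variant, `torch.cdist` already computes pairwise p-norm distances between two 2-D batches, so a hedged sketch of `lpnorm_distance` could simply delegate to it (an assumption about the body, which this hunk does not show):

```python
import torch


def lpnorm_distance(x, y, p):
    """Sketch: pairwise p-norm distances, assuming x is (n, d) and y is (m, d)."""
    return torch.cdist(x, y, p=p)


x = torch.rand(4, 2)
y = torch.rand(3, 2)
print(lpnorm_distance(x, y, p=1).shape)  # torch.Size([4, 3])
```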
@@ -41,8 +38,7 @@ def lpnorm_distance(x, y, p):


 def omega_distance(x, y, omega):
-    r"""
-    Omega distance.
+    r"""Omega distance.

     Compute :math:`{\langle \Omega x, \Omega y \rangle}_p`

@@ -57,8 +53,7 @@ def omega_distance(x, y, omega):


 def lomega_distance(x, y, omegas):
-    r"""
-    Localized Omega distance.
+    r"""Localized Omega distance.

     Compute :math:`{\langle \Omega_k x, \Omega_k y_k \rangle}_p`

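The omega variants measure distance after projecting the inputs with a matrix :math:`\Omega`: a single global `omega` in `omega_distance`, and one `omega_k` per prototype row of `y` in `lomega_distance`, as the subscripts in the docstring suggest. A rough sketch of the global case only; the shapes and the squared-Euclidean choice of p are assumptions, since the hunks show only the docstrings:

```python
import torch


def omega_distance(x, y, omega):
    """Sketch: squared distances between omega-projected x and y.

    Assumes x is (n, d), y is (m, d), omega is (d, k); returns (n, m).
    """
    projected_x = x @ omega  # (n, k)
    projected_y = y @ omega  # (m, k)
    differences = projected_x.unsqueeze(1) - projected_y.unsqueeze(0)
    return torch.sum(differences**2, dim=2)


x = torch.rand(4, 3)
y = torch.rand(2, 3)
omega = torch.rand(3, 2)
print(omega_distance(x, y, omega).shape)  # torch.Size([4, 2])
```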
@@ -7,9 +7,10 @@ import torch
 INITIALIZERS = dict()


-def register_initializer(func):
-    INITIALIZERS[func.__name__] = func
-    return func
+def register_initializer(function):
+    """Add the initializer to the registry."""
+    INITIALIZERS[function.__name__] = function
+    return function


 def labels_from(distribution):
@@ -84,6 +85,7 @@ def stratified_random(x_train, y_train, prototype_distribution):


 def get_initializer(funcname):
+    """Deserialize the initializer."""
     if callable(funcname):
         return funcname
     if funcname in INITIALIZERS:
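The initializer module mirrors the activation registry: `register_initializer` keys each function by `__name__`, and `get_initializer` resolves names back to callables. A toy round trip; the `zeros` initializer and its return shape are illustrative assumptions built around the `stratified_random(x_train, y_train, prototype_distribution)` signature visible in the hunk header:

```python
import torch

INITIALIZERS = dict()


def register_initializer(function):
    """Add the initializer to the registry."""
    INITIALIZERS[function.__name__] = function
    return function


@register_initializer
def zeros(x_train, y_train, prototype_distribution):
    """Illustrative initializer: one all-zero prototype per requested slot."""
    num_protos = int(torch.sum(prototype_distribution).item())
    return torch.zeros(num_protos, x_train.shape[1])


x_train = torch.rand(10, 3)
y_train = torch.randint(0, 2, (10,))
protos = INITIALIZERS["zeros"](x_train, y_train, torch.tensor([1, 1]))
print(protos.shape)  # torch.Size([2, 3])
```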