Rename input to x in activation functions
This commit is contained in:
parent c843ace63d
commit f640a22cf2
@ -15,32 +15,32 @@ def register_activation(f):
|
|||||||
|
|
||||||
@register_activation
# @torch.jit.script
def identity(x, beta=torch.tensor([0])):
    """:math:`f(x) = x`

    Pass-through activation: the input tensor is returned unchanged.

    Keyword Arguments:
        beta (float): Ignored; accepted only so the signature matches
            the other registered activations.
    """
    return x
||||||
|
|
||||||
|
|
||||||
@register_activation
# @torch.jit.script
def sigmoid_beta(x, beta=torch.tensor([10])):
    """:math:`f(x) = \\frac{1}{1 + e^{-\\beta x}}`

    Sigmoid activation with a tunable steepness parameter.

    Keyword Arguments:
        beta (float): Parameter :math:`\\beta`

    Returns:
        Tensor of the same shape as ``x``.
    """
    # BUG FIX: the previous implementation scaled by ``int(beta.item())``,
    # which silently truncated fractional beta values — e.g. beta=0.5
    # behaved as beta=0, producing a constant 0.5 output, contradicting
    # the documented "beta (float)" contract. Use the float value directly.
    # NOTE(review): the tensor default is shared across calls; it is never
    # mutated here, so it is kept unchanged for interface compatibility.
    out = torch.reciprocal(1.0 + torch.exp(-beta.item() * x))
    return out
||||||
|
|
||||||
|
|
||||||
@register_activation
# @torch.jit.script
def swish_beta(x, beta=torch.tensor([10])):
    """:math:`f(x) = \\frac{x}{1 + e^{-\\beta x}}`

    Swish activation with a tunable steepness parameter.

    Keyword Arguments:
        beta (float): Parameter :math:`\\beta`
    """
    # Swish is the input gated by its own beta-scaled sigmoid.
    gate = sigmoid_beta(x, beta=beta)
    return x * gate
||||||
|
|
||||||
|
|
||||||
|
Loading…
Reference in New Issue
Block a user