Rename input to x in activation functions

blackfly 2020-04-11 14:25:35 +02:00
parent c843ace63d
commit f640a22cf2


@@ -15,32 +15,32 @@ def register_activation(f):
 @register_activation
 # @torch.jit.script
-def identity(input, beta=torch.tensor([0])):
+def identity(x, beta=torch.tensor([0])):
     """:math:`f(x) = x`"""
-    return input
+    return x
 @register_activation
 # @torch.jit.script
-def sigmoid_beta(input, beta=torch.tensor([10])):
+def sigmoid_beta(x, beta=torch.tensor([10])):
     """:math:`f(x) = \\frac{1}{1 + e^{-\\beta x}}`
     Keyword Arguments:
         beta (float): Parameter :math:`\\beta`
     """
-    out = torch.reciprocal(1.0 + torch.exp(-int(beta.item()) * input))
+    out = torch.reciprocal(1.0 + torch.exp(-int(beta.item()) * x))
     return out
 @register_activation
 # @torch.jit.script
-def swish_beta(input, beta=torch.tensor([10])):
+def swish_beta(x, beta=torch.tensor([10])):
     """:math:`f(x) = \\frac{x}{1 + e^{-\\beta x}}`
     Keyword Arguments:
         beta (float): Parameter :math:`\\beta`
     """
-    out = input * sigmoid_beta(input, beta=beta)
+    out = x * sigmoid_beta(x, beta=beta)
     return out
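
For reference, here is a minimal, self-contained sketch of calling the renamed functions after this change. The register_activation decorator and the module these functions live in are not shown in the diff, so the two beta-parameterised bodies are copied inline under that assumption, and torch.nn.functional.silu is used only for the comparison (it requires a reasonably recent PyTorch).

import torch
import torch.nn.functional as F

def sigmoid_beta(x, beta=torch.tensor([10])):
    # f(x) = 1 / (1 + exp(-beta * x)), as in the diff above
    return torch.reciprocal(1.0 + torch.exp(-int(beta.item()) * x))

def swish_beta(x, beta=torch.tensor([10])):
    # f(x) = x / (1 + exp(-beta * x)) = x * sigmoid_beta(x)
    return x * sigmoid_beta(x, beta=beta)

x = torch.linspace(-1.0, 1.0, steps=5)

# With the default beta=10, sigmoid_beta matches torch.sigmoid(10 * x).
print(torch.allclose(sigmoid_beta(x), torch.sigmoid(10 * x)))            # True

# With beta=1, swish_beta reduces to the standard SiLU/Swish activation.
print(torch.allclose(swish_beta(x, beta=torch.tensor([1])), F.silu(x)))  # True

The rename is purely cosmetic: defaults and numerics are unchanged, and the argument x no longer shadows Python's built-in input.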