Another Codacy bug fix

Christoph 2021-01-14 11:18:25 +01:00
parent 30dc0ea8b1
commit e2867f696e


@@ -145,11 +145,7 @@ class GTLVQ(nn.Module):
         return euclidean_distance_matrix(x, projected_prototypes)
 
     def local_tangent_projection(self,
-                                 signals,
-                                 protos,
-                                 subspaces,
-                                 squared=False,
-                                 epsilon=1e-10):
+                                 signals):
         # Note: subspaces is always assumed as transposed and must be orthogonal!
         # shape(signals): batch x proto_number x channels x dim1 x dim2 x ... x dimN
         # shape(protos): proto_number x dim1 x dim2 x ... x dimN
@@ -157,7 +153,8 @@ class GTLVQ(nn.Module):
         # subspace should be orthogonalized
         # Origin Source Code
         # Origin Author:
+        protos = self.cls.prototypes
+        subspaces = self.subspaces
         signal_shape, signal_int_shape = _int_and_mixed_shape(signals)
         _, proto_int_shape = _int_and_mixed_shape(protos)
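
The net effect of the change: local_tangent_projection now takes only the signals, while the prototypes and subspaces are read from the module itself (self.cls.prototypes and self.subspaces) instead of being passed as arguments, and the unused squared/epsilon parameters are dropped. Below is a minimal, self-contained PyTorch sketch of that pattern; every name in it (TangentProjectionSketch, num_protos, subspace_dim, ...) is illustrative and not taken from the actual GTLVQ implementation.

# Standalone sketch (not the library code) of projecting signals onto
# prototype-local tangent subspaces stored on the module itself.
import torch
import torch.nn as nn


class TangentProjectionSketch(nn.Module):
    def __init__(self, num_protos, dim, subspace_dim):
        super().__init__()
        # Prototypes and orthonormal subspace bases live on the module,
        # mirroring the refactor above (illustrative attributes only).
        self.protos = nn.Parameter(torch.randn(num_protos, dim))
        q, _ = torch.linalg.qr(torch.randn(num_protos, dim, subspace_dim))
        self.subspaces = nn.Parameter(q)

    def local_tangent_projection(self, signals):
        # Difference of each signal to each prototype: batch x protos x dim
        diff = signals.unsqueeze(1) - self.protos.unsqueeze(0)
        # Coordinates of the difference in each local subspace, then the
        # projection back into the ambient space.
        coords = torch.einsum('bpd,pds->bps', diff, self.subspaces)
        projected = torch.einsum('bps,pds->bpd', coords, self.subspaces)
        # Tangent distance: norm of the residual orthogonal to the subspace.
        residual = diff - projected
        return residual.norm(dim=-1)  # batch x protos


signals = torch.randn(4, 16)
model = TangentProjectionSketch(num_protos=3, dim=16, subspace_dim=5)
print(model.local_tangent_projection(signals).shape)  # torch.Size([4, 3])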