[BUG] GLVQ training is unstable

GLVQ training is unstable when prototypes are initialized exactly to data points
without small shifts, presumably because of the resulting zero distances.
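
For context, a minimal sketch (not part of the commit) of the suspected failure mode: the Euclidean distance is not differentiable at zero, so if a prototype is initialized exactly on a data point, autograd produces NaN gradients on the very first step.

import torch

# Hypothetical reproduction of the zero-distance case:
# the prototype w is placed exactly on the data point x.
x = torch.tensor([1.0, 2.0])
w = x.clone().requires_grad_(True)

d = torch.sqrt(torch.sum((x - w) ** 2))  # Euclidean distance, exactly 0 here
d.backward()
print(d.item())  # 0.0
print(w.grad)    # tensor([nan, nan]) -- undefined gradient at zero distance
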
Jensun Ravichandran 2021-04-29 19:25:28 +02:00
parent e686adbea1
commit 9a7d3192c0

@@ -111,7 +111,8 @@ class StratifiedSelectionInitializer(ClassAwareInitializer):
         samples_list = [init.generate(per_class) for init in self.initializers]
         samples = torch.vstack(samples_list)
         if self.noise is not None:
-            samples = self.add_noise(samples)
+            # samples = self.add_noise(samples)
+            samples = samples + self.noise
         return samples
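
The change above adds a fixed noise tensor to the selected samples instead of calling add_noise. A standalone sketch of the same idea, using a hypothetical helper (this is not prototorch's API): prototypes are drawn from the data and shifted by a small random amount so that no prototype coincides exactly with a training sample.

import torch

def select_prototypes_with_jitter(data: torch.Tensor,
                                  num_prototypes: int,
                                  noise_scale: float = 1e-2) -> torch.Tensor:
    # Hypothetical helper (not prototorch's API). Pick prototypes from the
    # data and add a small Gaussian shift, mirroring the committed change
    # `samples = samples + self.noise` but with freshly drawn noise.
    indices = torch.randperm(len(data))[:num_prototypes]
    prototypes = data[indices].clone()
    return prototypes + noise_scale * torch.randn_like(prototypes)

# Usage: 4 jittered prototypes from 100 two-dimensional samples
data = torch.randn(100, 2)
prototypes = select_prototypes_with_jitter(data, num_prototypes=4)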