Dynamically remove components

This commit is contained in:
Jensun Ravichandran 2021-06-01 18:45:47 +02:00
parent 27b43b06a7
commit 8e8851d962

View File

@ -41,8 +41,6 @@ class Components(torch.nn.Module):
initialized_components=None): initialized_components=None):
super().__init__() super().__init__()
self.num_components = num_components
# Ignore all initialization settings if initialized_components is given. # Ignore all initialization settings if initialized_components is given.
if initialized_components is not None: if initialized_components is not None:
self._register_components(initialized_components) self._register_components(initialized_components)
@ -50,7 +48,12 @@ class Components(torch.nn.Module):
wmsg = "Arguments ignored while initializing Components" wmsg = "Arguments ignored while initializing Components"
warnings.warn(wmsg) warnings.warn(wmsg)
else: else:
self._initialize_components(initializer) self._initialize_components(initializer, num_components)
@property
def num_components(self):
    """Number of components currently registered.

    Derived directly from the first dimension of the ``_components``
    tensor so it stays correct after components are added or removed.
    """
    return self._components.shape[0]
def _register_components(self, components): def _register_components(self, components):
self.register_parameter("_components", Parameter(components)) self.register_parameter("_components", Parameter(components))
@ -62,17 +65,31 @@ class Components(torch.nn.Module):
f"You have provided: {initializer=} instead." f"You have provided: {initializer=} instead."
raise TypeError(emsg) raise TypeError(emsg)
def _initialize_components(self, initializer): def _initialize_components(self, initializer, num_components):
self._precheck_initializer(initializer) self._precheck_initializer(initializer)
_components = initializer.generate(self.num_components) _components = initializer.generate(num_components)
self._register_components(_components) self._register_components(_components)
def increase_components(self, initializer, num=1): def add_components(self,
self._precheck_initializer(initializer) initializer=None,
_new = initializer.generate(num) num=1,
_components = torch.cat([self._components, _new]) *,
initialized_components=None):
if initialized_components is not None:
_components = torch.cat([self._components, initialized_components])
else:
self._precheck_initializer(initializer)
_new = initializer.generate(num)
_components = torch.cat([self._components, _new])
self._register_components(_components) self._register_components(_components)
def remove_components(self, indices=None):
    """Remove the components at the given indices.

    Args:
        indices: index or sequence of indices of the components to drop.
            ``None`` (the default) removes nothing.

    Returns:
        The boolean keep-mask that was applied to ``_components`` (``True``
        for components that were kept), so subclasses can apply the same
        mask to parallel buffers (e.g. labels).
    """
    mask = torch.ones(self.num_components, dtype=torch.bool)
    # Guard against indices=None: the unguarded `mask[None] = False`
    # broadcasts over a new leading axis and zeroes the WHOLE mask,
    # i.e. a bare `remove_components()` call silently deleted every
    # component. With the guard, None is a no-op as documented.
    if indices is not None:
        mask[indices] = False
    _components = self._components[mask]
    self._register_components(_components)
    return mask
@property @property
def components(self): def components(self):
"""Tensor containing the component tensors.""" """Tensor containing the component tensors."""
@ -101,7 +118,7 @@ class LabeledComponents(Components):
self._labels = component_labels self._labels = component_labels
else: else:
labels = get_labels_object(distribution) labels = get_labels_object(distribution)
self.distribution = labels.distribution self.initial_distribution = labels.distribution
_labels = labels.generate() _labels = labels.generate()
super().__init__(len(_labels), initializer=initializer) super().__init__(len(_labels), initializer=initializer)
self._register_labels(_labels) self._register_labels(_labels)
@ -109,21 +126,21 @@ class LabeledComponents(Components):
def _register_labels(self, labels): def _register_labels(self, labels):
self.register_buffer("_labels", labels) self.register_buffer("_labels", labels)
def _update_distribution(self, distribution): @property
self.distribution = [ def distribution(self):
old + new for old, new in zip(self.distribution, distribution) clabels, counts = torch.unique(self._labels, sorted=True, return_counts=True)
] return dict(zip(clabels.tolist(), counts.tolist()))
def _initialize_components(self, initializer): def _initialize_components(self, initializer, num_components):
if isinstance(initializer, ClassAwareInitializer): if isinstance(initializer, ClassAwareInitializer):
self._precheck_initializer(initializer) self._precheck_initializer(initializer)
_components = initializer.generate(self.num_components, _components = initializer.generate(num_components,
self.distribution) self.initial_distribution)
self._register_components(_components) self._register_components(_components)
else: else:
super()._initialize_components(initializer) super()._initialize_components(initializer, num_components)
def increase_components(self, initializer, distribution=[1]): def add_components(self, initializer, distribution=[1]):
self._precheck_initializer(initializer) self._precheck_initializer(initializer)
# Labels # Labels
@ -140,8 +157,13 @@ class LabeledComponents(Components):
_components = torch.cat([self._components, _new]) _components = torch.cat([self._components, _new])
self._register_components(_components) self._register_components(_components)
# Housekeeping def remove_components(self, indices=None):
self._update_distribution(labels.distribution) # Components
mask = super().remove_components(indices)
# Labels
_labels = self._labels[mask]
self._register_labels(_labels)
@property @property
def component_labels(self): def component_labels(self):