Add probabilistic losses
Based on the Soft LVQ paper by Seo and Obermayer
parent 7f0da894fa
commit 040d1ee9e8
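For orientation, reading the new code against Seo and Obermayer's Soft LVQ formulation (my gloss, not part of the commit): write p_y(x) for the probability mass assigned to the correct class y of a sample x, and p(x) for the total mass over all classes. The two new functions then return the per-sample log ratios

    \log \frac{p_y(x)}{p(x) - p_y(x)}   % log_likelihood_ratio_loss
    \log \frac{p_y(x)}{p(x)}            % robust_soft_loss

both of which, as written, are objectives to maximize rather than losses to minimize.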
@@ -57,3 +57,37 @@ def lvq21_loss(distances, target_labels, prototype_labels):
     mu = dp - dm
     return mu
+
+
+# Probabilistic
+def log_likelihood_ratio_loss(probabilities, target, prototype_labels):
+    uniques = prototype_labels.unique(sorted=True).tolist()
+    labels = target.tolist()
+
+    key_val = {key: val for key, val in zip(uniques, range(len(uniques)))}
+    target_indices = torch.LongTensor(list(map(key_val.get, labels)))
+
+    whole_probability = probabilities.sum(dim=1)
+    correct_probability = probabilities[torch.arange(len(probabilities)),
+                                        target_indices]
+    wrong_probability = whole_probability - correct_probability
+
+    likelihood = correct_probability / wrong_probability
+    log_likelihood = torch.log(likelihood)
+    return log_likelihood
+
+
+def robust_soft_loss(probabilities, target, prototype_labels):
+    uniques = prototype_labels.unique(sorted=True).tolist()
+    labels = target.tolist()
+
+    key_val = {key: val for key, val in zip(uniques, range(len(uniques)))}
+    target_indices = torch.LongTensor(list(map(key_val.get, labels)))
+
+    whole_probability = probabilities.sum(dim=1)
+    correct_probability = probabilities[torch.arange(len(probabilities)),
+                                        target_indices]
+
+    likelihood = correct_probability / whole_probability
+    log_likelihood = torch.log(likelihood)
+    return log_likelihood
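A minimal usage sketch follows (my own illustration, not part of the commit). It assumes `probabilities` carries one column per unique class label, already aggregated over that class's prototypes, and that a training loop would negate the returned log-likelihood to obtain a minimizable loss; neither detail is pinned down by this hunk.

import torch

# Assumes log_likelihood_ratio_loss and robust_soft_loss are defined
# as in the diff above.

# Toy batch: 2 samples, 3 classes.
probabilities = torch.tensor([[0.7, 0.2, 0.1],
                              [0.1, 0.3, 0.6]])
target = torch.tensor([0, 2])               # correct class per sample
prototype_labels = torch.tensor([0, 1, 2])  # one prototype per class here

llr = log_likelihood_ratio_loss(probabilities, target, prototype_labels)
rsl = robust_soft_loss(probabilities, target, prototype_labels)
loss = -rsl.mean()  # negate the log-likelihood for gradient descent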