seed_loss.py

import torch
import torch.nn.functional as F
from torch import nn


class CosineEmbeddingLoss(nn.Module):
    """Mean cosine distance, i.e. mean of (1 - cosine similarity), between two embedding batches."""

    def __init__(self, margin=0.0):
        super(CosineEmbeddingLoss, self).__init__()
        self.margin = margin  # accepted for API compatibility; not used below
        self.epsilon = 1e-12  # avoids division by zero for zero-norm vectors

    def forward(self, x1, x2):
        similarity = torch.sum(x1 * x2, dim=-1) / (
            torch.norm(x1, dim=-1) * torch.norm(x2, dim=-1) + self.epsilon)
        return (1 - similarity).mean()


class SEEDLoss(nn.Module):
    """Recognition cross-entropy plus a 0.1-weighted semantic (cosine) loss."""

    def __init__(self, label_smoothing=0.1, ignore_index=0, **kwargs):
        super(SEEDLoss, self).__init__()
        self.label_smoothing = label_smoothing
        # Note: the ignore_index argument is not used here; the index to
        # ignore is derived from pred.shape[1] in forward().
        self.loss_sem = CosineEmbeddingLoss()

    def forward(self, preds, batch):
        # preds: (semantic embedding vectors, per-step classification logits)
        embedding_vectors, pred = preds

        # Crop the targets to the longest sequence in the batch: skip index 0
        # and keep max_len + 1 target steps.
        max_len = batch[2].max()
        tgt = batch[1][:, 1:2 + max_len]

        # Flatten (batch, time, classes) -> (batch * time, classes) for CE.
        pred = pred.flatten(0, 1)
        tgt = tgt.reshape([-1])
        loss = F.cross_entropy(
            pred,
            tgt,
            reduction='mean',
            label_smoothing=self.label_smoothing,
            # Targets equal to num_classes + 1 (pred.shape[1] + 1) are excluded.
            ignore_index=pred.shape[1] + 1,
        )

        # Semantic supervision: pull the predicted embedding towards the
        # target embedding (batch[3]) with the cosine distance above.
        sem_target = batch[3].float()
        sem_loss = torch.sum(self.loss_sem(embedding_vectors, sem_target))

        return {
            'loss': loss + 0.1 * sem_loss,
            'rec_loss': loss,
            'sem_loss': sem_loss,
        }
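

# ----------------------------------------------------------------------------
# Minimal usage sketch (not part of the original file). The tensor shapes and
# batch layout below are assumptions inferred from how SEEDLoss.forward()
# indexes `preds` and `batch`; adjust them to the real data pipeline.
# ----------------------------------------------------------------------------
if __name__ == '__main__':
    batch_size, max_text_len, num_classes, sem_dim = 2, 5, 38, 300
    seq_len = max_text_len + 1  # matches the max_len + 1 target steps above

    # preds: (semantic embedding, per-step class logits)
    embedding_vectors = torch.randn(batch_size, sem_dim)
    logits = torch.randn(batch_size, seq_len, num_classes)

    # batch: [images, target ids, target lengths, semantic target embeddings]
    tgt_ids = torch.randint(0, num_classes, (batch_size, max_text_len + 2))
    tgt_lens = torch.full((batch_size,), max_text_len, dtype=torch.long)
    sem_target = torch.randn(batch_size, sem_dim)
    batch = [None, tgt_ids, tgt_lens, sem_target]

    loss_dict = SEEDLoss()((embedding_vectors, logits), batch)
    print({k: round(v.item(), 4) for k, v in loss_dict.items()})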