Register
Login
Resources
Docs Blog Datasets Glossary Case Studies Tutorials & Webinars
Product
Data Engine LLMs Platform Enterprise
Pricing Explore
Connect to our Discord channel

#609 Ci fix

Merged
Ghost merged 1 commit into Deci-AI:master from deci-ai:bugfix/infra-000_ci
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
  1. import torch
  2. from torch import nn
  3. from torch.nn.modules.loss import _Loss
  4. class FocalLoss(_Loss):
  5. """Wraps focal loss around existing loss_fcn(), i.e. criteria = FocalLoss(nn.BCEWithLogitsLoss(), gamma=1.5)"""
  6. def __init__(self, loss_fcn: nn.BCEWithLogitsLoss, gamma=1.5, alpha=0.25):
  7. super(FocalLoss, self).__init__()
  8. self.loss_fcn = loss_fcn # must be nn.BCEWithLogitsLoss()
  9. self.gamma = gamma
  10. self.alpha = alpha
  11. self.reduction = loss_fcn.reduction
  12. self.loss_fcn.reduction = 'none' # required to apply FocalLoss to each element
  13. def forward(self, pred, true):
  14. loss = self.loss_fcn(pred, true)
  15. pred_prob = torch.sigmoid(pred) # prob from logits
  16. p_t = true * pred_prob + (1 - true) * (1 - pred_prob)
  17. alpha_factor = true * self.alpha + (1 - true) * (1 - self.alpha)
  18. modulating_factor = (1.0 - p_t) ** self.gamma
  19. loss *= alpha_factor * modulating_factor
  20. if self.reduction == 'mean':
  21. return loss.mean()
  22. elif self.reduction == 'sum':
  23. return loss.sum()
  24. else: # 'none'
  25. return loss
Discard
Tip!

Press p to see the previous file, or n to see the next file