#869 Add DagsHub Logger to Super Gradients

Merged
Ghost merged 1 commit into Deci-AI:master from timho102003:dagshub_logger
import torch
from torch import nn
from torch.autograd import Variable

from super_gradients.common.object_names import Losses
from super_gradients.common.registry.registry import register_loss


@register_loss(Losses.SHELFNET_SE_LOSS)
class ShelfNetSemanticEncodingLoss(nn.CrossEntropyLoss):
    """2D Cross Entropy Loss with Auxiliary Loss"""

    # FIXME - THIS LOSS SHOULD BE CHANGED TO SUPPORT APEX

    def __init__(self, se_weight=0.2, nclass=21, aux_weight=0.4, weight=None, ignore_index=-1):
        super().__init__(weight, None, ignore_index)
        self.nclass = nclass
        self.se_weight = se_weight
        self.aux_weight = aux_weight

        # FIXME - TEST CODE LOTEM, CHANGED IN ORDER TO WORK WITH apex.amp
        self.bcewithlogitsloss = nn.BCELoss(weight)

    def forward(self, logits, labels):
        pred1, se_pred, pred2 = logits
        batch = labels.size(0)
        se_target = Variable(torch.zeros(batch, self.nclass))
        # FIXME - THIS IS WHAT apex MIGHT BE FAILING TO WORK WITH
        for i in range(batch):
            hist = torch.histc(labels[i].cpu().data.float(), bins=self.nclass, min=0, max=self.nclass - 1)
            vect = hist > 0
            se_target[i] = vect
        loss1 = super().forward(pred1, labels)
        loss2 = super().forward(pred2, labels)
        loss3 = self.bcewithlogitsloss(torch.sigmoid(se_pred), se_target.data.cuda())  # FIXME - MAYBE CHANGE TO SIGMOID
        total_loss = loss1 + self.aux_weight * loss2 + self.se_weight * loss3
        losses = [loss1, loss2, loss3, total_loss]
        return total_loss, torch.stack(losses, dim=0).detach()

    @property
    def component_names(self):
        """
        Component names for logging during training.
        These correspond to the 2nd item in the tuple returned in self.forward(...).
        See super_gradients.Trainer.train() docs for more info.
        """
        return ["loss1", "loss2", "loss3", "total_loss"]
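For context on the two-item return convention that training loggers consume, here is a minimal usage sketch of this loss. The tensor shapes and the standalone class reference are illustrative assumptions, not part of this PR; note that forward() calls .cuda() on se_target, so as written the loss requires a GPU.

# Minimal usage sketch (shapes are assumed, not taken from this PR).
# forward() moves se_target to CUDA internally, so a GPU is required.
import torch

criterion = ShelfNetSemanticEncodingLoss(nclass=21).cuda()

pred1 = torch.randn(2, 21, 32, 32, device="cuda")        # main segmentation logits
pred2 = torch.randn(2, 21, 32, 32, device="cuda")        # auxiliary-head logits
se_pred = torch.randn(2, 21, device="cuda")              # per-class presence logits
labels = torch.randint(0, 21, (2, 32, 32), device="cuda")

total_loss, components = criterion((pred1, se_pred, pred2), labels)
total_loss.backward()

# `components` is a detached tensor of shape (4,) whose entries line up with
# criterion.component_names == ["loss1", "loss2", "loss3", "total_loss"],
# which is what a training logger reads for per-component logging.

Only total_loss carries gradients; the stacked components tensor is detached, so a logger such as the one this PR adds can record the individual terms without affecting backpropagation.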