#869 Add DagsHub Logger to Super Gradients

Merged
Ghost merged 1 commit into Deci-AI:master from timho102003:dagshub_logger
import unittest

from torch import Tensor
from torchmetrics import Accuracy
import torch

from super_gradients import Trainer
from super_gradients.common.object_names import Models
from super_gradients.training import models
from super_gradients.training.dataloaders.dataloaders import classification_test_dataloader


class CriterionWithUnnamedComponents(torch.nn.CrossEntropyLoss):
    def __init__(self):
        super(CriterionWithUnnamedComponents, self).__init__()

    def forward(self, input: Tensor, target: Tensor) -> tuple:
        loss = super(CriterionWithUnnamedComponents, self).forward(input=input, target=target)
        items = torch.cat((loss.unsqueeze(0), loss.unsqueeze(0))).detach()
        return loss, items


class CriterionWithNamedComponents(CriterionWithUnnamedComponents):
    def __init__(self):
        super(CriterionWithNamedComponents, self).__init__()
        self.component_names = ["loss_A", "loss_B"]


class LossLoggingsTest(unittest.TestCase):
    def test_single_item_logging(self):
        trainer = Trainer("test_single_item_logging", model_checkpoints_location="local")
        dataloader = classification_test_dataloader(batch_size=10)
        model = models.get(Models.RESNET18, arch_params={"num_classes": 5})
        train_params = {
            "max_epochs": 1,
            "lr_updates": [1],
            "lr_decay_factor": 0.1,
            "lr_mode": "step",
            "lr_warmup_epochs": 0,
            "initial_lr": 0.1,
            "loss": torch.nn.CrossEntropyLoss(),
            "optimizer": "SGD",
            "criterion_params": {},
            "optimizer_params": {"weight_decay": 1e-4, "momentum": 0.9},
            "train_metrics_list": [Accuracy()],
            "valid_metrics_list": [Accuracy()],
            "metric_to_watch": "Accuracy",
            "greater_metric_to_watch_is_better": True,
        }
        trainer.train(model=model, training_params=train_params, train_loader=dataloader, valid_loader=dataloader)
        self.assertListEqual(trainer.loss_logging_items_names, ["CrossEntropyLoss"])

    def test_multiple_unnamed_components_loss_logging(self):
        trainer = Trainer("test_multiple_unnamed_components_loss_logging", model_checkpoints_location="local")
        dataloader = classification_test_dataloader(batch_size=10)
        model = models.get(Models.RESNET18, arch_params={"num_classes": 5})
        train_params = {
            "max_epochs": 1,
            "lr_updates": [1],
            "lr_decay_factor": 0.1,
            "lr_mode": "step",
            "lr_warmup_epochs": 0,
            "initial_lr": 0.1,
            "loss": CriterionWithUnnamedComponents(),
            "optimizer": "SGD",
            "criterion_params": {},
            "optimizer_params": {"weight_decay": 1e-4, "momentum": 0.9},
            "train_metrics_list": [Accuracy()],
            "valid_metrics_list": [Accuracy()],
            "metric_to_watch": "Accuracy",
            "greater_metric_to_watch_is_better": True,
        }
        trainer.train(model=model, training_params=train_params, train_loader=dataloader, valid_loader=dataloader)
        self.assertListEqual(trainer.loss_logging_items_names, ["CriterionWithUnnamedComponents/loss_0", "CriterionWithUnnamedComponents/loss_1"])

    def test_multiple_named_components_loss_logging(self):
        trainer = Trainer("test_multiple_named_components_loss_logging", model_checkpoints_location="local")
        dataloader = classification_test_dataloader(batch_size=10)
        model = models.get(Models.RESNET18, arch_params={"num_classes": 5})
        train_params = {
            "max_epochs": 1,
            "lr_updates": [1],
            "lr_decay_factor": 0.1,
            "lr_mode": "step",
            "lr_warmup_epochs": 0,
            "initial_lr": 0.1,
            "loss": CriterionWithNamedComponents(),
            "optimizer": "SGD",
            "criterion_params": {},
            "optimizer_params": {"weight_decay": 1e-4, "momentum": 0.9},
            "train_metrics_list": [Accuracy()],
            "valid_metrics_list": [Accuracy()],
            "metric_to_watch": "Accuracy",
            "greater_metric_to_watch_is_better": True,
        }
        trainer.train(model=model, training_params=train_params, train_loader=dataloader, valid_loader=dataloader)
        self.assertListEqual(trainer.loss_logging_items_names, ["CriterionWithNamedComponents/loss_A", "CriterionWithNamedComponents/loss_B"])


if __name__ == "__main__":
    unittest.main()
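The test file above only exercises SuperGradients' loss-name logging; the DagsHub logger this PR adds would be selected through the same training_params dictionary that the tests build. The following is a minimal sketch of that wiring, not a definitive implementation: the registration name "dagshub_sg_logger" and the parameter names dagshub_repository and log_mlflow_only are assumptions based on the PR title, not confirmed by the file shown here.

# Hedged sketch: enabling the DagsHub SGLogger via training_params.
# "dagshub_sg_logger", "dagshub_repository" and "log_mlflow_only" are assumed
# names used for illustration only.
import torch
from torchmetrics import Accuracy

from super_gradients import Trainer
from super_gradients.common.object_names import Models
from super_gradients.training import models
from super_gradients.training.dataloaders.dataloaders import classification_test_dataloader

trainer = Trainer("dagshub_logger_example", model_checkpoints_location="local")
dataloader = classification_test_dataloader(batch_size=10)
model = models.get(Models.RESNET18, arch_params={"num_classes": 5})

train_params = {
    "max_epochs": 1,
    "lr_updates": [1],
    "lr_decay_factor": 0.1,
    "lr_mode": "step",
    "lr_warmup_epochs": 0,
    "initial_lr": 0.1,
    "loss": torch.nn.CrossEntropyLoss(),
    "optimizer": "SGD",
    "optimizer_params": {"weight_decay": 1e-4, "momentum": 0.9},
    "train_metrics_list": [Accuracy()],
    "valid_metrics_list": [Accuracy()],
    "metric_to_watch": "Accuracy",
    "greater_metric_to_watch_is_better": True,
    # Assumed: pick the DagsHub logger by its registration name and point it
    # at a DagsHub repository (placeholder value below).
    "sg_logger": "dagshub_sg_logger",
    "sg_logger_params": {
        "dagshub_repository": "<user>/<repo>",  # hypothetical placeholder
        "log_mlflow_only": False,               # assumed option
    },
}

trainer.train(model=model, training_params=train_params, train_loader=dataloader, valid_loader=dataloader)

If the new logger follows the existing SGLogger registration pattern, switching experiment-tracking backends stays a configuration change rather than a code change, which is why the loss-logging tests above are unaffected by the addition.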