Register
Login
Resources
Docs Blog Datasets Glossary Case Studies Tutorials & Webinars
Product
Data Engine LLMs Platform Enterprise
Pricing Explore
Connect to our Discord channel

#561 Feature/SG-193 extend output formatter

Merged
Ghost merged 1 commits into Deci-AI:master from deci-ai:feature/SG-193-extend_detection_target_transform
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
  1. import unittest
  2. import numpy as np
  3. from super_gradients.training import Trainer
  4. from super_gradients.training.dataloaders.dataloaders import classification_test_dataloader
  5. from super_gradients.training.metrics import Accuracy
  6. from super_gradients.training.models import LeNet
  7. from super_gradients.training.utils.callbacks import TestLRCallback, LRCallbackBase, Phase
  8. class ExponentialWarmupLRCallback(LRCallbackBase):
  9. """
  10. LR scheduling callback for exponential warmup.
  11. LR grows exponentially from warmup_initial_lr to initial lr.
  12. When warmup_initial_lr is None- LR climb starts from 0.001
  13. """
  14. def __init__(self, **kwargs):
  15. super().__init__(Phase.TRAIN_EPOCH_START, **kwargs)
  16. self.warmup_initial_lr = self.training_params.warmup_initial_lr or 0.001
  17. warmup_epochs = self.training_params.lr_warmup_epochs
  18. lr_start = self.warmup_initial_lr
  19. lr_end = self.initial_lr
  20. self.c1 = (lr_end - lr_start) / (np.exp(warmup_epochs) - 1.)
  21. self.c2 = (lr_start * np.exp(warmup_epochs) - lr_end) / (np.exp(warmup_epochs) - 1.)
  22. def perform_scheduling(self, context):
  23. self.lr = self.c1 * np.exp(context.epoch) + self.c2
  24. self.update_lr(context.optimizer, context.epoch, None)
  25. def is_lr_scheduling_enabled(self, context):
  26. return self.training_params.lr_warmup_epochs >= context.epoch
  27. class LRWarmupTest(unittest.TestCase):
  28. def test_lr_warmup(self):
  29. # Define Model
  30. net = LeNet()
  31. trainer = Trainer("lr_warmup_test")
  32. lrs = []
  33. phase_callbacks = [TestLRCallback(lr_placeholder=lrs)]
  34. train_params = {"max_epochs": 5, "lr_updates": [], "lr_decay_factor": 0.1, "lr_mode": "step",
  35. "lr_warmup_epochs": 3, "initial_lr": 1, "loss": "cross_entropy", "optimizer": 'SGD',
  36. "criterion_params": {}, "optimizer_params": {"weight_decay": 1e-4, "momentum": 0.9},
  37. "train_metrics_list": [Accuracy()], "valid_metrics_list": [Accuracy()],
  38. "metric_to_watch": "Accuracy",
  39. "greater_metric_to_watch_is_better": True, "ema": False, "phase_callbacks": phase_callbacks,
  40. "warmup_mode": "linear_step"}
  41. expected_lrs = [0.25, 0.5, 0.75, 1.0, 1.0]
  42. trainer.train(model=net, training_params=train_params,
  43. train_loader=classification_test_dataloader(batch_size=4),
  44. valid_loader=classification_test_dataloader(batch_size=4))
  45. self.assertListEqual(lrs, expected_lrs)
  46. def test_lr_warmup_with_lr_scheduling(self):
  47. # Define model
  48. net = LeNet()
  49. trainer = Trainer("lr_warmup_test")
  50. lrs = []
  51. phase_callbacks = [TestLRCallback(lr_placeholder=lrs)]
  52. train_params = {"max_epochs": 5, "cosine_final_lr_ratio": 0.2, "lr_mode": "cosine",
  53. "lr_warmup_epochs": 3, "initial_lr": 1, "loss": "cross_entropy", "optimizer": 'SGD',
  54. "criterion_params": {}, "optimizer_params": {"weight_decay": 1e-4, "momentum": 0.9},
  55. "train_metrics_list": [Accuracy()], "valid_metrics_list": [Accuracy()],
  56. "metric_to_watch": "Accuracy",
  57. "greater_metric_to_watch_is_better": True, "ema": False, "phase_callbacks": phase_callbacks,
  58. "warmup_mode": "linear_step"}
  59. expected_lrs = [0.25, 0.5, 0.75, 0.9236067977499791, 0.4763932022500211]
  60. trainer.train(model=net, training_params=train_params,
  61. train_loader=classification_test_dataloader(batch_size=4, dataset_size=5),
  62. valid_loader=classification_test_dataloader(batch_size=4, dataset_size=5))
  63. # ALTHOUGH NOT SEEN IN HERE, THE 4TH EPOCH USES LR=1, SO THIS IS THE EXPECTED LIST AS WE COLLECT
  64. # THE LRS AFTER THE UPDATE
  65. self.assertListEqual(lrs, expected_lrs)
  66. def test_warmup_initial_lr(self):
  67. # Define model
  68. net = LeNet()
  69. trainer = Trainer("test_warmup_initial_lr")
  70. lrs = []
  71. phase_callbacks = [TestLRCallback(lr_placeholder=lrs)]
  72. train_params = {"max_epochs": 5, "lr_updates": [], "lr_decay_factor": 0.1, "lr_mode": "step",
  73. "lr_warmup_epochs": 3, "loss": "cross_entropy", "optimizer": 'SGD',
  74. "criterion_params": {}, "optimizer_params": {"weight_decay": 1e-4, "momentum": 0.9},
  75. "train_metrics_list": [Accuracy()], "valid_metrics_list": [Accuracy()],
  76. "metric_to_watch": "Accuracy",
  77. "greater_metric_to_watch_is_better": True, "ema": False, "phase_callbacks": phase_callbacks,
  78. "warmup_mode": "linear_step", "initial_lr": 1, "warmup_initial_lr": 4.}
  79. expected_lrs = [4., 3., 2., 1., 1.]
  80. trainer.train(model=net, training_params=train_params,
  81. train_loader=classification_test_dataloader(batch_size=4, dataset_size=5),
  82. valid_loader=classification_test_dataloader(batch_size=4, dataset_size=5))
  83. self.assertListEqual(lrs, expected_lrs)
  84. def test_custom_lr_warmup(self):
  85. # Define model
  86. net = LeNet()
  87. trainer = Trainer("custom_lr_warmup_test")
  88. lrs = []
  89. phase_callbacks = [TestLRCallback(lr_placeholder=lrs)]
  90. train_params = {"max_epochs": 5, "lr_updates": [], "lr_decay_factor": 0.1, "lr_mode": "step",
  91. "lr_warmup_epochs": 3, "loss": "cross_entropy", "optimizer": 'SGD',
  92. "criterion_params": {}, "optimizer_params": {"weight_decay": 1e-4, "momentum": 0.9},
  93. "train_metrics_list": [Accuracy()], "valid_metrics_list": [Accuracy()],
  94. "metric_to_watch": "Accuracy",
  95. "greater_metric_to_watch_is_better": True, "ema": False, "phase_callbacks": phase_callbacks,
  96. "warmup_mode": ExponentialWarmupLRCallback, "initial_lr": 1., "warmup_initial_lr": 0.1}
  97. expected_lrs = [0.1, 0.18102751585334242, 0.40128313980266034, 1.0, 1.0]
  98. trainer.train(model=net, training_params=train_params,
  99. train_loader=classification_test_dataloader(batch_size=4),
  100. valid_loader=classification_test_dataloader(batch_size=4))
  101. self.assertListEqual(lrs, expected_lrs)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
Discard
Tip!

Press p to see the previous file, or n to see the next file