Register
Login
Resources
Docs Blog Datasets Glossary Case Studies Tutorials & Webinars
Product
Data Engine LLMs Platform Enterprise
Pricing Explore
Connect to our Discord channel

#381 Feature/sg 000 connect to lab

Merged
Ghost merged 1 commit into Deci-AI:master from deci-ai:feature/sg-000_connect_to_lab
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
  1. from super_gradients.training.utils import HpmStruct
  2. from copy import deepcopy
# Default training hyper-parameters. TrainingParams seeds itself with a deepcopy of this
# dict, then applies any user-supplied overrides on top (see TrainingParams.__init__).
DEFAULT_TRAINING_PARAMS = {"lr_warmup_epochs": 0,
                           "lr_cooldown_epochs": 0,
                           "warmup_initial_lr": None,
                           "cosine_final_lr_ratio": 0.01,
                           "optimizer": "SGD",
                           "criterion_params": {},
                           "ema": False,  # exponential moving average of model weights; params below
                           "batch_accumulate": 1,  # number of batches to accumulate before every backward pass
                           "ema_params": {},
                           "zero_weight_decay_on_bias_and_bn": False,
                           "load_opt_params": True,
                           "run_validation_freq": 1,
                           "save_model": True,
                           "metric_to_watch": "Accuracy",
                           "launch_tensorboard": False,
                           "tb_files_user_prompt": False,  # Asks User for Tensorboard Deletion Prompt
                           "silent_mode": False,  # Silents the Print outs
                           "mixed_precision": False,
                           "tensorboard_port": None,
                           "save_ckpt_epoch_list": [],  # indices where the ckpt will save automatically
                           "average_best_models": True,
                           "dataset_statistics": False,  # add a dataset statistical analysis and sample images to tensorboard
                           "save_tensorboard_to_s3": False,
                           "lr_schedule_function": None,
                           "train_metrics_list": [],
                           "valid_metrics_list": [],
                           "loss_logging_items_names": ["Loss"],
                           "greater_metric_to_watch_is_better": True,
                           "precise_bn": False,
                           "precise_bn_batch_size": None,
                           "seed": 42,
                           "lr_mode": None,
                           "phase_callbacks": None,
                           "log_installed_packages": True,
                           "save_full_train_log": False,
                           # Logger backend selection and its nested configuration.
                           "sg_logger": "base_sg_logger",
                           "sg_logger_params":
                               {"tb_files_user_prompt": False,  # Asks User for Tensorboard Deletion Prompt
                                "project_name": "",
                                "launch_tensorboard": False,
                                "tensorboard_port": None,
                                "save_checkpoints_remote": False,  # upload checkpoint files to s3
                                "save_tensorboard_remote": False,  # upload tensorboard files to s3
                                "save_logs_remote": False},  # upload log files to s3
                           "warmup_mode": "linear_step",
                           "step_lr_update_freq": None,
                           "lr_updates": [],
                           'clip_grad_norm': None,
                           'pre_prediction_callback': None,
                           'ckpt_best_name': 'ckpt_best.pth',
                           # Quantization-aware training settings (used only when enable_qat is True).
                           'enable_qat': False,
                           'qat_params': {
                               "start_epoch": 0,
                               "quant_modules_calib_method": "percentile",
                               "per_channel_quant_modules": False,
                               "calibrate": True,
                               "calibrated_model_path": None,
                               "calib_data_loader": None,
                               "num_calib_batches": 2,
                               "percentile": 99.99
                           },
                           "resume": False,
                           "resume_path": None,
                           "resume_strict_load": False
                           }
# Per-optimizer default parameter sets, keyed by the "optimizer" entry in the training params.
DEFAULT_OPTIMIZER_PARAMS_SGD = {"weight_decay": 1e-4, "momentum": 0.9}
DEFAULT_OPTIMIZER_PARAMS_ADAM = {"weight_decay": 1e-4}
DEFAULT_OPTIMIZER_PARAMS_RMSPROP = {"weight_decay": 1e-4, "momentum": 0.9}
DEFAULT_OPTIMIZER_PARAMS_RMSPROPTF = {"weight_decay": 1e-4, "momentum": 0.9}
# JSON-Schema-style validation schema applied to TrainingParams via set_schema()/validate().
# The "if"/"then" clause makes lr_updates and lr_decay_factor mandatory only for step lr_mode.
TRAINING_PARAM_SCHEMA = {"type": "object",
                         "properties": {
                             "max_epochs": {"type": "number", "minimum": 1},
                             # FIXME: CHECK THE IMPORTANCE OF THE COMMENTED SCHEMA- AS IT CAUSES HYDRA USE TO CRASH
                             # "lr_updates": {"type": "array", "minItems": 1},
                             "lr_decay_factor": {"type": "number", "minimum": 0, "maximum": 1},
                             "lr_warmup_epochs": {"type": "number", "minimum": 0, "maximum": 10},
                             "initial_lr": {"type": "number", "exclusiveMinimum": 0, "maximum": 10}
                         },
                         "if": {
                             "properties": {"lr_mode": {"const": "step"}}
                         },
                         "then": {
                             "required": ["lr_updates", "lr_decay_factor"]
                         },
                         "required": ["max_epochs", "lr_mode", "initial_lr", "loss"]
                         }
  89. class TrainingParams(HpmStruct):
  90. def __init__(self, **entries):
  91. # WE initialize by the default training params, overridden by the provided params
  92. default_training_params = deepcopy(DEFAULT_TRAINING_PARAMS)
  93. super().__init__(**default_training_params)
  94. self.set_schema(TRAINING_PARAM_SCHEMA)
  95. if len(entries) > 0:
  96. self.override(**entries)
  97. def override(self, **entries):
  98. super().override(**entries)
  99. self.validate()
Discard
Tip!

Press p to see the previous file, or n to see the next file.