#378 Feature/sg 281 add kd notebook

Merged
Ghost merged 1 commit into Deci-AI:master from deci-ai:feature/SG-281-add_kd_notebook
import os
from typing import Union, Optional, Any

import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
import torch

from super_gradients.common.abstractions.abstract_logger import get_logger
from super_gradients.common.sg_loggers.base_sg_logger import BaseSGLogger
from super_gradients.common.environment.env_helpers import multi_process_safe

logger = get_logger(__name__)

try:
    import wandb
except (ModuleNotFoundError, ImportError, NameError):
    pass  # no action or logging - this is normal in most cases

WANDB_ID_PREFIX = 'wandb_id.'
WANDB_INCLUDE_FILE_NAME = '.wandbinclude'


class WandBSGLogger(BaseSGLogger):

    def __init__(self, project_name: str, experiment_name: str, storage_location: str, resumed: bool, training_params: dict, checkpoints_dir_path: str,
                 tb_files_user_prompt: bool = False, launch_tensorboard: bool = False, tensorboard_port: int = None, save_checkpoints_remote: bool = True,
                 save_tensorboard_remote: bool = True, save_logs_remote: bool = True, entity: Optional[str] = None, api_server: Optional[str] = None,
                 save_code: bool = False, **kwargs):
        """
        :param experiment_name: Used for logging and loading purposes
        :param storage_location: If set to 's3' (i.e. s3://my-bucket), saves the checkpoints in AWS S3; otherwise saves the checkpoints locally
        :param resumed: If True, old tensorboard files will *not* be deleted when tb_files_user_prompt=True
        :param tb_files_user_prompt: Asks the user for a tensorboard-deletion prompt.
        :param launch_tensorboard: Whether to launch a TensorBoard process.
        :param tensorboard_port: Specific port number for tensorboard to use when launched (when set to None, some free port number will be used)
        :param save_checkpoints_remote: Saves checkpoints in S3.
        :param save_tensorboard_remote: Saves tensorboard files in S3.
        :param save_logs_remote: Saves log files in S3.
        :param save_code: Save the current code to wandb
        """
        self.s3_location_available = storage_location.startswith('s3')
        super().__init__(project_name, experiment_name, storage_location, resumed, training_params,
                         checkpoints_dir_path, tb_files_user_prompt, launch_tensorboard, tensorboard_port,
                         self.s3_location_available, self.s3_location_available, self.s3_location_available)

        if api_server is not None:
            if api_server != os.getenv('WANDB_BASE_URL'):
                logger.warning(f'WANDB_BASE_URL environment parameter not set to {api_server}. Setting the parameter')
                os.putenv('WANDB_BASE_URL', api_server)

        wandb_id = None
        self.resumed = resumed
        if self.resumed:
            wandb_id = self._get_wandb_id()

        run = wandb.init(project=project_name, name=experiment_name, entity=entity, resume=resumed, id=wandb_id, **kwargs)
        if save_code:
            self._save_code()

        self._set_wandb_id(run.id)
        self.save_checkpoints_wandb = save_checkpoints_remote
        self.save_tensorboard_wandb = save_tensorboard_remote
        self.save_logs_wandb = save_logs_remote
    @multi_process_safe
    def _save_code(self):
        """
        Save the current code to wandb.
        If a file named .wandbinclude is available in the root dir of the project, the settings will be taken from that file.
        Otherwise, all python files in the current working dir (recursively) will be saved.
        File structure: a single relative path or a single file type on each line.
        i.e:
        src
        tests
        examples
        *.py
        *.yaml
        The paths and types in the file are the paths and types to be included in the code upload to wandb.
        """
        base_path, paths, types = self._get_include_paths()

        if len(types) > 0:

            def func(path):
                for p in paths:
                    if path.startswith(p):
                        for t in types:
                            if path.endswith(t):
                                return True
                return False

            include_fn = func
        else:
            include_fn = lambda path: path.endswith(".py")

        if base_path != ".":
            wandb.run.log_code(base_path, include_fn=include_fn)
        else:
            wandb.run.log_code(".", include_fn=include_fn)
    @multi_process_safe
    def add_config(self, tag: str, config: dict):
        super(WandBSGLogger, self).add_config(tag=tag, config=config)
        wandb.config.update(config, allow_val_change=self.resumed)

    @multi_process_safe
    def add_scalar(self, tag: str, scalar_value: float, global_step: int = 0):
        super(WandBSGLogger, self).add_scalar(tag=tag, scalar_value=scalar_value, global_step=global_step)
        wandb.log(data={tag: scalar_value}, step=global_step)

    @multi_process_safe
    def add_scalars(self, tag_scalar_dict: dict, global_step: int = 0):
        super(WandBSGLogger, self).add_scalars(tag_scalar_dict=tag_scalar_dict, global_step=global_step)
        wandb.log(data=tag_scalar_dict, step=global_step)

    @multi_process_safe
    def add_image(self, tag: str, image: Union[torch.Tensor, np.array, Image.Image], data_format='CHW', global_step: int = 0):
        super(WandBSGLogger, self).add_image(tag=tag, image=image, data_format=data_format, global_step=global_step)
        if isinstance(image, torch.Tensor):
            image = image.cpu().detach().numpy()
        if image.shape[0] < 5:
            image = image.transpose([1, 2, 0])
        wandb.log(data={tag: wandb.Image(image, caption=tag)}, step=global_step)

    @multi_process_safe
    def add_images(self, tag: str, images: Union[torch.Tensor, np.array], data_format='NCHW', global_step: int = 0):
        super(WandBSGLogger, self).add_images(tag=tag, images=images, data_format=data_format, global_step=global_step)
        wandb_images = []
        for im in images:
            if isinstance(im, torch.Tensor):
                im = im.cpu().detach().numpy()
            if im.shape[0] < 5:
                im = im.transpose([1, 2, 0])
            wandb_images.append(wandb.Image(im))
        wandb.log({tag: wandb_images}, step=global_step)

    @multi_process_safe
    def add_video(self, tag: str, video: Union[torch.Tensor, np.array], global_step: int = 0):
        super().add_video(tag, video, global_step)
        if video.ndim > 4:
            for index, vid in enumerate(video):
                self.add_video(tag=f'{tag}_{index}', video=vid, global_step=global_step)
        else:
            if isinstance(video, torch.Tensor):
                video = video.cpu().detach().numpy()
            wandb.log({tag: wandb.Video(video, fps=4)}, step=global_step)

    @multi_process_safe
    def add_histogram(self, tag: str, values: Union[torch.Tensor, np.array], bins: str, global_step: int = 0):
        super().add_histogram(tag, values, bins, global_step)
        wandb.log({tag: wandb.Histogram(values, num_bins=bins)}, step=global_step)

    @multi_process_safe
    def add_text(self, tag: str, text_string: str, global_step: int = 0):
        super().add_text(tag, text_string, global_step)
        wandb.log({tag: text_string}, step=global_step)

    @multi_process_safe
    def add_figure(self, tag: str, figure: plt.figure, global_step: int = 0):
        super().add_figure(tag, figure, global_step)
        wandb.log({tag: figure}, step=global_step)

    @multi_process_safe
    def close(self):
        super().close()
        wandb.finish()
    @multi_process_safe
    def add_file(self, file_name: str = None):
        super().add_file(file_name)
        wandb.save(glob_str=os.path.join(self._local_dir, file_name), base_path=self._local_dir, policy='now')

    @multi_process_safe
    def upload(self):
        super().upload()

        if self.save_tensorboard_wandb:
            wandb.save(glob_str=self._get_tensorboard_file_name(), base_path=self._local_dir, policy='now')

        if self.save_logs_wandb:
            wandb.save(glob_str=self.log_file_path, base_path=self._local_dir, policy='now')

    @multi_process_safe
    def add_checkpoint(self, tag: str, state_dict: dict, global_step: int = 0):
        name = f'ckpt_{global_step}.pth' if tag is None else tag
        if not name.endswith('.pth'):
            name += '.pth'

        path = os.path.join(self._local_dir, name)
        torch.save(state_dict, path)

        if self.save_checkpoints_wandb:
            if self.s3_location_available:
                self.model_checkpoints_data_interface.save_remote_checkpoints_file(self.experiment_name, self._local_dir, name)
            wandb.save(glob_str=path, base_path=self._local_dir, policy='now')
    def _get_tensorboard_file_name(self):
        try:
            tb_file_path = self.tensorboard_writer.file_writer.event_writer._file_name
        except RuntimeError:
            logger.warning('tensorboard file could not be located')
            return None

        return tb_file_path

    def _get_wandb_id(self):
        for file in os.listdir(self._local_dir):
            if file.startswith(WANDB_ID_PREFIX):
                return file.replace(WANDB_ID_PREFIX, '')

    def _set_wandb_id(self, id):
        for file in os.listdir(self._local_dir):
            if file.startswith(WANDB_ID_PREFIX):
                os.remove(os.path.join(self._local_dir, file))

        os.mknod(os.path.join(self._local_dir, f'{WANDB_ID_PREFIX}{id}'))

    def add(self, tag: str, obj: Any, global_step: int = None):
        pass

    def _get_include_paths(self):
        """
        Look for a .wandbinclude file in parent dirs and return the lists of paths and types defined in the file.
        File structure is a single relative path (i.e. src/) or a single file type (i.e. *.py) on each line.
        The paths and types in the file are the paths and types to be included in the code upload to wandb.
        :return: if the file exists, the base path and the lists of paths and types defined in the file
        """
        wandb_include_file_path = self._search_upwards_for_file(WANDB_INCLUDE_FILE_NAME)
        if wandb_include_file_path is not None:
            with open(wandb_include_file_path) as file:
                lines = file.readlines()

            base_path = os.path.dirname(wandb_include_file_path)
            paths = []
            types = []
            for line in lines:
                line = line.strip().strip('\n')
                if line == "" or line.startswith("#"):
                    continue
                if line.startswith('*.'):
                    types.append(line.replace('*', ''))
                else:
                    paths.append(os.path.join(base_path, line))
            return base_path, paths, types

        return ".", [], []

    @staticmethod
    def _search_upwards_for_file(file_name: str):
        """
        Search the current directory and all directories above it for a file with a particular name.
        :param file_name: file name to look for.
        :return: the path of the first file found, or None if none was found
        """
        try:
            cur_dir = os.getcwd()
            while cur_dir != '/':
                if file_name in os.listdir(cur_dir):
                    return os.path.join(cur_dir, file_name)
                else:
                    cur_dir = os.path.dirname(cur_dir)
        except RuntimeError:
            return None

        return None
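
For context, a minimal usage sketch of the logger defined above. The parameter values (project name, experiment name, paths) are illustrative placeholders, not taken from this PR, and in a real training run the logger would typically be created by the library's training loop rather than instantiated directly.

# Illustrative sketch only - all names and paths below are hypothetical.
sg_logger = WandBSGLogger(
    project_name='my-project',            # W&B project to log under (hypothetical)
    experiment_name='kd_experiment',      # run name (hypothetical)
    storage_location='local',             # anything not starting with 's3' keeps checkpoints local
    resumed=False,
    training_params={},                   # passed through to BaseSGLogger
    checkpoints_dir_path='/tmp/ckpts',    # hypothetical local checkpoints dir
    save_code=True,                       # uploads code, honoring a .wandbinclude file if present
)
sg_logger.add_scalar('train_loss', 0.42, global_step=1)
sg_logger.close()

When save_code=True, a .wandbinclude file found in the working directory or any parent directory controls which files are uploaded; per the _save_code docstring, each line is either a relative path or a file type, e.g.:

src
tests
examples
*.py
*.yaml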