Register
Login
Resources
Docs Blog Datasets Glossary Case Studies Tutorials & Webinars
Product
Data Engine LLMs Platform Enterprise
Pricing Explore
Connect to our Discord channel

#869 Add DagsHub Logger to Super Gradients

Merged
Ghost merged 1 commit into Deci-AI:master from timho102003:dagshub_logger
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
  1. import logging
  2. import os
  3. import platform
  4. import psutil
  5. from super_gradients.common.environment.env_variables import env_variables
  6. def mute_subprocesses():
  7. """Mute (prints, warnings and all logs except ERRORS) of some subprocesses to avoid having duplicates in the logs."""
  8. # When running DDP, mute all nodes except for the master node
  9. if int(env_variables.LOCAL_RANK) > 0:
  10. mute_current_process()
  11. mute_non_linux_dataloader_worker_process()
  12. def mute_current_process():
  13. """Mute prints, warnings and all logs except ERRORS. This is meant when running multiple processes."""
  14. # Ignore warnings
  15. import warnings
  16. warnings.filterwarnings("ignore")
  17. # Ignore prints
  18. import sys
  19. sys.stdout = open(os.devnull, "w")
  20. # Only show ERRORS
  21. process_loggers = [logging.getLogger(name) for name in logging.root.manager.loggerDict]
  22. for logger in process_loggers:
  23. logger.setLevel(logging.ERROR)
  24. def mute_non_linux_dataloader_worker_process() -> None:
  25. """Mute any worker process when running on mac/windows.
  26. This is required because the dataloader workers are "spawned" on mac/windows and "forked" on linux.
  27. The consequence being that the on mac/windows every module will be imported on each worker process, leading to a huge number of prints/logs that are
  28. displayed on import.
  29. For more information: https://pytorch.org/docs/stable/data.html#platform-specific-behaviors
  30. To avoid this, we mute the dataloader workers when running on mac/windows.
  31. Note:
  32. We assume that the process tree looks like this:
  33. Without DDP:
  34. ... -> main_process -> worker_process
  35. With DDP:
  36. ... -> main_process -> node_process -> worker_process
  37. Knowing that depending on how the script is launched, main_process might be child of other non "python" processes such as:
  38. ssh(non-python) -> pycharm(non-python) -> main_process(python) -> ...
  39. """
  40. if is_non_linux_dataloader_worker_process():
  41. mute_current_process()
  42. def is_non_linux_dataloader_worker_process() -> bool:
  43. """Check if current process is a dataloader worker process on a non linux device."""
  44. if any(os_name in platform.platform() for os_name in ["macOS", "Windows"]):
  45. # When using DDP with SG launcher, we expect the worker process to have 2 parents processes using python, and only 1 otherwise.
  46. # Note that this is a "root_process" is the root process only if current process is a worker process
  47. if int(env_variables.LOCAL_RANK) == -1:
  48. # NO DDP
  49. main_process = psutil.Process().parent()
  50. elif os.environ.get("TORCHELASTIC_RUN_ID") == "sg_initiated":
  51. # DDP launched using SG logic
  52. main_process = psutil.Process().parent().parent()
  53. else:
  54. # DDP launched using torch.distributed.launch or torchrun
  55. main_process = psutil.Process().parent()
  56. is_worker_process = main_process and "python" in main_process.name()
  57. if is_worker_process:
  58. return True
  59. return False
Discard
Tip!

Press p to see the previous file, or n to see the next file