# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

import os
import shutil
import socket
import sys
import tempfile

from . import USER_CONFIG_DIR
from .torch_utils import TORCH_1_9


def find_free_network_port() -> int:
    """
    Find a free port on localhost.

    It is useful in single-node training when we don't want to connect to a real main node but have to set the
    `MASTER_PORT` environment variable.

    Returns:
        (int): The available network port number.
    """
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind(("127.0.0.1", 0))
        return s.getsockname()[1]  # port
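
# Usage sketch (illustrative, not part of the original module): in single-node DDP the port found
# above is typically exported as MASTER_PORT before worker processes are spawned, e.g.:
#
#   import os
#   os.environ["MASTER_PORT"] = str(find_free_network_port())
#
# The socket is closed before the port is used, so another process could in principle claim it
# first; in practice this race is rarely a problem on a single machine.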


def generate_ddp_file(trainer):
    """
    Generate a DDP (Distributed Data Parallel) file for multi-GPU training.

    This function creates a temporary Python file that enables distributed training across multiple GPUs. The file
    contains the necessary configuration to initialize the trainer in a distributed environment.

    Args:
        trainer (object): The trainer object containing training configuration and arguments.
            Must have args attribute and be a class instance.

    Returns:
        (str): Path to the generated temporary DDP file.

    Notes:
        The generated file is saved in the USER_CONFIG_DIR/DDP directory and includes:
        - Trainer class import
        - Configuration overrides from the trainer arguments
        - Model path configuration
        - Training initialization code
    """
    module, name = f"{trainer.__class__.__module__}.{trainer.__class__.__name__}".rsplit(".", 1)

    content = f"""
# Ultralytics Multi-GPU training temp file (should be automatically deleted after use)
overrides = {vars(trainer.args)}

if __name__ == "__main__":
    from {module} import {name}
    from ultralytics.utils import DEFAULT_CFG_DICT

    cfg = DEFAULT_CFG_DICT.copy()
    cfg.update(save_dir='')  # handle the extra key 'save_dir'
    trainer = {name}(cfg=cfg, overrides=overrides)
    trainer.args.model = "{getattr(trainer.hub_session, "model_url", trainer.args.model)}"
    results = trainer.train()
"""
    (USER_CONFIG_DIR / "DDP").mkdir(exist_ok=True)
    with tempfile.NamedTemporaryFile(
        prefix="_temp_",
        suffix=f"{id(trainer)}.py",
        mode="w+",
        encoding="utf-8",
        dir=USER_CONFIG_DIR / "DDP",
        delete=False,
    ) as file:
        file.write(content)
    return file.name
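
# Usage sketch (illustrative; `trainer` is assumed to be any BaseTrainer-like object exposing an
# `args` namespace):
#
#   ddp_file = generate_ddp_file(trainer)
#   print(ddp_file)  # e.g. <USER_CONFIG_DIR>/DDP/_temp_...<id(trainer)>.py
#
# The file is later executed once per process by the launcher command built in generate_ddp_command().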


def generate_ddp_command(world_size, trainer):
    """
    Generate command for distributed training.

    Args:
        world_size (int): Number of processes to spawn for distributed training.
        trainer (object): The trainer object containing configuration for distributed training.

    Returns:
        cmd (List[str]): The command to execute for distributed training.
        file (str): Path to the temporary file created for DDP training.
    """
    import __main__  # noqa local import to avoid https://github.com/Lightning-AI/pytorch-lightning/issues/15218

    if not trainer.resume:
        shutil.rmtree(trainer.save_dir)  # remove the save_dir
    file = generate_ddp_file(trainer)
    dist_cmd = "torch.distributed.run" if TORCH_1_9 else "torch.distributed.launch"
    port = find_free_network_port()
    cmd = [sys.executable, "-m", dist_cmd, "--nproc_per_node", f"{world_size}", "--master_port", f"{port}", file]
    return cmd, file
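
# End-to-end sketch (illustrative of how a caller would typically consume this API; `trainer` and
# `world_size` are assumed to be provided by the caller):
#
#   import subprocess
#   cmd, ddp_file = generate_ddp_command(world_size, trainer)
#   try:
#       subprocess.run(cmd, check=True)
#   finally:
#       ddp_cleanup(trainer, ddp_file)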


def ddp_cleanup(trainer, file):
    """
    Delete temporary file if created during distributed data parallel (DDP) training.

    This function checks if the provided file contains the trainer's ID in its name, indicating it was created as a
    temporary file for DDP training, and deletes it if so.

    Args:
        trainer (object): The trainer object used for distributed training.
        file (str): Path to the file that might need to be deleted.

    Examples:
        >>> trainer = YOLOTrainer()
        >>> file = "/tmp/ddp_temp_123456789.py"
        >>> ddp_cleanup(trainer, file)
    """
    if f"{id(trainer)}.py" in file:  # if temp_file suffix in file
        os.remove(file)