Register
Login
Resources
Docs Blog Datasets Glossary Case Studies Tutorials & Webinars
Product
Data Engine LLMs Platform Enterprise
Pricing Explore
Connect to our Discord channel

average_checkpoints.py 5.0 KB

You have to be logged in to leave a comment. Sign In
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
  1. #!/usr/bin/env python3
  2. import argparse
  3. import collections
  4. import torch
  5. import os
  6. import re
  7. def average_checkpoints(inputs):
  8. """Loads checkpoints from inputs and returns a model with averaged weights.
  9. Args:
  10. inputs: An iterable of string paths of checkpoints to load from.
  11. Returns:
  12. A dict of string keys mapping to various values. The 'model' key
  13. from the returned dict should correspond to an OrderedDict mapping
  14. string parameter names to torch Tensors.
  15. """
  16. params_dict = collections.OrderedDict()
  17. params_keys = None
  18. new_state = None
  19. for f in inputs:
  20. state = torch.load(
  21. f,
  22. map_location=(
  23. lambda s, _: torch.serialization.default_restore_location(s, 'cpu')
  24. ),
  25. )
  26. # Copies over the settings from the first checkpoint
  27. if new_state is None:
  28. new_state = state
  29. model_params = state['model']
  30. model_params_keys = list(model_params.keys())
  31. if params_keys is None:
  32. params_keys = model_params_keys
  33. elif params_keys != model_params_keys:
  34. raise KeyError(
  35. 'For checkpoint {}, expected list of params: {}, '
  36. 'but found: {}'.format(f, params_keys, model_params_keys)
  37. )
  38. for k in params_keys:
  39. if k not in params_dict:
  40. params_dict[k] = []
  41. p = model_params[k]
  42. if isinstance(p, torch.HalfTensor):
  43. p = p.float()
  44. params_dict[k].append(p)
  45. averaged_params = collections.OrderedDict()
  46. # v should be a list of torch Tensor.
  47. for k, v in params_dict.items():
  48. summed_v = None
  49. for x in v:
  50. summed_v = summed_v + x if summed_v is not None else x
  51. averaged_params[k] = summed_v / len(v)
  52. new_state['model'] = averaged_params
  53. return new_state
  54. def last_n_checkpoints(paths, n, update_based, upper_bound=None):
  55. assert len(paths) == 1
  56. path = paths[0]
  57. if update_based:
  58. pt_regexp = re.compile(r'checkpoint_\d+_(\d+)\.pt')
  59. else:
  60. pt_regexp = re.compile(r'checkpoint(\d+)\.pt')
  61. files = os.listdir(path)
  62. entries = []
  63. for f in files:
  64. m = pt_regexp.fullmatch(f)
  65. if m is not None:
  66. sort_key = int(m.group(1))
  67. if upper_bound is None or sort_key <= upper_bound:
  68. entries.append((sort_key, m.group(0)))
  69. if len(entries) < n:
  70. raise Exception('Found {} checkpoint files but need at least {}', len(entries), n)
  71. return [os.path.join(path, x[1]) for x in sorted(entries, reverse=True)[:n]]
  72. def main():
  73. parser = argparse.ArgumentParser(
  74. description='Tool to average the params of input checkpoints to '
  75. 'produce a new checkpoint',
  76. )
  77. # fmt: off
  78. parser.add_argument('--inputs', required=True, nargs='+',
  79. help='Input checkpoint file paths.')
  80. parser.add_argument('--output', required=True, metavar='FILE',
  81. help='Write the new checkpoint containing the averaged weights to this path.')
  82. num_group = parser.add_mutually_exclusive_group()
  83. num_group.add_argument('--num-epoch-checkpoints', type=int,
  84. help='if set, will try to find checkpoints with names checkpoint_xx.pt in the path specified by input, '
  85. 'and average last this many of them.')
  86. num_group.add_argument('--num-update-checkpoints', type=int,
  87. help='if set, will try to find checkpoints with names checkpoint_ee_xx.pt in the path specified by input, '
  88. 'and average last this many of them.')
  89. parser.add_argument('--checkpoint-upper-bound', type=int,
  90. help='when using --num-epoch-checkpoints, this will set an upper bound on which checkpoint to use, '
  91. 'e.g., with --num-epoch-checkpoints=10 --checkpoint-upper-bound=50, checkpoints 41-50 would be averaged.')
  92. # fmt: on
  93. args = parser.parse_args()
  94. print(args)
  95. num = None
  96. is_update_based = False
  97. if args.num_update_checkpoints is not None:
  98. num = args.num_update_checkpoints
  99. is_update_based = True
  100. elif args.num_epoch_checkpoints is not None:
  101. num = args.num_epoch_checkpoints
  102. assert args.checkpoint_upper_bound is None or args.num_epoch_checkpoints is not None, \
  103. '--checkpoint-upper-bound requires --num-epoch-checkpoints'
  104. assert args.num_epoch_checkpoints is None or args.num_update_checkpoints is None, \
  105. 'Cannot combine --num-epoch-checkpoints and --num-update-checkpoints'
  106. if num is not None:
  107. args.inputs = last_n_checkpoints(
  108. args.inputs, num, is_update_based, upper_bound=args.checkpoint_upper_bound,
  109. )
  110. print('averaging checkpoints: ', args.inputs)
  111. new_state = average_checkpoints(args.inputs)
  112. torch.save(new_state, args.output)
  113. print('Finished writing averaged checkpoint to {}.'.format(args.output))
  114. if __name__ == '__main__':
  115. main()
Tip!

Press p to see the previous file, or n to see the next file

Comments

Loading...