#578 Feature/sg 516 support head replacement for local pretrained weights unknown dataset

Merged
Ghost merged 1 commit into Deci-AI:master from deci-ai:feature/SG-516_support_head_replacement_for_local_pretrained_weights_unknown_dataset
import math

import torch
import torch.distributed as dist
from torch.utils.data import Sampler


# TODO: Add unit test for RepeatAugSampler once DDP unit tests are supported.
class RepeatAugSampler(Sampler):
    """
    Sampler that restricts data loading to a subset of the dataset for distributed training,
    with repeated augmentation.
    It ensures that each augmented version of a sample is visible to a different process (GPU).
    Heavily based on torch.utils.data.DistributedSampler.

    This sampler was taken from https://github.com/facebookresearch/deit/blob/0c4b8f60/samplers.py
    Copyright (c) 2015-present, Facebook, Inc.

    Below code is modified from:
    https://github.com/rwightman/pytorch-image-models/blame/master/timm/data/distributed_sampler.py

    Note: this sampler is currently supported only for DDP training.

    Arguments:
        dataset (torch.utils.data.Dataset): dataset to sample from.
        num_replicas (int): number of dataset replicas; defaults to the world size when None.
        rank (int): rank of the current process; defaults to the process's global rank when None.
        shuffle (bool): whether to shuffle the dataset indices (default=True).
        num_repeats (int): number of repetitions of each example (default=3).
        selected_round (int): when > 0, the number of samples selected per epoch for each rank is
            int(math.floor(len(self.dataset) // selected_round * selected_round / selected_ratio))
            (default=256).
        selected_ratio (int): ratio to reduce selected samples by; num_replicas if 0.
    """

    def __init__(
        self,
        dataset: torch.utils.data.Dataset,
        num_replicas: int = None,
        rank: int = None,
        shuffle: bool = True,
        num_repeats: int = 3,
        selected_round: int = 256,
        selected_ratio: int = 0,
    ):
        if num_replicas is None:
            if not dist.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = dist.get_world_size()
        if rank is None:
            if not dist.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = dist.get_rank()
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.shuffle = shuffle
        self.num_repeats = num_repeats
        self.epoch = 0
        self.num_samples = int(math.ceil(len(self.dataset) * num_repeats / self.num_replicas))
        self.total_size = self.num_samples * self.num_replicas
        # Determine the number of samples to select per epoch for each rank.
        # num_selected logic defaults to be the same as the original RASampler impl, but this one can be tweaked
        # via the selected_ratio and selected_round args.
        selected_ratio = selected_ratio or num_replicas  # ratio to reduce selected samples by, num_replicas if 0
        if selected_round:
            self.num_selected_samples = int(math.floor(
                len(self.dataset) // selected_round * selected_round / selected_ratio))
        else:
            self.num_selected_samples = int(math.ceil(len(self.dataset) / selected_ratio))

    def __iter__(self):
        # deterministically shuffle based on epoch
        g = torch.Generator()
        g.manual_seed(self.epoch)
        if self.shuffle:
            indices = torch.randperm(len(self.dataset), generator=g)
        else:
            indices = torch.arange(start=0, end=len(self.dataset))

        # produce repeats e.g. [0, 0, 0, 1, 1, 1, 2, 2, 2, ...]
        if isinstance(self.num_repeats, float) and not self.num_repeats.is_integer():
            # resample for repeats w/ non-integer ratio
            repeat_size = math.ceil(self.num_repeats * len(self.dataset))
            indices = indices[torch.tensor([int(i // self.num_repeats) for i in range(repeat_size)])]
        else:
            indices = torch.repeat_interleave(indices, repeats=int(self.num_repeats), dim=0)
        indices = indices.tolist()  # leaving as tensor thrashes dataloader memory

        # add extra samples to make it evenly divisible
        padding_size = self.total_size - len(indices)
        if padding_size > 0:
            indices += indices[:padding_size]
        assert len(indices) == self.total_size

        # subsample per rank
        indices = indices[self.rank:self.total_size:self.num_replicas]
        assert len(indices) == self.num_samples

        # return up to num selected samples
        return iter(indices[:self.num_selected_samples])

    def __len__(self):
        return self.num_selected_samples

    def set_epoch(self, epoch):
        self.epoch = epoch
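For context, here is a minimal usage sketch of how a sampler like this is typically wired into a DataLoader under DDP. It is not part of this PR: the dataset, batch size, and epoch count are placeholders, and it assumes torch.distributed has already been initialized (e.g. via torchrun). The key point it illustrates is calling set_epoch once per epoch so the per-epoch shuffle seed changes.

# Illustrative sketch only; dataset, batch size, and epoch count are placeholders.
# Assumes torch.distributed is already initialized (e.g. launched with torchrun).
from torch.utils.data import DataLoader
from torchvision import transforms
from torchvision.datasets import FakeData

dataset = FakeData(size=1024, transform=transforms.ToTensor())  # placeholder dataset
sampler = RepeatAugSampler(dataset, num_repeats=3)  # rank/world size taken from torch.distributed
loader = DataLoader(dataset, batch_size=64, sampler=sampler, num_workers=4, drop_last=True)

for epoch in range(10):
    sampler.set_epoch(epoch)  # re-seed the deterministic shuffle for this epoch
    for images, targets in loader:
        pass  # training step goes here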