#609 CI fix

Merged
Ghost merged 1 commit into Deci-AI:master from deci-ai:bugfix/infra-000_ci
from torch import nn

from super_gradients.training.models.sg_module import SgModule
from super_gradients.training.utils import get_param


def create_conv_module(in_channels, out_channels, kernel_size=3, stride=1):
    padding = (kernel_size - 1) // 2
    nn_sequential_module = nn.Sequential()
    nn_sequential_module.add_module("Conv2d", nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding, bias=False))
    nn_sequential_module.add_module("BatchNorm2d", nn.BatchNorm2d(out_channels))
    nn_sequential_module.add_module("LeakyRelu", nn.LeakyReLU())
    return nn_sequential_module

# Residual block
class DarkResidualBlock(nn.Module):
    """
    DarkResidualBlock - The Darknet Residual Block
    """

    def __init__(self, in_channels, shortcut=True):
        super(DarkResidualBlock, self).__init__()
        self.shortcut = shortcut
        reduced_channels = int(in_channels / 2)
        self.layer1 = create_conv_module(in_channels, reduced_channels, kernel_size=1)
        self.layer2 = create_conv_module(reduced_channels, in_channels)

    def forward(self, x):
        residual = x
        out = self.layer1(x)
        out = self.layer2(out)
        # ADD THE SKIP CONNECTION ONLY WHEN THE SHORTCUT IS ENABLED
        if self.shortcut:
            out += residual
        return out

class Darknet53Base(SgModule):
    def __init__(self):
        super(Darknet53Base, self).__init__()
        # THE MODULES LIST IS ACCESSIBLE FROM OUTSIDE THE CLASS, SO ITS STRUCTURE CAN BE CHANGED
        self.modules_list = nn.ModuleList()
        self.modules_list.append(create_conv_module(3, 32))                                                   # 0
        self.modules_list.append(create_conv_module(32, 64, stride=2))                                        # 1
        self.modules_list.append(self._make_layer(DarkResidualBlock, in_channels=64, num_blocks=1))           # 2
        self.modules_list.append(create_conv_module(64, 128, stride=2))                                       # 3
        self.modules_list.append(self._make_layer(DarkResidualBlock, in_channels=128, num_blocks=2))          # 4
        self.modules_list.append(create_conv_module(128, 256, stride=2))                                      # 5
        self.modules_list.append(self._make_layer(DarkResidualBlock, in_channels=256, num_blocks=8))          # 6
        self.modules_list.append(create_conv_module(256, 512, stride=2))                                      # 7
        self.modules_list.append(self._make_layer(DarkResidualBlock, in_channels=512, num_blocks=8))          # 8
        self.modules_list.append(create_conv_module(512, 1024, stride=2))                                     # 9
        self.modules_list.append(self._make_layer(DarkResidualBlock, in_channels=1024, num_blocks=4))         # 10

    def forward(self, x):
        out = x
        for module in self.modules_list:
            out = module(out)
        return out

    def _make_layer(self, block, in_channels, num_blocks):
        layers = []
        for _ in range(num_blocks):
            layers.append(block(in_channels))
        return nn.Sequential(*layers)

class Darknet53(Darknet53Base):
    def __init__(self, arch_params=None, backbone_mode=True, num_classes=None):
        super(Darknet53, self).__init__()
        # ALLOW PASSING PARAMETERS VIA ARCH_PARAMS WITHOUT BREAKING THE YOLOV3 INTEGRATION
        self.backbone_mode = get_param(arch_params, "backbone_mode", backbone_mode)
        self.num_classes = get_param(arch_params, "num_classes", num_classes)

        if not self.backbone_mode:
            # IF NOT USED AS A BACKBONE BUT AS A CLASSIFIER, ADD THE CLASSIFICATION LAYERS
            if self.num_classes is not None:
                nn_sequential_block = nn.Sequential()
                nn_sequential_block.add_module("global_avg_pool", nn.AdaptiveAvgPool2d((1, 1)))
                nn_sequential_block.add_module("view", ViewModule(1024))
                nn_sequential_block.add_module("fc", nn.Linear(1024, self.num_classes))
                self.modules_list.append(nn_sequential_block)
            else:
                raise ValueError("num_classes must be specified to use Darknet53 as a classifier")

    def get_modules_list(self):
        return self.modules_list

    def forward(self, x):
        """
        forward - Forward pass over the modules list
        :param x: The input data
        :return: The backbone feature map in backbone mode, or class logits in classifier mode
        """
        return super().forward(x)

# Flatten module used by the classification head
class ViewModule(nn.Module):
    """
    Returns a reshaped (flattened) version of the input, to be used in non-backbone (classifier) mode
    """

    def __init__(self, features=1024):
        super(ViewModule, self).__init__()
        self.features = features

    def forward(self, x):
        return x.view(-1, self.features)
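
For context, a minimal usage sketch of the model defined in this file. It assumes the classes above are in scope (e.g. imported from this module), that get_param falls back to the keyword defaults when arch_params is None, and uses a 256x256 RGB dummy input as a smoke test; the shapes in the comments follow from the five stride-2 convolutions and are not asserted by this PR.

import torch

# Hypothetical smoke test (not part of this PR): exercise both modes of Darknet53.

# Backbone mode (default): output is the final 1024-channel feature map.
backbone = Darknet53(backbone_mode=True)
features = backbone(torch.randn(1, 3, 256, 256))
print(features.shape)  # torch.Size([1, 1024, 8, 8]) since 256 / 2**5 = 8

# Classifier mode: global average pooling + flatten + fully-connected head.
classifier = Darknet53(backbone_mode=False, num_classes=10)
logits = classifier(torch.randn(1, 3, 256, 256))
print(logits.shape)  # torch.Size([1, 10])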