#609 CI fix

Merged
Ghost merged 1 commit into Deci-AI:master from deci-ai:bugfix/infra-000_ci
'''SENet in PyTorch.

Squeeze-and-Excitation Networks (SENet) won the ImageNet 2017 classification challenge.
Paper: "Squeeze-and-Excitation Networks" (Hu et al.), https://arxiv.org/abs/1709.01507

Code adapted from https://github.com/fastai/imagenet-fast/blob/master/cifar10/models/cifar10/senet.py
'''
import torch
import torch.nn as nn
import torch.nn.functional as F

from super_gradients.training.models.sg_module import SgModule


class BasicBlock(nn.Module):
    """Post-activation residual block with a squeeze-and-excitation gate."""

    def __init__(self, in_planes, planes, stride=1):
        super(BasicBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)

        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, planes, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes)
            )

        # SE layers: 1x1 convolutions are used instead of nn.Linear so the
        # pooled descriptor keeps its NCHW shape
        self.fc1 = nn.Conv2d(planes, planes // 16, kernel_size=1)
        self.fc2 = nn.Conv2d(planes // 16, planes, kernel_size=1)

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))

        # Squeeze: global average pooling to a 1x1 per-channel descriptor
        w = F.avg_pool2d(out, out.size(2))
        # Excitation: bottleneck of two 1x1 convolutions, gated by a sigmoid
        w = F.relu(self.fc1(w))
        w = torch.sigmoid(self.fc2(w))
        # Re-weight the feature map channel-wise (broadcast over H and W)
        out = out * w

        out += self.shortcut(x)
        out = F.relu(out)
        return out


class PreActBlock(nn.Module):
    """Pre-activation residual block with a squeeze-and-excitation gate."""

    def __init__(self, in_planes, planes, stride=1):
        super(PreActBlock, self).__init__()
        self.bn1 = nn.BatchNorm2d(in_planes)
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)

        if stride != 1 or in_planes != planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, planes, kernel_size=1, stride=stride, bias=False)
            )

        # SE layers
        self.fc1 = nn.Conv2d(planes, planes // 16, kernel_size=1)
        self.fc2 = nn.Conv2d(planes // 16, planes, kernel_size=1)

    def forward(self, x):
        out = F.relu(self.bn1(x))
        shortcut = self.shortcut(out) if hasattr(self, 'shortcut') else x
        out = self.conv1(out)
        out = self.conv2(F.relu(self.bn2(out)))

        # Squeeze: global average pooling to a 1x1 per-channel descriptor
        w = F.avg_pool2d(out, out.size(2))
        # Excitation: bottleneck of two 1x1 convolutions, gated by a sigmoid
        w = F.relu(self.fc1(w))
        w = torch.sigmoid(self.fc2(w))
        # Re-weight the feature map channel-wise
        out = out * w

        out += shortcut
        return out


class SENet(SgModule):
    def __init__(self, block, num_blocks, num_classes=10):
        super(SENet, self).__init__()
        self.in_planes = 64

        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        self.linear = nn.Linear(512, num_classes)

    def _make_layer(self, block, planes, num_blocks, stride):
        # Only the first block of each stage downsamples; the rest keep the resolution
        strides = [stride] + [1] * (num_blocks - 1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride))
            self.in_planes = planes
        return nn.Sequential(*layers)

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        out = F.avg_pool2d(out, 4)
        out = out.view(out.size(0), -1)
        out = self.linear(out)
        return out


def SENet18():
    return SENet(PreActBlock, [2, 2, 2, 2])


def test():
    net = SENet18()
    y = net(torch.randn(1, 3, 32, 32))
    print(y.size())

# test()
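The squeeze-and-excitation gating is written inline in both BasicBlock and PreActBlock above. As a reading aid, here is the same operation factored into a standalone module; this is a minimal sketch, not part of the file, and the SELayer name and reduction parameter are illustrative only.

import torch
import torch.nn as nn
import torch.nn.functional as F


class SELayer(nn.Module):
    """Illustrative standalone SE gate: squeeze (global pool) + excitation (two 1x1 convs)."""

    def __init__(self, channels, reduction=16):
        super().__init__()
        self.fc1 = nn.Conv2d(channels, channels // reduction, kernel_size=1)
        self.fc2 = nn.Conv2d(channels // reduction, channels, kernel_size=1)

    def forward(self, x):
        # Squeeze: one scalar per channel
        w = F.adaptive_avg_pool2d(x, 1)
        # Excitation: bottleneck MLP with sigmoid gating in (0, 1)
        w = torch.sigmoid(self.fc2(F.relu(self.fc1(w))))
        # Re-weight the input channel-wise (broadcast over H and W)
        return x * w


# Sanity check: the gated output keeps the input shape
x = torch.randn(2, 64, 8, 8)
print(SELayer(64)(x).shape)  # torch.Size([2, 64, 8, 8])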