1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
|
- <!DOCTYPE html>
- <html class="writer-html5" lang="en" >
- <head>
- <meta charset="utf-8" />
- <meta name="viewport" content="width=device-width, initial-scale=1.0" />
- <title>super_gradients.training.utils.module_utils — SuperGradients 1.0 documentation</title>
- <link rel="stylesheet" href="../../../../_static/pygments.css" type="text/css" />
- <link rel="stylesheet" href="../../../../_static/css/theme.css" type="text/css" />
- <link rel="stylesheet" href="../../../../_static/graphviz.css" type="text/css" />
- <!--[if lt IE 9]>
- <script src="../../../../_static/js/html5shiv.min.js"></script>
- <![endif]-->
-
- <script data-url_root="../../../../" id="documentation_options" src="../../../../_static/documentation_options.js"></script>
- <script src="../../../../_static/jquery.js"></script>
- <script src="../../../../_static/underscore.js"></script>
- <script src="../../../../_static/doctools.js"></script>
- <script src="../../../../_static/js/theme.js"></script>
- <link rel="index" title="Index" href="../../../../genindex.html" />
- <link rel="search" title="Search" href="../../../../search.html" />
- </head>
- <body class="wy-body-for-nav">
- <div class="wy-grid-for-nav">
- <nav data-toggle="wy-nav-shift" class="wy-nav-side">
- <div class="wy-side-scroll">
- <div class="wy-side-nav-search" >
- <a href="../../../../index.html" class="icon icon-home"> SuperGradients
- </a>
- <div role="search">
- <form id="rtd-search-form" class="wy-form" action="../../../../search.html" method="get">
- <input type="text" name="q" placeholder="Search docs" />
- <input type="hidden" name="check_keywords" value="yes" />
- <input type="hidden" name="area" value="default" />
- </form>
- </div>
- </div><div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="Navigation menu">
- <p class="caption"><span class="caption-text">Welcome To SuperGradients</span></p>
- <ul>
- <li class="toctree-l1"><a class="reference internal" href="../../../../welcome.html">Fill our 4-question quick survey! We will raffle free SuperGradients swag between those who will participate -> Fill Survey</a></li>
- <li class="toctree-l1"><a class="reference internal" href="../../../../welcome.html#supergradients">SuperGradients</a></li>
- </ul>
- <p class="caption"><span class="caption-text">Technical Documentation</span></p>
- <ul>
- <li class="toctree-l1"><a class="reference internal" href="../../../../super_gradients.common.html">Common package</a></li>
- <li class="toctree-l1"><a class="reference internal" href="../../../../super_gradients.training.html">Training package</a></li>
- </ul>
- <p class="caption"><span class="caption-text">User Guide</span></p>
- <ul>
- <li class="toctree-l1"><a class="reference internal" href="../../../../user_guide.html">What is SuperGradients?</a></li>
- <li class="toctree-l1"><a class="reference internal" href="../../../../user_guide.html#introducing-the-supergradients-library">Introducing the SuperGradients library</a></li>
- <li class="toctree-l1"><a class="reference internal" href="../../../../user_guide.html#installation">Installation</a></li>
- <li class="toctree-l1"><a class="reference internal" href="../../../../user_guide.html#integrating-your-training-code-complete-walkthrough">Integrating Your Training Code - Complete Walkthrough</a></li>
- <li class="toctree-l1"><a class="reference internal" href="../../../../user_guide.html#training-parameters">Training Parameters</a></li>
- <li class="toctree-l1"><a class="reference internal" href="../../../../user_guide.html#logs-and-checkpoints">Logs and Checkpoints</a></li>
- <li class="toctree-l1"><a class="reference internal" href="../../../../user_guide.html#dataset-parameters">Dataset Parameters</a></li>
- <li class="toctree-l1"><a class="reference internal" href="../../../../user_guide.html#network-architectures">Network Architectures</a></li>
- <li class="toctree-l1"><a class="reference internal" href="../../../../user_guide.html#pretrained-models">Pretrained Models</a></li>
- <li class="toctree-l1"><a class="reference internal" href="../../../../user_guide.html#how-to-reproduce-our-training-recipes">How To Reproduce Our Training Recipes</a></li>
- <li class="toctree-l1"><a class="reference internal" href="../../../../user_guide.html#professional-tools-integration">Professional Tools Integration</a></li>
- <li class="toctree-l1"><a class="reference internal" href="../../../../user_guide.html#supergradients-faq">SuperGradients FAQ</a></li>
- </ul>
- </div>
- </div>
- </nav>
- <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap"><nav class="wy-nav-top" aria-label="Mobile navigation menu" >
- <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
- <a href="../../../../index.html">SuperGradients</a>
- </nav>
- <div class="wy-nav-content">
- <div class="rst-content">
- <div role="navigation" aria-label="Page navigation">
- <ul class="wy-breadcrumbs">
- <li><a href="../../../../index.html" class="icon icon-home"></a> »</li>
- <li><a href="../../../index.html">Module code</a> »</li>
- <li>super_gradients.training.utils.module_utils</li>
- <li class="wy-breadcrumbs-aside">
- </li>
- </ul>
- <hr/>
- </div>
- <div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article">
- <div itemprop="articleBody">
-
- <h1>Source code for super_gradients.training.utils.module_utils</h1><div class="highlight"><pre>
- <span></span><span class="kn">from</span> <span class="nn">collections</span> <span class="kn">import</span> <span class="n">OrderedDict</span>
- <span class="kn">import</span> <span class="nn">copy</span>
- <span class="kn">from</span> <span class="nn">typing</span> <span class="kn">import</span> <span class="n">List</span><span class="p">,</span> <span class="n">Union</span><span class="p">,</span> <span class="n">Tuple</span>
- <span class="kn">import</span> <span class="nn">torch</span>
- <span class="kn">from</span> <span class="nn">torch</span> <span class="kn">import</span> <span class="n">nn</span>
- <div class="viewcode-block" id="MultiOutputModule"><a class="viewcode-back" href="../../../../super_gradients.training.utils.html#super_gradients.training.utils.module_utils.MultiOutputModule">[docs]</a><span class="k">class</span> <span class="nc">MultiOutputModule</span><span class="p">(</span><span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">):</span>
- <span class="sd">"""</span>
- <span class="sd"> This module wraps around a container nn.Module (such as Module, Sequential and ModuleList) and allows to extract</span>
- <span class="sd"> multiple output from its inner modules on each forward call() (as a list of output tensors)</span>
- <span class="sd"> note: the default output of the wrapped module will not be added to the output list by default. To get</span>
- <span class="sd"> the default output in the outputs list, explicitly include its path in the @output_paths parameter</span>
- <span class="sd"> i.e.</span>
- <span class="sd"> for module:</span>
- <span class="sd"> Sequential(</span>
- <span class="sd"> (0): Sequential(</span>
- <span class="sd"> (0): Conv2d(3, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)</span>
- <span class="sd"> (1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)</span>
- <span class="sd"> (2): ReLU6(inplace=True)</span>
- <span class="sd"> ) ===================================>></span>
- <span class="sd"> (1): InvertedResidual(</span>
- <span class="sd"> (conv): Sequential(</span>
- <span class="sd"> (0): Conv2d(32, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=32, bias=False)</span>
- <span class="sd"> (1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)</span>
- <span class="sd"> (2): ReLU6(inplace=True) ===================================>></span>
- <span class="sd"> (3): Conv2d(32, 16, kernel_size=(1, 1), stride=(1, 1), bias=False)</span>
- <span class="sd"> (4): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)</span>
- <span class="sd"> )</span>
- <span class="sd"> )</span>
- <span class="sd"> )</span>
- <span class="sd"> and paths:</span>
- <span class="sd"> [0, [1, 'conv', 2]]</span>
- <span class="sd"> the output are marked with arrows</span>
- <span class="sd"> """</span>
- <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">module</span><span class="p">:</span> <span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">,</span> <span class="n">output_paths</span><span class="p">:</span> <span class="nb">list</span><span class="p">,</span> <span class="n">prune</span><span class="p">:</span> <span class="nb">bool</span> <span class="o">=</span> <span class="kc">True</span><span class="p">):</span>
- <span class="sd">"""</span>
- <span class="sd"> :param module: The wrapped container module</span>
- <span class="sd"> :param output_paths: a list of lists or keys containing the canonical paths to the outputs</span>
- <span class="sd"> i.e. [3, [4, 'conv', 5], 7] will extract outputs of layers 3, 7 and 4->conv->5</span>
- <span class="sd"> """</span>
- <span class="nb">super</span><span class="p">()</span><span class="o">.</span><span class="fm">__init__</span><span class="p">()</span>
- <span class="bp">self</span><span class="o">.</span><span class="n">output_paths</span> <span class="o">=</span> <span class="n">output_paths</span>
- <span class="bp">self</span><span class="o">.</span><span class="n">_modules</span><span class="p">[</span><span class="s1">'0'</span><span class="p">]</span> <span class="o">=</span> <span class="n">module</span>
- <span class="bp">self</span><span class="o">.</span><span class="n">_outputs_lists</span> <span class="o">=</span> <span class="p">{}</span>
- <span class="k">for</span> <span class="n">path</span> <span class="ow">in</span> <span class="n">output_paths</span><span class="p">:</span>
- <span class="n">child</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_get_recursive</span><span class="p">(</span><span class="n">module</span><span class="p">,</span> <span class="n">path</span><span class="p">)</span>
- <span class="n">child</span><span class="o">.</span><span class="n">register_forward_hook</span><span class="p">(</span><span class="n">hook</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">save_output_hook</span><span class="p">)</span>
- <span class="c1"># PRUNE THE MODULE TO SUPPORT ALL PROVIDED OUTPUT_PATHS BUT REMOVE ALL REDUNDANT LAYERS</span>
- <span class="k">if</span> <span class="n">prune</span><span class="p">:</span>
- <span class="bp">self</span><span class="o">.</span><span class="n">_prune</span><span class="p">(</span><span class="n">module</span><span class="p">,</span> <span class="n">output_paths</span><span class="p">)</span>
- <div class="viewcode-block" id="MultiOutputModule.save_output_hook"><a class="viewcode-back" href="../../../../super_gradients.training.utils.html#super_gradients.training.utils.module_utils.MultiOutputModule.save_output_hook">[docs]</a> <span class="k">def</span> <span class="nf">save_output_hook</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">_</span><span class="p">,</span> <span class="nb">input</span><span class="p">,</span> <span class="n">output</span><span class="p">):</span>
- <span class="bp">self</span><span class="o">.</span><span class="n">_outputs_lists</span><span class="p">[</span><span class="nb">input</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">device</span><span class="p">]</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">output</span><span class="p">)</span></div>
- <div class="viewcode-block" id="MultiOutputModule.forward"><a class="viewcode-back" href="../../../../super_gradients.training.utils.html#super_gradients.training.utils.module_utils.MultiOutputModule.forward">[docs]</a> <span class="k">def</span> <span class="nf">forward</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">x</span><span class="p">)</span> <span class="o">-></span> <span class="nb">list</span><span class="p">:</span>
- <span class="bp">self</span><span class="o">.</span><span class="n">_outputs_lists</span><span class="p">[</span><span class="n">x</span><span class="o">.</span><span class="n">device</span><span class="p">]</span> <span class="o">=</span> <span class="p">[]</span>
- <span class="bp">self</span><span class="o">.</span><span class="n">_modules</span><span class="p">[</span><span class="s1">'0'</span><span class="p">](</span><span class="n">x</span><span class="p">)</span>
- <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_outputs_lists</span><span class="p">[</span><span class="n">x</span><span class="o">.</span><span class="n">device</span><span class="p">]</span></div>
- <span class="k">def</span> <span class="nf">_get_recursive</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">module</span><span class="p">:</span> <span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">,</span> <span class="n">path</span><span class="p">)</span> <span class="o">-></span> <span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">:</span>
- <span class="sd">"""recursively look for a module using a path"""</span>
- <span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">path</span><span class="p">,</span> <span class="nb">list</span><span class="p">):</span>
- <span class="k">return</span> <span class="n">module</span><span class="o">.</span><span class="n">_modules</span><span class="p">[</span><span class="nb">str</span><span class="p">(</span><span class="n">path</span><span class="p">)]</span>
- <span class="k">elif</span> <span class="nb">len</span><span class="p">(</span><span class="n">path</span><span class="p">)</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
- <span class="k">return</span> <span class="n">module</span><span class="o">.</span><span class="n">_modules</span><span class="p">[</span><span class="nb">str</span><span class="p">(</span><span class="n">path</span><span class="p">[</span><span class="mi">0</span><span class="p">])]</span>
- <span class="k">else</span><span class="p">:</span>
- <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_get_recursive</span><span class="p">(</span><span class="n">module</span><span class="o">.</span><span class="n">_modules</span><span class="p">[</span><span class="nb">str</span><span class="p">(</span><span class="n">path</span><span class="p">[</span><span class="mi">0</span><span class="p">])],</span> <span class="n">path</span><span class="p">[</span><span class="mi">1</span><span class="p">:])</span>
- <span class="k">def</span> <span class="nf">_prune</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">module</span><span class="p">:</span> <span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">,</span> <span class="n">output_paths</span><span class="p">:</span> <span class="nb">list</span><span class="p">):</span>
- <span class="sd">"""</span>
- <span class="sd"> Recursively prune the module to support all provided output_paths but remove all redundant layers</span>
- <span class="sd"> """</span>
- <span class="n">last_index</span> <span class="o">=</span> <span class="o">-</span><span class="mi">1</span>
- <span class="n">last_key</span> <span class="o">=</span> <span class="kc">None</span>
- <span class="c1"># look for the last key from all paths</span>
- <span class="k">for</span> <span class="n">path</span> <span class="ow">in</span> <span class="n">output_paths</span><span class="p">:</span>
- <span class="n">key</span> <span class="o">=</span> <span class="n">path</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">path</span><span class="p">,</span> <span class="nb">list</span><span class="p">)</span> <span class="k">else</span> <span class="n">path</span>
- <span class="n">index</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">module</span><span class="o">.</span><span class="n">_modules</span><span class="p">)</span><span class="o">.</span><span class="n">index</span><span class="p">(</span><span class="nb">str</span><span class="p">(</span><span class="n">key</span><span class="p">))</span>
- <span class="k">if</span> <span class="n">index</span> <span class="o">></span> <span class="n">last_index</span><span class="p">:</span>
- <span class="n">last_index</span> <span class="o">=</span> <span class="n">index</span>
- <span class="n">last_key</span> <span class="o">=</span> <span class="n">key</span>
- <span class="n">module</span><span class="o">.</span><span class="n">_modules</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_slice_odict</span><span class="p">(</span><span class="n">module</span><span class="o">.</span><span class="n">_modules</span><span class="p">,</span> <span class="mi">0</span><span class="p">,</span> <span class="n">last_index</span> <span class="o">+</span> <span class="mi">1</span><span class="p">)</span>
- <span class="n">next_level_paths</span> <span class="o">=</span> <span class="p">[]</span>
- <span class="k">for</span> <span class="n">path</span> <span class="ow">in</span> <span class="n">output_paths</span><span class="p">:</span>
- <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">path</span><span class="p">,</span> <span class="nb">list</span><span class="p">)</span> <span class="ow">and</span> <span class="n">path</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o">==</span> <span class="n">last_key</span> <span class="ow">and</span> <span class="nb">len</span><span class="p">(</span><span class="n">path</span><span class="p">)</span> <span class="o">></span> <span class="mi">1</span><span class="p">:</span>
- <span class="n">next_level_paths</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">path</span><span class="p">[</span><span class="mi">1</span><span class="p">:])</span>
- <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">next_level_paths</span><span class="p">)</span> <span class="o">></span> <span class="mi">0</span><span class="p">:</span>
- <span class="bp">self</span><span class="o">.</span><span class="n">_prune</span><span class="p">(</span><span class="n">module</span><span class="o">.</span><span class="n">_modules</span><span class="p">[</span><span class="nb">str</span><span class="p">(</span><span class="n">last_key</span><span class="p">)],</span> <span class="n">next_level_paths</span><span class="p">)</span>
- <span class="k">def</span> <span class="nf">_slice_odict</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">odict</span><span class="p">:</span> <span class="n">OrderedDict</span><span class="p">,</span> <span class="n">start</span><span class="p">:</span> <span class="nb">int</span><span class="p">,</span> <span class="n">end</span><span class="p">:</span> <span class="nb">int</span><span class="p">):</span>
- <span class="sd">"""Slice an OrderedDict in the same logic list,tuple... are sliced"""</span>
- <span class="k">return</span> <span class="n">OrderedDict</span><span class="p">([</span>
- <span class="p">(</span><span class="n">k</span><span class="p">,</span> <span class="n">v</span><span class="p">)</span> <span class="k">for</span> <span class="p">(</span><span class="n">k</span><span class="p">,</span> <span class="n">v</span><span class="p">)</span> <span class="ow">in</span> <span class="n">odict</span><span class="o">.</span><span class="n">items</span><span class="p">()</span>
- <span class="k">if</span> <span class="n">k</span> <span class="ow">in</span> <span class="nb">list</span><span class="p">(</span><span class="n">odict</span><span class="o">.</span><span class="n">keys</span><span class="p">())[</span><span class="n">start</span><span class="p">:</span><span class="n">end</span><span class="p">]</span>
- <span class="p">])</span></div>
- <span class="k">def</span> <span class="nf">_replace_activations_recursive</span><span class="p">(</span><span class="n">module</span><span class="p">:</span> <span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">,</span> <span class="n">new_activation</span><span class="p">:</span> <span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">,</span> <span class="n">activations_to_replace</span><span class="p">:</span> <span class="n">List</span><span class="p">[</span><span class="nb">type</span><span class="p">]):</span>
- <span class="sd">"""</span>
- <span class="sd"> A helper called in replace_activations(...)</span>
- <span class="sd"> """</span>
- <span class="k">for</span> <span class="n">n</span><span class="p">,</span> <span class="n">m</span> <span class="ow">in</span> <span class="n">module</span><span class="o">.</span><span class="n">named_children</span><span class="p">():</span>
- <span class="k">if</span> <span class="nb">type</span><span class="p">(</span><span class="n">m</span><span class="p">)</span> <span class="ow">in</span> <span class="n">activations_to_replace</span><span class="p">:</span>
- <span class="nb">setattr</span><span class="p">(</span><span class="n">module</span><span class="p">,</span> <span class="n">n</span><span class="p">,</span> <span class="n">copy</span><span class="o">.</span><span class="n">deepcopy</span><span class="p">(</span><span class="n">new_activation</span><span class="p">))</span>
- <span class="k">else</span><span class="p">:</span>
- <span class="n">_replace_activations_recursive</span><span class="p">(</span><span class="n">m</span><span class="p">,</span> <span class="n">new_activation</span><span class="p">,</span> <span class="n">activations_to_replace</span><span class="p">)</span>
- <div class="viewcode-block" id="replace_activations"><a class="viewcode-back" href="../../../../super_gradients.training.utils.html#super_gradients.training.utils.module_utils.replace_activations">[docs]</a><span class="k">def</span> <span class="nf">replace_activations</span><span class="p">(</span><span class="n">module</span><span class="p">:</span> <span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">,</span> <span class="n">new_activation</span><span class="p">:</span> <span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">,</span> <span class="n">activations_to_replace</span><span class="p">:</span> <span class="n">List</span><span class="p">[</span><span class="nb">type</span><span class="p">]):</span>
- <span class="sd">"""</span>
- <span class="sd"> Recursively go through module and replaces each activation in activations_to_replace with a copy of new_activation</span>
- <span class="sd"> :param module: a module that will be changed inplace</span>
- <span class="sd"> :param new_activation: a sample of a new activation (will be copied)</span>
- <span class="sd"> :param activations_to_replace: types of activations to replace, each must be a subclass of nn.Module</span>
- <span class="sd"> """</span>
- <span class="c1"># check arguments once before the recursion</span>
- <span class="k">assert</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">new_activation</span><span class="p">,</span> <span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">),</span> <span class="s1">'new_activation should be nn.Module'</span>
- <span class="k">assert</span> <span class="nb">all</span><span class="p">([</span><span class="nb">isinstance</span><span class="p">(</span><span class="n">t</span><span class="p">,</span> <span class="nb">type</span><span class="p">)</span> <span class="ow">and</span> <span class="nb">issubclass</span><span class="p">(</span><span class="n">t</span><span class="p">,</span> <span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">)</span> <span class="k">for</span> <span class="n">t</span> <span class="ow">in</span> <span class="n">activations_to_replace</span><span class="p">]),</span> \
- <span class="s1">'activations_to_replace should be types that are subclasses of nn.Module'</span>
- <span class="c1"># do the replacement</span>
- <span class="n">_replace_activations_recursive</span><span class="p">(</span><span class="n">module</span><span class="p">,</span> <span class="n">new_activation</span><span class="p">,</span> <span class="n">activations_to_replace</span><span class="p">)</span></div>
- <div class="viewcode-block" id="fuse_repvgg_blocks_residual_branches"><a class="viewcode-back" href="../../../../super_gradients.training.utils.html#super_gradients.training.utils.module_utils.fuse_repvgg_blocks_residual_branches">[docs]</a><span class="k">def</span> <span class="nf">fuse_repvgg_blocks_residual_branches</span><span class="p">(</span><span class="n">model</span><span class="p">:</span> <span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">):</span>
- <span class="sd">'''</span>
- <span class="sd"> Call fuse_block_residual_branches for all repvgg blocks in the model</span>
- <span class="sd"> :param model: torch.nn.Module with repvgg blocks. Doesn't have to be entirely consists of repvgg.</span>
- <span class="sd"> :type model: torch.nn.Module</span>
- <span class="sd"> '''</span>
- <span class="k">assert</span> <span class="ow">not</span> <span class="n">model</span><span class="o">.</span><span class="n">training</span><span class="p">,</span> <span class="s2">"To fuse RepVGG block residual branches, model must be on eval mode"</span>
- <span class="k">for</span> <span class="n">module</span> <span class="ow">in</span> <span class="n">model</span><span class="o">.</span><span class="n">modules</span><span class="p">():</span>
- <span class="k">if</span> <span class="nb">hasattr</span><span class="p">(</span><span class="n">module</span><span class="p">,</span> <span class="s1">'fuse_block_residual_branches'</span><span class="p">):</span>
- <span class="n">module</span><span class="o">.</span><span class="n">fuse_block_residual_branches</span><span class="p">()</span>
- <span class="n">model</span><span class="o">.</span><span class="n">build_residual_branches</span> <span class="o">=</span> <span class="kc">False</span></div>
- <div class="viewcode-block" id="ConvBNReLU"><a class="viewcode-back" href="../../../../super_gradients.training.utils.html#super_gradients.training.utils.module_utils.ConvBNReLU">[docs]</a><span class="k">class</span> <span class="nc">ConvBNReLU</span><span class="p">(</span><span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">):</span>
- <span class="sd">"""</span>
- <span class="sd"> Class for Convolution2d-Batchnorm2d-Relu layer. Default behaviour is Conv-BN-Relu. To exclude Batchnorm module use</span>
- <span class="sd"> `use_normalization=False`, to exclude Relu activation use `use_activation=False`.</span>
- <span class="sd"> For convolution arguments documentation see `nn.Conv2d`.</span>
- <span class="sd"> For batchnorm arguments documentation see `nn.BatchNorm2d`.</span>
- <span class="sd"> For relu arguments documentation see `nn.Relu`.</span>
- <span class="sd"> """</span>
- <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span>
- <span class="n">in_channels</span><span class="p">:</span> <span class="nb">int</span><span class="p">,</span>
- <span class="n">out_channels</span><span class="p">:</span> <span class="nb">int</span><span class="p">,</span>
- <span class="n">kernel_size</span><span class="p">:</span> <span class="n">Union</span><span class="p">[</span><span class="nb">int</span><span class="p">,</span> <span class="n">Tuple</span><span class="p">[</span><span class="nb">int</span><span class="p">,</span> <span class="nb">int</span><span class="p">]],</span>
- <span class="n">stride</span><span class="p">:</span> <span class="n">Union</span><span class="p">[</span><span class="nb">int</span><span class="p">,</span> <span class="n">Tuple</span><span class="p">[</span><span class="nb">int</span><span class="p">,</span> <span class="nb">int</span><span class="p">]]</span> <span class="o">=</span> <span class="mi">1</span><span class="p">,</span>
- <span class="n">padding</span><span class="p">:</span> <span class="n">Union</span><span class="p">[</span><span class="nb">int</span><span class="p">,</span> <span class="n">Tuple</span><span class="p">[</span><span class="nb">int</span><span class="p">,</span> <span class="nb">int</span><span class="p">]]</span> <span class="o">=</span> <span class="mi">0</span><span class="p">,</span>
- <span class="n">dilation</span><span class="p">:</span> <span class="n">Union</span><span class="p">[</span><span class="nb">int</span><span class="p">,</span> <span class="n">Tuple</span><span class="p">[</span><span class="nb">int</span><span class="p">,</span> <span class="nb">int</span><span class="p">]]</span> <span class="o">=</span> <span class="mi">1</span><span class="p">,</span>
- <span class="n">groups</span><span class="p">:</span> <span class="nb">int</span> <span class="o">=</span> <span class="mi">1</span><span class="p">,</span>
- <span class="n">bias</span><span class="p">:</span> <span class="nb">bool</span> <span class="o">=</span> <span class="kc">True</span><span class="p">,</span>
- <span class="n">padding_mode</span><span class="p">:</span> <span class="nb">str</span> <span class="o">=</span> <span class="s1">'zeros'</span><span class="p">,</span>
- <span class="n">use_normalization</span><span class="p">:</span> <span class="nb">bool</span> <span class="o">=</span> <span class="kc">True</span><span class="p">,</span>
- <span class="n">eps</span><span class="p">:</span> <span class="nb">float</span> <span class="o">=</span> <span class="mf">1e-5</span><span class="p">,</span>
- <span class="n">momentum</span><span class="p">:</span> <span class="nb">float</span> <span class="o">=</span> <span class="mf">0.1</span><span class="p">,</span>
- <span class="n">affine</span><span class="p">:</span> <span class="nb">bool</span> <span class="o">=</span> <span class="kc">True</span><span class="p">,</span>
- <span class="n">track_running_stats</span><span class="p">:</span> <span class="nb">bool</span> <span class="o">=</span> <span class="kc">True</span><span class="p">,</span>
- <span class="n">device</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
- <span class="n">dtype</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
- <span class="n">use_activation</span><span class="p">:</span> <span class="nb">bool</span> <span class="o">=</span> <span class="kc">True</span><span class="p">,</span>
- <span class="n">inplace</span><span class="p">:</span> <span class="nb">bool</span> <span class="o">=</span> <span class="kc">False</span><span class="p">):</span>
- <span class="nb">super</span><span class="p">(</span><span class="n">ConvBNReLU</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">()</span>
- <span class="bp">self</span><span class="o">.</span><span class="n">seq</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">Sequential</span><span class="p">()</span>
- <span class="bp">self</span><span class="o">.</span><span class="n">seq</span><span class="o">.</span><span class="n">add_module</span><span class="p">(</span><span class="s2">"conv"</span><span class="p">,</span> <span class="n">nn</span><span class="o">.</span><span class="n">Conv2d</span><span class="p">(</span><span class="n">in_channels</span><span class="p">,</span>
- <span class="n">out_channels</span><span class="p">,</span>
- <span class="n">kernel_size</span><span class="o">=</span><span class="n">kernel_size</span><span class="p">,</span>
- <span class="n">stride</span><span class="o">=</span><span class="n">stride</span><span class="p">,</span>
- <span class="n">padding</span><span class="o">=</span><span class="n">padding</span><span class="p">,</span>
- <span class="n">dilation</span><span class="o">=</span><span class="n">dilation</span><span class="p">,</span>
- <span class="n">groups</span><span class="o">=</span><span class="n">groups</span><span class="p">,</span>
- <span class="n">bias</span><span class="o">=</span><span class="n">bias</span><span class="p">,</span>
- <span class="n">padding_mode</span><span class="o">=</span><span class="n">padding_mode</span><span class="p">))</span>
- <span class="k">if</span> <span class="n">use_normalization</span><span class="p">:</span>
- <span class="bp">self</span><span class="o">.</span><span class="n">seq</span><span class="o">.</span><span class="n">add_module</span><span class="p">(</span><span class="s2">"bn"</span><span class="p">,</span> <span class="n">nn</span><span class="o">.</span><span class="n">BatchNorm2d</span><span class="p">(</span><span class="n">out_channels</span><span class="p">,</span> <span class="n">eps</span><span class="o">=</span><span class="n">eps</span><span class="p">,</span> <span class="n">momentum</span><span class="o">=</span><span class="n">momentum</span><span class="p">,</span> <span class="n">affine</span><span class="o">=</span><span class="n">affine</span><span class="p">,</span>
- <span class="n">track_running_stats</span><span class="o">=</span><span class="n">track_running_stats</span><span class="p">,</span> <span class="n">device</span><span class="o">=</span><span class="n">device</span><span class="p">,</span>
- <span class="n">dtype</span><span class="o">=</span><span class="n">dtype</span><span class="p">))</span>
- <span class="k">if</span> <span class="n">use_activation</span><span class="p">:</span>
- <span class="bp">self</span><span class="o">.</span><span class="n">seq</span><span class="o">.</span><span class="n">add_module</span><span class="p">(</span><span class="s2">"relu"</span><span class="p">,</span> <span class="n">nn</span><span class="o">.</span><span class="n">ReLU</span><span class="p">(</span><span class="n">inplace</span><span class="o">=</span><span class="n">inplace</span><span class="p">))</span>
- <div class="viewcode-block" id="ConvBNReLU.forward"><a class="viewcode-back" href="../../../../super_gradients.training.utils.html#super_gradients.training.utils.module_utils.ConvBNReLU.forward">[docs]</a> <span class="k">def</span> <span class="nf">forward</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">x</span><span class="p">):</span>
- <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">seq</span><span class="p">(</span><span class="n">x</span><span class="p">)</span></div></div>
- <!-- Sphinx viewcode block: NormalizationAdapter. The highlighted Python below shows a
-      module that de-normalizes input by (mean_original, std_original) and re-normalizes
-      by (mean_required, std_required), folding both steps into learnable additive and
-      multiplier parameters. NOTE(review): this HTML is generated by Sphinx/pygments —
-      fix the underlying .py source, not this rendering. -->
- <div class="viewcode-block" id="NormalizationAdapter"><a class="viewcode-back" href="../../../../super_gradients.training.utils.html#super_gradients.training.utils.module_utils.NormalizationAdapter">[docs]</a><span class="k">class</span> <span class="nc">NormalizationAdapter</span><span class="p">(</span><span class="n">torch</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">):</span>
- <span class="sd">"""</span>
- <span class="sd"> Denormalizes input by mean_original, std_original, then normalizes by mean_required, std_required.</span>
- <span class="sd"> Used in KD training where teacher expects data normalized by mean_required, std_required.</span>
- <span class="sd"> mean_original, std_original, mean_required, std_required are all list-like objects of length that's equal to the</span>
- <span class="sd"> number of input channels.</span>
- <span class="sd">"""</span>
- <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">mean_original</span><span class="p">,</span> <span class="n">std_original</span><span class="p">,</span> <span class="n">mean_required</span><span class="p">,</span> <span class="n">std_required</span><span class="p">):</span>
- <span class="nb">super</span><span class="p">(</span><span class="n">NormalizationAdapter</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">()</span>
- <span class="n">mean_original</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">tensor</span><span class="p">(</span><span class="n">mean_original</span><span class="p">)</span><span class="o">.</span><span class="n">unsqueeze</span><span class="p">(</span><span class="o">-</span><span class="mi">1</span><span class="p">)</span><span class="o">.</span><span class="n">unsqueeze</span><span class="p">(</span><span class="o">-</span><span class="mi">1</span><span class="p">)</span>
- <span class="n">std_original</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">tensor</span><span class="p">(</span><span class="n">std_original</span><span class="p">)</span><span class="o">.</span><span class="n">unsqueeze</span><span class="p">(</span><span class="o">-</span><span class="mi">1</span><span class="p">)</span><span class="o">.</span><span class="n">unsqueeze</span><span class="p">(</span><span class="o">-</span><span class="mi">1</span><span class="p">)</span>
- <span class="n">mean_required</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">tensor</span><span class="p">(</span><span class="n">mean_required</span><span class="p">)</span><span class="o">.</span><span class="n">unsqueeze</span><span class="p">(</span><span class="o">-</span><span class="mi">1</span><span class="p">)</span><span class="o">.</span><span class="n">unsqueeze</span><span class="p">(</span><span class="o">-</span><span class="mi">1</span><span class="p">)</span>
- <span class="n">std_required</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">tensor</span><span class="p">(</span><span class="n">std_required</span><span class="p">)</span><span class="o">.</span><span class="n">unsqueeze</span><span class="p">(</span><span class="o">-</span><span class="mi">1</span><span class="p">)</span><span class="o">.</span><span class="n">unsqueeze</span><span class="p">(</span><span class="o">-</span><span class="mi">1</span><span class="p">)</span>
- <span class="bp">self</span><span class="o">.</span><span class="n">additive</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">Parameter</span><span class="p">((</span><span class="n">mean_original</span> <span class="o">-</span> <span class="n">mean_required</span><span class="p">)</span> <span class="o">/</span> <span class="n">std_original</span><span class="p">)</span>
- <span class="bp">self</span><span class="o">.</span><span class="n">multiplier</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">Parameter</span><span class="p">(</span><span class="n">std_original</span> <span class="o">/</span> <span class="n">std_required</span><span class="p">)</span>
- <div class="viewcode-block" id="NormalizationAdapter.forward"><a class="viewcode-back" href="../../../../super_gradients.training.utils.html#super_gradients.training.utils.module_utils.NormalizationAdapter.forward">[docs]</a> <span class="k">def</span> <span class="nf">forward</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">x</span><span class="p">):</span>
- <span class="n">x</span> <span class="o">=</span> <span class="p">(</span><span class="n">x</span> <span class="o">+</span> <span class="bp">self</span><span class="o">.</span><span class="n">additive</span><span class="p">)</span> <span class="o">*</span> <span class="bp">self</span><span class="o">.</span><span class="n">multiplier</span>
- <span class="k">return</span> <span class="n">x</span></div></div>
- </pre></div>
- </div>
- </div>
- <!-- Page footer: copyright notice + Sphinx/RTD theme attribution -->
- <footer>
- <hr>
- <div role="contentinfo">
- <p>© Copyright 2021, SuperGradients team.</p>
- </div>
- Built with <a href="https://www.sphinx-doc.org/">Sphinx</a> using a
- <a href="https://github.com/readthedocs/sphinx_rtd_theme">theme</a>
- provided by <a href="https://readthedocs.org">Read the Docs</a>.
-
- </footer>
- </div>
- </div>
- </section>
- </div>
- <script>
- <!-- Enable the Read the Docs theme's sidebar navigation once the DOM is ready -->
- jQuery(document).ready(function () {
- SphinxRtdTheme.Navigation.enable(true);
- });
- </script>
- </body>
- </html>
|