1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
|
- <!DOCTYPE html>
- <html class="writer-html5" lang="en" >
- <head>
- <meta charset="utf-8" /><meta name="generator" content="Docutils 0.17.1: http://docutils.sourceforge.net/" />
- <meta name="viewport" content="width=device-width, initial-scale=1.0" />
- <title>SuperGradients — SuperGradients 1.0 documentation</title>
- <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
- <link rel="stylesheet" href="_static/css/theme.css" type="text/css" />
- <link rel="stylesheet" href="_static/graphviz.css" type="text/css" />
- <!--[if lt IE 9]>
- <script src="_static/js/html5shiv.min.js"></script>
- <![endif]-->
- <script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>
- <script src="_static/jquery.js"></script>
- <script src="_static/underscore.js"></script>
- <script src="_static/doctools.js"></script>
- <script src="_static/js/theme.js"></script>
- <link rel="index" title="Index" href="genindex.html" />
- <link rel="search" title="Search" href="search.html" />
- <link rel="next" title="Common package" href="super_gradients.common.html" />
- <link rel="prev" title="Welcome to SuperGradients’s documentation!" href="index.html" />
- </head>
- <body class="wy-body-for-nav">
- <div class="wy-grid-for-nav">
- <nav data-toggle="wy-nav-shift" class="wy-nav-side">
- <div class="wy-side-scroll">
- <div class="wy-side-nav-search" >
- <a href="index.html" class="icon icon-home"> SuperGradients
- </a>
- <div role="search">
- <form id="rtd-search-form" class="wy-form" action="search.html" method="get">
- <input type="text" name="q" placeholder="Search docs" />
- <input type="hidden" name="check_keywords" value="yes" />
- <input type="hidden" name="area" value="default" />
- </form>
- </div>
- </div><div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="Navigation menu">
- <p class="caption"><span class="caption-text">Welcome To SuperGradients</span></p>
- <ul class="current">
- <li class="toctree-l1 current"><a class="current reference internal" href="#">SuperGradients</a><ul>
- <li class="toctree-l2"><a class="reference internal" href="#introduction">Introduction</a><ul>
- <li class="toctree-l3"><a class="reference internal" href="#why-use-supergradients">Why use SuperGradients?</a></li>
- <li class="toctree-l3"><a class="reference internal" href="#documentation">Documentation</a></li>
- </ul>
- </li>
- <li class="toctree-l2"><a class="reference internal" href="#what-s-new">What’s New</a></li>
- <li class="toctree-l2"><a class="reference internal" href="#comming-soon">Coming soon</a><ul>
- <li class="toctree-l3"><a class="reference internal" href="#table-of-content">Table of Content</a></li>
- </ul>
- </li>
- <li class="toctree-l2"><a class="reference internal" href="#getting-started">Getting Started</a><ul>
- <li class="toctree-l3"><a class="reference internal" href="#quick-start-notebook">Quick Start Notebook</a></li>
- <li class="toctree-l3"><a class="reference internal" href="#supergradients-walkthrough-notebook">SuperGradients Walkthrough Notebook</a></li>
- <li class="toctree-l3"><a class="reference internal" href="#transfer-learning-with-sg-notebook">Transfer Learning with SG Notebook</a></li>
- </ul>
- </li>
- <li class="toctree-l2"><a class="reference internal" href="#installation-methods">Installation Methods</a><ul>
- <li class="toctree-l3"><a class="reference internal" href="#prerequisites">Prerequisites</a></li>
- <li class="toctree-l3"><a class="reference internal" href="#quick-installation">Quick Installation</a></li>
- </ul>
- </li>
- <li class="toctree-l2"><a class="reference internal" href="#computer-vision-models-pretrained-checkpoints">Computer Vision Models’ Pretrained Checkpoints</a><ul>
- <li class="toctree-l3"><a class="reference internal" href="#pretrained-classification-pytorch-checkpoints">Pretrained Classification PyTorch Checkpoints</a></li>
- <li class="toctree-l3"><a class="reference internal" href="#pretrained-object-detection-pytorch-checkpoints">Pretrained Object Detection PyTorch Checkpoints</a></li>
- <li class="toctree-l3"><a class="reference internal" href="#pretrained-semantic-segmentation-pytorch-checkpoints">Pretrained Semantic Segmentation PyTorch Checkpoints</a></li>
- </ul>
- </li>
- <li class="toctree-l2"><a class="reference internal" href="#contributing">Contributing</a></li>
- <li class="toctree-l2"><a class="reference internal" href="#citation">Citation</a></li>
- <li class="toctree-l2"><a class="reference internal" href="#community">Community</a></li>
- <li class="toctree-l2"><a class="reference internal" href="#license">License</a></li>
- <li class="toctree-l2"><a class="reference internal" href="#deci-lab">Deci Lab</a></li>
- </ul>
- </li>
- </ul>
- <p class="caption"><span class="caption-text">Technical Documentation</span></p>
- <ul>
- <li class="toctree-l1"><a class="reference internal" href="super_gradients.common.html">Common package</a></li>
- <li class="toctree-l1"><a class="reference internal" href="super_gradients.training.html">Training package</a></li>
- </ul>
- <p class="caption"><span class="caption-text">User Guide</span></p>
- <ul>
- <li class="toctree-l1"><a class="reference internal" href="user_guide.html">What is SuperGradients?</a></li>
- <li class="toctree-l1"><a class="reference internal" href="user_guide.html#introducing-the-supergradients-library">Introducing the SuperGradients library</a></li>
- <li class="toctree-l1"><a class="reference internal" href="user_guide.html#installation">Installation</a></li>
- <li class="toctree-l1"><a class="reference internal" href="user_guide.html#integrating-your-training-code-complete-walkthrough">Integrating Your Training Code - Complete Walkthrough</a></li>
- <li class="toctree-l1"><a class="reference internal" href="user_guide.html#training-parameters">Training Parameters</a></li>
- <li class="toctree-l1"><a class="reference internal" href="user_guide.html#logs-and-checkpoints">Logs and Checkpoints</a></li>
- <li class="toctree-l1"><a class="reference internal" href="user_guide.html#dataset-parameters">Dataset Parameters</a></li>
- <li class="toctree-l1"><a class="reference internal" href="user_guide.html#network-architectures">Network Architectures</a></li>
- <li class="toctree-l1"><a class="reference internal" href="user_guide.html#pretrained-models">Pretrained Models</a></li>
- <li class="toctree-l1"><a class="reference internal" href="user_guide.html#how-to-reproduce-our-training-recipes">How To Reproduce Our Training Recipes</a></li>
- <li class="toctree-l1"><a class="reference internal" href="user_guide.html#supergradients-faq">SuperGradients FAQ</a></li>
- </ul>
- </div>
- </div>
- </nav>
- <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap"><nav class="wy-nav-top" aria-label="Mobile navigation menu" >
- <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
- <a href="index.html">SuperGradients</a>
- </nav>
- <div class="wy-nav-content">
- <div class="rst-content">
- <div role="navigation" aria-label="Page navigation">
- <ul class="wy-breadcrumbs">
- <li><a href="index.html" class="icon icon-home"></a> »</li>
- <li>SuperGradients</li>
- <li class="wy-breadcrumbs-aside">
- <a href="_sources/welcome.md.txt" rel="nofollow"> View page source</a>
- </li>
- </ul>
- <hr/>
- </div>
- <div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article">
- <div itemprop="articleBody">
- <div align="center">
- <img src="assets/SG_img/SG - Horizontal.png" width="600"/>
- <br/><br/>
- <p><strong>Easily train or fine-tune SOTA computer vision models with one open source training library</strong>
- <a class="reference external" href="https://twitter.com/intent/tweet?text=Easily%20train%20or%20fine-tune%20SOTA%20computer%20vision%20models%20from%20one%20training%20repository&url=https://github.com/Deci-AI/super-gradients&via=deci_ai&hashtags=AI,deeplearning,computervision,training,opensource"><img alt="Tweet" src="https://img.shields.io/twitter/url/http/shields.io.svg?style=social" /></a></p>
- <hr class="docutils" />
- <p><a href="https://github.com/Deci-AI/super-gradients#prerequisites"><img src="https://img.shields.io/badge/python-3.7%20%7C%203.8%20%7C%203.9-blue" /></a>
- <a href="https://github.com/Deci-AI/super-gradients#prerequisites"><img src="https://img.shields.io/badge/pytorch-1.9%20%7C%201.10-blue" /></a>
- <a href="https://pypi.org/project/super-gradients/"><img src="https://img.shields.io/pypi/v/super-gradients" /></a>
- <a href="https://github.com/Deci-AI/super-gradients#computer-vision-models-pretrained-checkpoints" ><img src="https://img.shields.io/badge/pre--trained%20models-25-brightgreen" /></a>
- <a href="https://github.com/Deci-AI/super-gradients/releases"><img src="https://img.shields.io/github/v/release/Deci-AI/super-gradients" /></a>
- <a href="https://join.slack.com/t/supergradients-comm52/shared_invite/zt-10vz6o1ia-b_0W5jEPEnuHXm087K~t8Q"><img src="https://img.shields.io/badge/slack-community-blueviolet" /></a>
- <a href="https://github.com/Deci-AI/super-gradients/blob/master/LICENSE.md"><img src="https://img.shields.io/badge/license-Apache%202.0-blue" /></a>
- <a href="https://deci-ai.github.io/super-gradients/welcome.html"><img src="https://img.shields.io/badge/docs-sphinx-brightgreen" /></a></p>
- </div>
- <section class="tex2jax_ignore mathjax_ignore" id="supergradients">
- <h1>SuperGradients<a class="headerlink" href="#supergradients" title="Permalink to this headline"></a></h1>
- <section id="introduction">
- <h2>Introduction<a class="headerlink" href="#introduction" title="Permalink to this headline"></a></h2>
- <p>Welcome to SuperGradients, a free, open-source training library for PyTorch-based deep learning models.
- SuperGradients allows you to train or fine-tune SOTA pre-trained models for all the most commonly applied computer vision tasks with just one training library. We currently support object detection, image classification and semantic segmentation for videos and images.</p>
- <p>Docs and full user guide<span class="xref myst"></span></p>
- <section id="why-use-supergradients">
- <h3>Why use SuperGradients?<a class="headerlink" href="#why-use-supergradients" title="Permalink to this headline"></a></h3>
- <p><strong>Built-in SOTA Models</strong></p>
- <p>Easily load and fine-tune production-ready, <a class="reference external" href="https://github.com/Deci-AI/super-gradients#pretrained-classification-pytorch-checkpoints">pre-trained SOTA models</a> that incorporate best practices and validated hyper-parameters for achieving best-in-class accuracy.</p>
- <p><strong>Easily Reproduce our Results</strong></p>
- <p>Why do all the grind work, if we already did it for you? leverage tested and proven <a class="reference external" href="https://github.com/Deci-AI/super-gradients/tree/master/src/super_gradients/recipes">recipes</a> & <a class="reference external" href="https://github.com/Deci-AI/super-gradients/tree/master/src/super_gradients/examples">code examples</a> for a wide range of computer vision models generated by our team of deep learning experts. Easily configure your own or use plug & play hyperparameters for training, dataset, and architecture.</p>
- <p><strong>Production Readiness and Ease of Integration</strong></p>
- <p>All SuperGradients models’ are production ready in the sense that they are compatible with deployment tools such as TensorRT (Nvidia) and OpenVino (Intel) and can be easily taken into production. With a few lines of code you can easily integrate the models into your codebase.</p>
- <div align="center">
- <img src="./assets/SG_img/detection-demo.png" width="600">
- </div>
- </section>
- <section id="documentation">
- <h3>Documentation<a class="headerlink" href="#documentation" title="Permalink to this headline"></a></h3>
- <p>Check SuperGradients <a class="reference external" href="https://deci-ai.github.io/super-gradients/welcome.html">Docs</a> for full documentation, user guide, and examples.</p>
- </section>
- </section>
- <hr class="docutils" />
- <section id="table-of-content">
- <h3>Table of Content<a class="headerlink" href="#table-of-content" title="Permalink to this headline"></a></h3>
- <details>
- <summary>See Table </summary>
- <!-- toc -->
- <ul class="simple">
- <li><p><a class="reference external" href="#getting-started">Getting Started</a></p>
- <ul>
- <li><p><a class="reference external" href="#quick-start-notebook">Quick Start Notebook</a></p></li>
- <li><p><a class="reference external" href="#supergradients-walkthrough-notebook">Walkthrough Notebook</a></p></li>
- <li><p><a class="reference external" href="#transfer-learning-with-sg-notebook">Transfer Learning with SG Notebook</a></p></li>
- </ul>
- </li>
- <li><p><a class="reference external" href="#installation-methods">Installation Methods</a></p>
- <ul>
- <li><p><a class="reference external" href="#prerequisites">Prerequisites</a></p></li>
- <li><p><a class="reference external" href="#quick-installation">Quick Installation</a></p></li>
- </ul>
- </li>
- <li><p><a class="reference external" href="#computer-vision-models-pretrained-checkpoints">Computer Vision Models’ Pretrained Checkpoints</a></p>
- <ul>
- <li><p><a class="reference external" href="#pretrained-classification-pytorch-checkpoints">Pretrained Classification PyTorch Checkpoints</a></p></li>
- <li><p><a class="reference external" href="#pretrained-object-detection-pytorch-checkpoints">Pretrained Object Detection PyTorch Checkpoints</a></p></li>
- <li><p><a class="reference external" href="#pretrained-semantic-segmentation-pytorch-checkpoints">Pretrained Semantic Segmentation PyTorch Checkpoints</a></p></li>
- </ul>
- </li>
- <li><p><a class="reference external" href="#contributing">Contributing</a></p></li>
- <li><p><a class="reference external" href="#citation">Citation</a></p></li>
- <li><p><a class="reference external" href="#community">Community</a></p></li>
- <li><p><a class="reference external" href="#license">License</a></p></li>
- <li><p><a class="reference external" href="#deci-lab">Deci Lab</a></p></li>
- </ul>
- <!-- tocstop -->
- </details>
- </section>
- </section>
- <section id="getting-started">
- <h2>Getting Started<a class="headerlink" href="#getting-started" title="Permalink to this headline"></a></h2>
- <section id="quick-start-notebook">
- <h3>Quick Start Notebook - Classification<a class="headerlink" href="#quick-start-notebook" title="Permalink to this headline"></a></h3>
- <p>Get started with our quick start notebook for image classification tasks on Google Colab for a quick and easy start using free GPU hardware.</p>
- <table class="tfo-notebook-buttons" align="left">
- <td>
- <a target="_blank" href="https://bit.ly/3ufnsgT"><img src="./assets/SG_img/colab_logo.png" />Classification Quick Start in Google Colab</a>
- </td>
- <td>
- <a href="https://minhaskamal.github.io/DownGit/#/home?url=https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/examples/SG_quickstart_classification.ipynb"><img src="./assets/SG_img/download_logo.png" />Download notebook</a>
- </td>
- <td>
- <a target="_blank" href="https://github.com/Deci-AI/super-gradients/tree/master/src/super_gradients/examples"><img src="./assets/SG_img/GitHub_logo.png" />View source on GitHub</a>
- </td>
- </table>
- <br/><br/>
- </section>
- <section id="quick-start-notebook-semantic-segmentation">
- <h3>Quick Start Notebook - Semantic Segmentation<a class="headerlink" href="#quick-start-notebook-semantic-segmentation" title="Permalink to this headline"></a></h3>
- <p>Get started with our quick start notebook for semantic segmentation tasks on Google Colab for a quick and easy start using free GPU hardware.</p>
- <table class="tfo-notebook-buttons" align="left">
- <td>
- <a target="_blank" href="https://bit.ly/3Jp7w1U"><img src="./assets/SG_img/colab_logo.png" />Segmentation Quick Start in Google Colab</a>
- </td>
- <td>
- <a href="https://minhaskamal.github.io/DownGit/#/home?url=https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/examples/SG_quickstart_segmentation.ipynb"><img src="./assets/SG_img/download_logo.png" />Download notebook</a>
- </td>
- <td>
- <a target="_blank" href="https://github.com/Deci-AI/super-gradients/tree/master/src/super_gradients/examples"><img src="./assets/SG_img/GitHub_logo.png" />View source on GitHub</a>
- </td>
- </table>
- <br/><br/>
- <!--
- ### Quick Start Notebook - Object Detection
- Get started with our quick start notebook for object detection tasks on Google Colab for a quick and easy start using free GPU hardware.
- <table class="tfo-notebook-buttons" align="left">
- <table class="tfo-notebok-buttons" align="left">
- <td>
- <a target="_blank" href="https://bit.ly/3wqMsEM"><img src="./docs/assets/SG_img/colab_logo.png" />Detection Quick Start in Google Colab</a>
- </td>
- <td>
- <a href="https://minhaskamal.github.io/DownGit/#/home?url=https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/examples/SG_quickstart_detection.ipynb"><img src="./docs/assets/SG_img/download_logo.png" />Download notebook</a>
- </td>
- <td>
- <a target="_blank" href="https://github.com/Deci-AI/super-gradients/tree/master/src/super_gradients/examples"><img src="./docs/assets/SG_img/GitHub_logo.png" />View source on GitHub</a>
- </td>
- </table>
- </br></br>
- ### Quick Start Notebook - Upload your model to Deci Platform
- Get Started with an example of how to upload your trained model to Deci Platform for runtime optimization and compilation to your target deployment HW.
- <table class="tfo-notebook-buttons" align="left">
- <tbody>
- <tr>
- <td vertical-align="middle">
- <img src="./docs/assets/SG_img/colab_logo.png" />
- <a target="_blank" href="https://bit.ly/3cAkoXG">
- Upload to Deci Platform in Google Colab
- </a>
- </td>
- <td vertical-align="middle">
- <img src="./docs/assets/SG_img/download_logo.png" />
- <a href="https://minhaskamal.github.io/DownGit/#/home?url=https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/examples/SG_quickstart_model_upload_deci_lab.ipynb">
- Download notebook
- </a>
- </td>
- <td>
- <img src="./docs/assets/SG_img/GitHub_logo.png" />
- <a target="_blank" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/examples/deci_lab_export_example/deci_lab_export_example.py">
- View source on GitHub
- </a>
- </td>
- </tr>
- </tbody>
- </table>
- </br></br>
- ### SuperGradients Complete Walkthrough Notebook
- Learn more about SuperGradients training components with our walkthrough notebook on Google Colab for an easy to use tutorial using free GPU hardware
- <table class="tfo-notebook-buttons" align="left">
- <td>
- <a target="_blank" href="https://bit.ly/3JspSPF"><img src="./docs/assets/SG_img/colab_logo.png" />SuperGradients Walkthrough in Google Colab</a>
- </td>
- <td>
- <a href="https://minhaskamal.github.io/DownGit/#/home?url=https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/examples/SG_Walkthrough.ipynb"><img src="./docs/assets/SG_img/download_logo.png" />Download notebook</a>
- </td>
- <td>
- <a target="_blank" href="https://github.com/Deci-AI/super-gradients/tree/master/src/super_gradients/examples"><img src="./docs/assets/SG_img/GitHub_logo.png" />View source on GitHub</a>
- </td>
- </table>
- </br></br>
- ### Transfer Learning with SG Notebook - Object Detection
- Learn more about SuperGradients transfer learning or fine tuning abilities with our COCO pre-trained YoloX nano fine tuning into a sub-dataset of PASCAL VOC example notebook on Google Colab for an easy to use tutorial using free GPU hardware
- <table class="tfo-notebook-buttons" align="left">
- <td>
- <a target="_blank" href="https://bit.ly/3iGvnP7"><img src="./docs/assets/SG_img/colab_logo.png" />Detection Transfer Learning in Google Colab</a>
- </td>
- <td>
- <a href="https://minhaskamal.github.io/DownGit/#/home?url=https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/examples/SG_transfer_learning_object_detection.ipynb"><img src="./docs/assets/SG_img/download_logo.png" />Download notebook</a>
- </td>
- <td>
- <a target="_blank" href="https://github.com/Deci-AI/super-gradients/tree/master/src/super_gradients/examples"><img src="./docs/assets/SG_img/GitHub_logo.png" />View source on GitHub</a>
- </td>
- </table>
- </br></br>
- -->
- </section>
- </section>
- <section id="transfer-learning">
- <h2>Transfer Learning<a class="headerlink" href="#transfer-learning" title="Permalink to this headline"></a></h2>
- <section id="transfer-learning-with-sg-notebook-semantic-segmentation">
- <h3>Transfer Learning with SG Notebook - Semantic Segmentation<a class="headerlink" href="#transfer-learning-with-sg-notebook-semantic-segmentation" title="Permalink to this headline"></a></h3>
- <p>Learn more about SuperGradients transfer learning or fine tuning abilities with our Cityscapes pre-trained RegSeg48 fine tuning into a sub-dataset of Supervisely example notebook on Google Colab for an easy to use tutorial using free GPU hardware</p>
- <table class="tfo-notebook-buttons" align="left">
- <td>
- <a target="_blank" href="https://bit.ly/37P04PN"><img src="./assets/SG_img/colab_logo.png" />Segmentation Transfer Learning in Google Colab</a>
- </td>
- <td>
- <a href="https://minhaskamal.github.io/DownGit/#/home?url=https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/examples/SG_transfer_learning_semantic_segmentation.ipynb"><img src="./assets/SG_img/download_logo.png" />Download notebook</a>
- </td>
- <td>
- <a target="_blank" href="https://github.com/Deci-AI/super-gradients/tree/master/src/super_gradients/examples"><img src="./assets/SG_img/GitHub_logo.png" />View source on GitHub</a>
- </td>
- </table>
- <br/><br/>
- </section>
- </section>
- <section id="knowledge-distillation-training">
- <h2>Knowledge Distillation Training<a class="headerlink" href="#knowledge-distillation-training" title="Permalink to this headline"></a></h2>
- <section id="knowledge-distillation-training-quick-start-with-sg-notebook-resnet18-example">
- <h3>Knowledge Distillation Training Quick Start with SG Notebook - ResNet18 example<a class="headerlink" href="#knowledge-distillation-training-quick-start-with-sg-notebook-resnet18-example" title="Permalink to this headline"></a></h3>
- <p>Knowledge Distillation is a training technique that uses a large model, teacher model, to improve the performance of a smaller model, the student model.
- Learn more about SuperGradients knowledge distillation training with our pre-trained BEiT base teacher model and Resnet18 student model on CIFAR10 example notebook on Google Colab for an easy to use tutorial using free GPU hardware</p>
- <table class="tfo-notebook-buttons" align="left">
- <td>
- <a target="_blank" href="https://bit.ly/3HQvbsg"><img src="./assets/SG_img/colab_logo.png" />KD Training in Google Colab</a>
- </td>
- <td>
- <a href="https://minhaskamal.github.io/DownGit/#/home?url=https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/examples/SG_knowledge_distillation_quickstart.ipynb"><img src="./assets/SG_img/download_logo.png" />Download notebook</a>
- </td>
- <td>
- <a target="_blank" href="https://github.com/Deci-AI/super-gradients/tree/master/src/super_gradients/examples"><img src="./assets/SG_img/GitHub_logo.png" />View source on GitHub</a>
- </td>
- </table>
- <br/><br/>
- </section>
- </section>
- <section id="installation-methods">
- <h2>Installation Methods<a class="headerlink" href="#installation-methods" title="Permalink to this headline"></a></h2>
- <section id="prerequisites">
- <h3>Prerequisites<a class="headerlink" href="#prerequisites" title="Permalink to this headline"></a></h3>
- <details>
- <summary>General requirements</summary>
- <ul class="simple">
- <li><p>Python 3.7, 3.8 or 3.9 installed.</p></li>
- <li><p>torch>=1.9.0</p>
- <ul>
- <li><p>https://pytorch.org/get-started/locally/</p></li>
- </ul>
- </li>
- <li><p>The python packages that are specified in requirements.txt;</p></li>
- </ul>
- </details>
- <details>
- <summary>To train on nvidia GPUs</summary>
- <ul class="simple">
- <li><p><a class="reference external" href="https://developer.nvidia.com/cuda-11.2.0-download-archive?target_os=Linux&target_arch=x86_64&target_distro=Ubuntu">Nvidia CUDA Toolkit >= 11.2</a></p></li>
- <li><p>CuDNN >= 8.1.x</p></li>
- <li><p>Nvidia Driver with CUDA >= 11.2 support (≥460.x)</p></li>
- </ul>
- </details>
- </section>
- <section id="quick-installation">
- <h3>Quick Installation<a class="headerlink" href="#quick-installation" title="Permalink to this headline"></a></h3>
- <details>
- <summary>Install stable version using PyPi</summary>
- <p>See in <a class="reference external" href="https://pypi.org/project/super-gradients/">PyPi</a></p>
- <div class="highlight-bash notranslate"><div class="highlight"><pre><span></span>pip install super-gradients
- </pre></div>
- </div>
- <p>That’s it !</p>
- </details>
- <details>
- <summary>Install using GitHub</summary>
- <div class="highlight-bash notranslate"><div class="highlight"><pre><span></span>pip install git+https://github.com/Deci-AI/super-gradients.git@stable
- </pre></div>
- </div>
- </details>
- </section>
- </section>
- <section id="computer-vision-models-pretrained-checkpoints">
- <h2>Computer Vision Models - Pretrained Checkpoints<a class="headerlink" href="#computer-vision-models-pretrained-checkpoints" title="Permalink to this headline"></a></h2>
- <section id="pretrained-classification-pytorch-checkpoints">
- <h3>Pretrained Classification PyTorch Checkpoints<a class="headerlink" href="#pretrained-classification-pytorch-checkpoints" title="Permalink to this headline"></a></h3>
- <table class="colwidths-auto docutils align-default">
- <thead>
- <tr class="row-odd"><th class="head"><p>Model</p></th>
- <th class="head"><p>Dataset</p></th>
- <th class="head"><p>Resolution</p></th>
- <th class="head"><p>Top-1</p></th>
- <th class="head"><p>Top-5</p></th>
- <th class="head"><p>Latency (HW)*<sub>T4</sub></p></th>
- <th class="head"><p>Latency (Production)**<sub>T4</sub></p></th>
- <th class="head"><p>Latency (HW)*<sub>Jetson Xavier NX</sub></p></th>
- <th class="head"><p>Latency (Production)**<sub>Jetson Xavier NX</sub></p></th>
- <th class="text-center head"><p>Latency <sub>Cascade Lake</sub></p></th>
- </tr>
- </thead>
- <tbody>
- <tr class="row-even"><td><p>ViT base</p></td>
- <td><p>ImageNet21K</p></td>
- <td><p>224x224</p></td>
- <td><p>84.15</p></td>
- <td><p>-</p></td>
- <td><p><strong>4.46ms</strong></p></td>
- <td><p><strong>4.60ms</strong></p></td>
- <td><p><strong>-</strong> *</p></td>
- <td><p><strong>-</strong></p></td>
- <td class="text-center"><p><strong>57.22ms</strong></p></td>
- </tr>
- <tr class="row-odd"><td><p>ViT large</p></td>
- <td><p>ImageNet21K</p></td>
- <td><p>224x224</p></td>
- <td><p>85.64</p></td>
- <td><p>-</p></td>
- <td><p><strong>12.81ms</strong></p></td>
- <td><p><strong>13.19ms</strong></p></td>
- <td><p><strong>-</strong> *</p></td>
- <td><p><strong>-</strong></p></td>
- <td class="text-center"><p><strong>187.22ms</strong></p></td>
- </tr>
- <tr class="row-even"><td><p>BEiT</p></td>
- <td><p>ImageNet21K</p></td>
- <td><p>224x224</p></td>
- <td><p>-</p></td>
- <td><p>-</p></td>
- <td><p><strong>-ms</strong></p></td>
- <td><p><strong>-ms</strong></p></td>
- <td><p><strong>-</strong> *</p></td>
- <td><p><strong>-</strong></p></td>
- <td class="text-center"><p><strong>-ms</strong></p></td>
- </tr>
- <tr class="row-odd"><td><p>EfficientNet B0</p></td>
- <td><p>ImageNet</p></td>
- <td><p>224x224</p></td>
- <td><p>77.62</p></td>
- <td><p>93.49</p></td>
- <td><p><strong>0.93ms</strong></p></td>
- <td><p><strong>1.38ms</strong></p></td>
- <td><p><strong>-</strong> *</p></td>
- <td><p><strong>-</strong></p></td>
- <td class="text-center"><p><strong>3.44ms</strong></p></td>
- </tr>
- <tr class="row-even"><td><p>RegNet Y200</p></td>
- <td><p>ImageNet</p></td>
- <td><p>224x224</p></td>
- <td><p>70.88</p></td>
- <td><p>89.35</p></td>
- <td><p><strong>0.63ms</strong></p></td>
- <td><p><strong>1.08ms</strong></p></td>
- <td><p><strong>2.16ms</strong></p></td>
- <td><p><strong>2.47ms</strong></p></td>
- <td class="text-center"><p><strong>2.06ms</strong></p></td>
- </tr>
- <tr class="row-odd"><td><p>RegNet Y400</p></td>
- <td><p>ImageNet</p></td>
- <td><p>224x224</p></td>
- <td><p>74.74</p></td>
- <td><p>91.46</p></td>
- <td><p><strong>0.80ms</strong></p></td>
- <td><p><strong>1.25ms</strong></p></td>
- <td><p><strong>2.62ms</strong></p></td>
- <td><p><strong>2.91ms</strong></p></td>
- <td class="text-center"><p><strong>2.87ms</strong></p></td>
- </tr>
- <tr class="row-even"><td><p>RegNet Y600</p></td>
- <td><p>ImageNet</p></td>
- <td><p>224x224</p></td>
- <td><p>76.18</p></td>
- <td><p>92.34</p></td>
- <td><p><strong>0.77ms</strong></p></td>
- <td><p><strong>1.22ms</strong></p></td>
- <td><p><strong>2.64ms</strong></p></td>
- <td><p><strong>2.93ms</strong></p></td>
- <td class="text-center"><p><strong>2.39ms</strong></p></td>
- </tr>
- <tr class="row-odd"><td><p>RegNet Y800</p></td>
- <td><p>ImageNet</p></td>
- <td><p>224x224</p></td>
- <td><p>77.07</p></td>
- <td><p>93.26</p></td>
- <td><p><strong>0.74ms</strong></p></td>
- <td><p><strong>1.19ms</strong></p></td>
- <td><p><strong>2.77ms</strong></p></td>
- <td><p><strong>3.04ms</strong></p></td>
- <td class="text-center"><p><strong>2.81ms</strong></p></td>
- </tr>
- <tr class="row-even"><td><p>ResNet 18</p></td>
- <td><p>ImageNet</p></td>
- <td><p>224x224</p></td>
- <td><p>70.6</p></td>
- <td><p>89.64</p></td>
- <td><p><strong>0.52ms</strong></p></td>
- <td><p><strong>0.95ms</strong></p></td>
- <td><p><strong>2.01ms</strong></p></td>
- <td><p><strong>2.30ms</strong></p></td>
- <td class="text-center"><p><strong>4.56ms</strong></p></td>
- </tr>
- <tr class="row-odd"><td><p>ResNet 34</p></td>
- <td><p>ImageNet</p></td>
- <td><p>224x224</p></td>
- <td><p>74.13</p></td>
- <td><p>91.7</p></td>
- <td><p><strong>0.92ms</strong></p></td>
- <td><p><strong>1.34ms</strong></p></td>
- <td><p><strong>3.57ms</strong></p></td>
- <td><p><strong>3.87ms</strong></p></td>
- <td class="text-center"><p><strong>7.64ms</strong></p></td>
- </tr>
- <tr class="row-even"><td><p>ResNet 50</p></td>
- <td><p>ImageNet</p></td>
- <td><p>224x224</p></td>
- <td><p>81.91</p></td>
- <td><p>93.0</p></td>
- <td><p><strong>1.03ms</strong></p></td>
- <td><p><strong>1.44ms</strong></p></td>
- <td><p><strong>4.78ms</strong></p></td>
- <td><p><strong>5.10ms</strong></p></td>
- <td class="text-center"><p><strong>9.25ms</strong></p></td>
- </tr>
- <tr class="row-odd"><td><p>MobileNet V3_large-150 epochs</p></td>
- <td><p>ImageNet</p></td>
- <td><p>224x224</p></td>
- <td><p>73.79</p></td>
- <td><p>91.54</p></td>
- <td><p><strong>0.67ms</strong></p></td>
- <td><p><strong>1.11ms</strong></p></td>
- <td><p><strong>2.42ms</strong></p></td>
- <td><p><strong>2.71ms</strong></p></td>
- <td class="text-center"><p><strong>1.76ms</strong></p></td>
- </tr>
- <tr class="row-even"><td><p>MobileNet V3_large-300 epochs</p></td>
- <td><p>ImageNet</p></td>
- <td><p>224x224</p></td>
- <td><p>74.52</p></td>
- <td><p>91.92</p></td>
- <td><p><strong>0.67ms</strong></p></td>
- <td><p><strong>1.11ms</strong></p></td>
- <td><p><strong>2.42ms</strong></p></td>
- <td><p><strong>2.71ms</strong></p></td>
- <td class="text-center"><p><strong>1.76ms</strong></p></td>
- </tr>
- <tr class="row-odd"><td><p>MobileNet V3_small</p></td>
- <td><p>ImageNet</p></td>
- <td><p>224x224</p></td>
- <td><p>67.45</p></td>
- <td><p>87.47</p></td>
- <td><p><strong>0.55ms</strong></p></td>
- <td><p><strong>0.96ms</strong></p></td>
- <td><p><strong>2.01ms</strong> *</p></td>
- <td><p><strong>2.35ms</strong></p></td>
- <td class="text-center"><p><strong>1.06ms</strong></p></td>
- </tr>
- <tr class="row-even"><td><p>MobileNet V2_w1</p></td>
- <td><p>ImageNet</p></td>
- <td><p>224x224</p></td>
- <td><p>73.08</p></td>
- <td><p>91.1</p></td>
- <td><p><strong>0.46ms</strong></p></td>
- <td><p><strong>0.89ms</strong></p></td>
- <td><p><strong>1.65ms</strong> *</p></td>
- <td><p><strong>1.90ms</strong></p></td>
- <td class="text-center"><p><strong>1.56ms</strong></p></td>
- </tr>
- </tbody>
- </table>
- <blockquote>
- <div><p><strong>NOTE:</strong> <br/></p>
- <ul class="simple">
- <li><p>Latency (HW)* - Hardware performance (not including IO)<br/></p></li>
- <li><p>Latency (Production)** - Production Performance (including IO)</p></li>
- <li><p>Performance measured for T4 and Jetson Xavier NX with TensorRT, using FP16 precision and batch size 1</p></li>
- <li><p>Performance measured for Cascade Lake CPU with OpenVINO, using FP16 precision and batch size 1</p></li>
- </ul>
- </div></blockquote>
- </section>
- <section id="pretrained-object-detection-pytorch-checkpoints">
- <h3>Pretrained Object Detection PyTorch Checkpoints<a class="headerlink" href="#pretrained-object-detection-pytorch-checkpoints" title="Permalink to this headline"></a></h3>
- <table class="colwidths-auto docutils align-default">
- <thead>
- <tr class="row-odd"><th class="head"><p>Model</p></th>
- <th class="head"><p>Dataset</p></th>
- <th class="head"><p>Resolution</p></th>
- <th class="head"><p>mAP<sup>val<br>0.5:0.95</sup></p></th>
- <th class="head"><p>Latency (HW)*<sub>T4</sub></p></th>
- <th class="head"><p>Latency (Production)**<sub>T4</sub></p></th>
- <th class="head"><p>Latency (HW)*<sub>Jetson Xavier NX</sub></p></th>
- <th class="head"><p>Latency (Production)**<sub>Jetson Xavier NX</sub></p></th>
- <th class="text-center head"><p>Latency <sub>Cascade Lake</sub></p></th>
- </tr>
- </thead>
- <tbody>
- <tr class="row-even"><td><p>SSD lite MobileNet v2</p></td>
- <td><p>COCO</p></td>
- <td><p>320x320</p></td>
- <td><p>21.5</p></td>
- <td><p><strong>0.77ms</strong></p></td>
- <td><p><strong>1.40ms</strong></p></td>
- <td><p><strong>5.28ms</strong></p></td>
- <td><p><strong>6.44ms</strong></p></td>
- <td class="text-center"><p><strong>4.13ms</strong></p></td>
- </tr>
- <tr class="row-odd"><td><p>SSD lite MobileNet v1</p></td>
- <td><p>COCO</p></td>
- <td><p>320x320</p></td>
- <td><p>24.3</p></td>
- <td><p><strong>1.55ms</strong></p></td>
- <td><p><strong>2.84ms</strong></p></td>
- <td><p><strong>8.07ms</strong></p></td>
- <td><p><strong>9.14ms</strong></p></td>
- <td class="text-center"><p><strong>22.76ms</strong></p></td>
- </tr>
- <tr class="row-even"><td><p>YOLOX nano</p></td>
- <td><p>COCO</p></td>
- <td><p>640x640</p></td>
- <td><p>26.77</p></td>
- <td><p><strong>2.47ms</strong></p></td>
- <td><p><strong>4.09ms</strong></p></td>
- <td><p><strong>11.49ms</strong></p></td>
- <td><p><strong>12.97ms</strong></p></td>
- <td class="text-center"><p><strong>-</strong></p></td>
- </tr>
- <tr class="row-odd"><td><p>YOLOX tiny</p></td>
- <td><p>COCO</p></td>
- <td><p>640x640</p></td>
- <td><p>37.18</p></td>
- <td><p><strong>3.16ms</strong></p></td>
- <td><p><strong>4.61ms</strong></p></td>
- <td><p><strong>15.23ms</strong></p></td>
- <td><p><strong>19.24ms</strong></p></td>
- <td class="text-center"><p><strong>-</strong></p></td>
- </tr>
- <tr class="row-even"><td><p>YOLOX small</p></td>
- <td><p>COCO</p></td>
- <td><p>640x640</p></td>
- <td><p>40.47</p></td>
- <td><p><strong>3.58ms</strong></p></td>
- <td><p><strong>4.94ms</strong></p></td>
- <td><p><strong>18.88ms</strong></p></td>
- <td><p><strong>22.48ms</strong></p></td>
- <td class="text-center"><p><strong>-</strong></p></td>
- </tr>
- <tr class="row-odd"><td><p>YOLOX medium</p></td>
- <td><p>COCO</p></td>
- <td><p>640x640</p></td>
- <td><p>46.4</p></td>
- <td><p><strong>6.40ms</strong></p></td>
- <td><p><strong>7.65ms</strong></p></td>
- <td><p><strong>39.22ms</strong></p></td>
- <td><p><strong>44.5ms</strong></p></td>
- <td class="text-center"><p><strong>-</strong></p></td>
- </tr>
- <tr class="row-even"><td><p>YOLOX large</p></td>
- <td><p>COCO</p></td>
- <td><p>640x640</p></td>
- <td><p>49.25</p></td>
- <td><p><strong>10.07ms</strong></p></td>
- <td><p><strong>11.12ms</strong></p></td>
- <td><p><strong>68.73ms</strong></p></td>
- <td><p><strong>77.01ms</strong></p></td>
- <td class="text-center"><p><strong>-</strong></p></td>
- </tr>
- </tbody>
- </table>
- <blockquote>
- <div><p><strong>NOTE:</strong> <br/></p>
- <ul class="simple">
- <li><p>Latency (HW)* - Hardware performance (not including IO)<br/></p></li>
- <li><p>Latency (Production)** - Production Performance (including IO)</p></li>
- <li><p>Latency performance measured for T4 and Jetson Xavier NX with TensorRT, using FP16 precision and batch size 1</p></li>
- <li><p>Latency performance measured for Cascade Lake CPU with OpenVINO, using FP16 precision and batch size 1</p></li>
- </ul>
- </div></blockquote>
- </section>
- <section id="pretrained-semantic-segmentation-pytorch-checkpoints">
- <h3>Pretrained Semantic Segmentation PyTorch Checkpoints<a class="headerlink" href="#pretrained-semantic-segmentation-pytorch-checkpoints" title="Permalink to this headline"></a></h3>
- <table class="colwidths-auto docutils align-default">
- <thead>
- <tr class="row-odd"><th class="head"><p>Model</p></th>
- <th class="head"><p>Dataset</p></th>
- <th class="head"><p>Resolution</p></th>
- <th class="head"><p>mIoU</p></th>
- <th class="head"><p>Latency b1<sub>T4</sub></p></th>
- <th class="text-center head"><p>Latency b1<sub>T4</sub> including IO</p></th>
- </tr>
- </thead>
- <tbody>
- <tr class="row-even"><td><p>DDRNet 23</p></td>
- <td><p>Cityscapes</p></td>
- <td><p>1024x2048</p></td>
- <td><p>80.26</p></td>
- <td><p><strong>7.62ms</strong></p></td>
- <td class="text-center"><p><strong>25.94ms</strong></p></td>
- </tr>
- <tr class="row-odd"><td><p>DDRNet 23 slim</p></td>
- <td><p>Cityscapes</p></td>
- <td><p>1024x2048</p></td>
- <td><p>78.01</p></td>
- <td><p><strong>3.56ms</strong></p></td>
- <td class="text-center"><p><strong>22.80ms</strong></p></td>
- </tr>
- <tr class="row-even"><td><p>STDC 1-Seg50</p></td>
- <td><p>Cityscapes</p></td>
- <td><p>512x1024</p></td>
- <td><p>75.07</p></td>
- <td><p><strong>2.83ms</strong></p></td>
- <td class="text-center"><p><strong>12.57ms</strong></p></td>
- </tr>
- <tr class="row-odd"><td><p>STDC 1-Seg75</p></td>
- <td><p>Cityscapes</p></td>
- <td><p>768x1536</p></td>
- <td><p>77.8</p></td>
- <td><p><strong>5.71ms</strong></p></td>
- <td class="text-center"><p><strong>26.70ms</strong></p></td>
- </tr>
- <tr class="row-even"><td><p>STDC 2-Seg50</p></td>
- <td><p>Cityscapes</p></td>
- <td><p>512x1024</p></td>
- <td><p>75.79</p></td>
- <td><p><strong>3.74ms</strong></p></td>
- <td class="text-center"><p><strong>13.89ms</strong></p></td>
- </tr>
- <tr class="row-odd"><td><p>STDC 2-Seg75</p></td>
- <td><p>Cityscapes</p></td>
- <td><p>768x1536</p></td>
- <td><p>78.93</p></td>
- <td><p><strong>7.35ms</strong></p></td>
- <td class="text-center"><p><strong>28.18ms</strong></p></td>
- </tr>
- <tr class="row-even"><td><p>RegSeg (exp48)</p></td>
- <td><p>Cityscapes</p></td>
- <td><p>1024x2048</p></td>
- <td><p>78.15</p></td>
- <td><p><strong>13.09ms</strong></p></td>
- <td class="text-center"><p><strong>41.88ms</strong></p></td>
- </tr>
- <tr class="row-odd"><td><p>Larger RegSeg (exp53)</p></td>
- <td><p>Cityscapes</p></td>
- <td><p>1024x2048</p></td>
- <td><p>79.2</p></td>
- <td><p><strong>24.82ms</strong></p></td>
- <td class="text-center"><p><strong>51.87ms</strong></p></td>
- </tr>
- <tr class="row-even"><td><p>ShelfNet LW 34</p></td>
- <td><p>COCO Segmentation (21 classes from PASCAL including background)</p></td>
- <td><p>512x512</p></td>
- <td><p>65.1</p></td>
- <td><p><strong>-</strong></p></td>
- <td class="text-center"><p><strong>-</strong></p></td>
- </tr>
- </tbody>
- </table>
- <blockquote>
- <div><p><strong>NOTE:</strong> Performance measured on T4 GPU with TensorRT, using FP16 precision and batch size 1 (latency), and not including IO</p>
- </div></blockquote>
- </section>
- </section>
- <section id="implemented-model-architectures">
- <h2>Implemented Model Architectures<a class="headerlink" href="#implemented-model-architectures" title="Permalink to this headline"></a></h2>
- <section id="image-classification">
- <h3>Image Classification<a class="headerlink" href="#image-classification" title="Permalink to this headline"></a></h3>
- <ul class="simple">
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/densenet.py">DenseNet (Densely Connected Convolutional Networks)</a> - Densely Connected Convolutional Networks <a class="reference external" href="https://arxiv.org/pdf/1608.06993.pdf">https://arxiv.org/pdf/1608.06993.pdf</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/dpn.py">DPN</a> - Dual Path Networks <a class="reference external" href="https://arxiv.org/pdf/1707.01629">https://arxiv.org/pdf/1707.01629</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/efficientnet.py">EfficientNet</a> - <a class="reference external" href="https://arxiv.org/abs/1905.11946">https://arxiv.org/abs/1905.11946</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/googlenet.py">GoogleNet</a> - <a class="reference external" href="https://arxiv.org/pdf/1409.4842">https://arxiv.org/pdf/1409.4842</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/lenet.py">LeNet</a> - <a class="reference external" href="http://yann.lecun.com/exdb/publis/pdf/lecun-01a.pdf">http://yann.lecun.com/exdb/publis/pdf/lecun-01a.pdf</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/mobilenet.py">MobileNet</a> - Efficient Convolutional Neural Networks for Mobile Vision Applications <a class="reference external" href="https://arxiv.org/pdf/1704.04861">https://arxiv.org/pdf/1704.04861</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/mobilenetv2.py">MobileNet v2</a> - <a class="reference external" href="https://arxiv.org/pdf/1801.04381">https://arxiv.org/pdf/1801.04381</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/mobilenetv3.py">MobileNet v3</a> - <a class="reference external" href="https://arxiv.org/pdf/1905.02244">https://arxiv.org/pdf/1905.02244</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/pnasnet.py">PNASNet</a> - Progressive Neural Architecture Search Networks <a class="reference external" href="https://arxiv.org/pdf/1712.00559">https://arxiv.org/pdf/1712.00559</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/preact_resnet.py">Pre-activation ResNet</a> - <a class="reference external" href="https://arxiv.org/pdf/1603.05027">https://arxiv.org/pdf/1603.05027</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/regnet.py">RegNet</a> - <a class="reference external" href="https://arxiv.org/pdf/2003.13678.pdf">https://arxiv.org/pdf/2003.13678.pdf</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/repvgg.py">RepVGG</a> - Making VGG-style ConvNets Great Again <a class="reference external" href="https://arxiv.org/pdf/2101.03697.pdf">https://arxiv.org/pdf/2101.03697.pdf</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/resnet.py">ResNet</a> - Deep Residual Learning for Image Recognition <a class="reference external" href="https://arxiv.org/pdf/1512.03385">https://arxiv.org/pdf/1512.03385</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/resnext.py">ResNeXt</a> - Aggregated Residual Transformations for Deep Neural Networks <a class="reference external" href="https://arxiv.org/pdf/1611.05431">https://arxiv.org/pdf/1611.05431</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/senet.py">SENet</a> - Squeeze-and-Excitation Networks <a class="reference external" href="https://arxiv.org/pdf/1709.01507">https://arxiv.org/pdf/1709.01507</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/shufflenet.py">ShuffleNet</a> - <a class="reference external" href="https://arxiv.org/pdf/1707.01083">https://arxiv.org/pdf/1707.01083</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/shufflenetv2.py">ShuffleNet v2</a> - Efficient Convolutional Neural Network for Mobile
- Devices <a class="reference external" href="https://arxiv.org/pdf/1807.11164">https://arxiv.org/pdf/1807.11164</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/classification_models/vgg.py">VGG</a> - Very Deep Convolutional Networks for Large-scale Image Recognition <a class="reference external" href="https://arxiv.org/pdf/1409.1556">https://arxiv.org/pdf/1409.1556</a></p></li>
- </ul>
- </section>
- <section id="object-detection">
- <h3>Object Detection<a class="headerlink" href="#object-detection" title="Permalink to this headline"></a></h3>
- <ul class="simple">
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/detection_models/csp_darknet53.py">CSP DarkNet</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/detection_models/darknet53.py">DarkNet-53</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/detection_models/ssd.py">SSD (Single Shot Detector)</a> - <a class="reference external" href="https://arxiv.org/pdf/1512.02325">https://arxiv.org/pdf/1512.02325</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/detection_models/yolox.py">YOLOX</a> - <a class="reference external" href="https://arxiv.org/abs/2107.08430">https://arxiv.org/abs/2107.08430</a></p></li>
- </ul>
- </section>
- <section id="semantic-segmentation">
- <h3>Semantic Segmentation<a class="headerlink" href="#semantic-segmentation" title="Permalink to this headline"></a></h3>
- <ul class="simple">
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/segmentation_models/ddrnet.py">DDRNet (Deep Dual-resolution Networks)</a> - <a class="reference external" href="https://arxiv.org/pdf/2101.06085.pdf">https://arxiv.org/pdf/2101.06085.pdf</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/segmentation_models/laddernet.py">LadderNet</a> - Multi-path networks based on U-Net for medical image segmentation <a class="reference external" href="https://arxiv.org/pdf/1810.07810">https://arxiv.org/pdf/1810.07810</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/segmentation_models/regseg.py">RegSeg</a> - Rethink Dilated Convolution for Real-time Semantic Segmentation <a class="reference external" href="https://arxiv.org/pdf/2111.09957">https://arxiv.org/pdf/2111.09957</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/segmentation_models/shelfnet.py">ShelfNet</a> - <a class="reference external" href="https://arxiv.org/pdf/1811.11254">https://arxiv.org/pdf/1811.11254</a></p></li>
- <li><p><a class="reference external" href="https://github.com/Deci-AI/super-gradients/blob/master/src/super_gradients/training/models/segmentation_models/stdc.py">STDC</a> - Rethinking BiSeNet For Real-time Semantic Segmentation <a class="reference external" href="https://arxiv.org/pdf/2104.13188">https://arxiv.org/pdf/2104.13188</a></p></li>
- </ul>
- </details>
- </section>
- </section>
- <section id="contributing">
- <h2>Contributing<a class="headerlink" href="#contributing" title="Permalink to this headline"></a></h2>
- <p>To learn about making a contribution to SuperGradients, please see our <a class="reference internal" href="CONTRIBUTING.html"><span class="doc std std-doc">Contribution page</span></a>.</p>
- <p>Our awesome contributors:</p>
- <a href="https://github.com/Deci-AI/super-gradients/graphs/contributors">
- <img src="https://contrib.rocks/image?repo=Deci-AI/super-gradients" />
- </a>
- <p><br/>Made with <a class="reference external" href="https://contrib.rocks">contrib.rocks</a>.</p>
- </section>
- <section id="citation">
- <h2>Citation<a class="headerlink" href="#citation" title="Permalink to this headline"></a></h2>
- <p>If you are using SuperGradients library or benchmarks in your research, please cite SuperGradients deep learning training library.</p>
- </section>
- <section id="community">
- <h2>Community<a class="headerlink" href="#community" title="Permalink to this headline"></a></h2>
- <p>If you want to be a part of SuperGradients&#8217; growing community, hear about all the exciting news and updates, need help, request advanced features,
- or want to file a bug or issue report, we would love to welcome you aboard!</p>
- <ul class="simple">
- <li><p>Slack is the place to be and ask questions about SuperGradients and get support. <a class="reference external" href="https://join.slack.com/t/supergradients-comm52/shared_invite/zt-10vz6o1ia-b_0W5jEPEnuHXm087K~t8Q">Click here to join our Slack</a></p></li>
- <li><p>To report a bug, <a class="reference external" href="https://github.com/Deci-AI/super-gradients/issues">file an issue</a> on GitHub.</p></li>
- <li><p>You can also join the <a class="reference external" href="https://deci.ai/resources/blog/">community mailing list</a>
- to ask questions about the project and receive announcements.</p></li>
- <li><p>For a short meeting with SuperGradients PM, use this <a class="reference external" href="https://calendly.com/ofer-baratz-deci/15min">link</a> and choose your preferred time.</p></li>
- </ul>
- </section>
- <section id="license">
- <h2>License<a class="headerlink" href="#license" title="Permalink to this headline"></a></h2>
- <p>This project is released under the <a class="reference internal" href="LICENSE.html"><span class="doc std std-doc">Apache 2.0 license</span></a>.</p>
- </section>
- <hr class="docutils" />
- <section id="deci-lab">
- <h2>Deci Lab<a class="headerlink" href="#deci-lab" title="Permalink to this headline"></a></h2>
- <p>Deci Lab supports all common frameworks and hardware, from Intel CPUs to Nvidia’s GPUs and Jetsons.</p>
- <p>You can enjoy immediate improvement in throughput, latency, and memory with the Deci Lab. It optimizes deep learning models using best-of-breed technologies, such as quantization and graph compilers.</p>
- <p>Get a complete benchmark of your models’ performance on different hardware and batch sizes in a single interface. Invite co-workers to collaborate on models and communicate your progress.</p>
- <p>Sign up for Deci Lab for free <a class="reference external" href="https://console.deci.ai/">here</a></p>
- </section>
- </section>
- </div>
- </div>
- <footer><div class="rst-footer-buttons" role="navigation" aria-label="Footer">
- <a href="index.html" class="btn btn-neutral float-left" title="Welcome to SuperGradients’s documentation!" accesskey="p" rel="prev"><span class="fa fa-arrow-circle-left" aria-hidden="true"></span> Previous</a>
- <a href="super_gradients.common.html" class="btn btn-neutral float-right" title="Common package" accesskey="n" rel="next">Next <span class="fa fa-arrow-circle-right" aria-hidden="true"></span></a>
- </div>
- <hr/>
- <div role="contentinfo">
- <p>© Copyright 2021, SuperGradients team.</p>
- </div>
- Built with <a href="https://www.sphinx-doc.org/">Sphinx</a> using a
- <a href="https://github.com/readthedocs/sphinx_rtd_theme">theme</a>
- provided by <a href="https://readthedocs.org">Read the Docs</a>.
- </footer>
- </div>
- </div>
- </section>
- </div>
- <script>
- // On DOM ready, enable the Read the Docs theme's sticky sidebar navigation.
- jQuery(function () {
- SphinxRtdTheme.Navigation.enable(true);
- });
- </script>
- </body>
- </html>
|