Register
Login
Resources
Docs Blog Datasets Glossary Case Studies Tutorials & Webinars
Product
Data Engine LLMs Platform Enterprise
Pricing Explore
Connect to our Discord channel

#546 Features/sg 409 check all params used

Merged
Ghost merged 1 commit into Deci-AI:master from deci-ai:features/SG-409-check-all-params-used
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
  1. from typing import Union, Tuple
  2. from torch import nn
  3. from .conv_bn_act_block import ConvBNAct
  4. from .repvgg_block import RepVGGBlock
  5. from .se_blocks import SEBlock, EffectiveSEBlock
  6. from .skip_connections import Residual, SkipConnection, CrossModelSkipConnection, BackboneInternalSkipConnection, HeadInternalSkipConnection
  7. def ConvBNReLU(
  8. in_channels: int,
  9. out_channels: int,
  10. kernel_size: Union[int, Tuple[int, int]],
  11. stride: Union[int, Tuple[int, int]] = 1,
  12. padding: Union[int, Tuple[int, int]] = 0,
  13. dilation: Union[int, Tuple[int, int]] = 1,
  14. groups: int = 1,
  15. bias: bool = True,
  16. padding_mode: str = "zeros",
  17. use_normalization: bool = True,
  18. eps: float = 1e-5,
  19. momentum: float = 0.1,
  20. affine: bool = True,
  21. track_running_stats: bool = True,
  22. device=None,
  23. dtype=None,
  24. use_activation: bool = True,
  25. inplace: bool = False,
  26. ):
  27. """
  28. Class for Convolution2d-Batchnorm2d-Relu layer. Default behaviour is Conv-BN-Relu. To exclude Batchnorm module use
  29. `use_normalization=False`, to exclude Relu activation use `use_activation=False`.
  30. It exists to keep backward compatibility and will be superseeded by ConvBNAct in future releases.
  31. For new classes please use ConvBNAct instead.
  32. For convolution arguments documentation see `nn.Conv2d`.
  33. For batchnorm arguments documentation see `nn.BatchNorm2d`.
  34. For relu arguments documentation see `nn.Relu`.
  35. """
  36. return ConvBNAct(
  37. in_channels=in_channels,
  38. out_channels=out_channels,
  39. kernel_size=kernel_size,
  40. stride=stride,
  41. padding=padding,
  42. dilation=dilation,
  43. groups=groups,
  44. bias=bias,
  45. padding_mode=padding_mode,
  46. use_normalization=use_normalization,
  47. eps=eps,
  48. momentum=momentum,
  49. affine=affine,
  50. track_running_stats=track_running_stats,
  51. device=device,
  52. dtype=dtype,
  53. activation_type=nn.ReLU if use_activation else None,
  54. activation_kwargs=dict(inplace=inplace),
  55. )
  56. __all__ = [
  57. "ConvBNAct",
  58. "RepVGGBlock",
  59. "SEBlock",
  60. "EffectiveSEBlock",
  61. "ConvBNReLU",
  62. "Residual",
  63. "SkipConnection",
  64. "CrossModelSkipConnection",
  65. "BackboneInternalSkipConnection",
  66. "HeadInternalSkipConnection",
  67. ]
Discard
Tip!

Press p to see the previous file, or n to see the next file