Register
Login
Resources
Docs Blog Datasets Glossary Case Studies Tutorials & Webinars
Product
Data Engine LLMs Platform Enterprise
Pricing Explore
Connect to our Discord channel

#475 Feature/sg 000 clean start prints

Merged
Ghost merged 1 commits into Deci-AI:master from deci-ai:feature/SG-000_clean_start_prints
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
  1. from typing import Union, Tuple
  2. from torch import nn
  3. from .conv_bn_act_block import ConvBNAct
  4. from .repvgg_block import RepVGGBlock
  5. from .se_blocks import SEBlock, EffectiveSEBlock
  6. def ConvBNReLU(
  7. in_channels: int,
  8. out_channels: int,
  9. kernel_size: Union[int, Tuple[int, int]],
  10. stride: Union[int, Tuple[int, int]] = 1,
  11. padding: Union[int, Tuple[int, int]] = 0,
  12. dilation: Union[int, Tuple[int, int]] = 1,
  13. groups: int = 1,
  14. bias: bool = True,
  15. padding_mode: str = "zeros",
  16. use_normalization: bool = True,
  17. eps: float = 1e-5,
  18. momentum: float = 0.1,
  19. affine: bool = True,
  20. track_running_stats: bool = True,
  21. device=None,
  22. dtype=None,
  23. use_activation: bool = True,
  24. inplace: bool = False,
  25. ):
  26. """
  27. Class for Convolution2d-Batchnorm2d-Relu layer. Default behaviour is Conv-BN-Relu. To exclude Batchnorm module use
  28. `use_normalization=False`, to exclude Relu activation use `use_activation=False`.
  29. It exists to keep backward compatibility and will be superseeded by ConvBNAct in future releases.
  30. For new classes please use ConvBNAct instead.
  31. For convolution arguments documentation see `nn.Conv2d`.
  32. For batchnorm arguments documentation see `nn.BatchNorm2d`.
  33. For relu arguments documentation see `nn.Relu`.
  34. """
  35. return ConvBNAct(
  36. in_channels=in_channels,
  37. out_channels=out_channels,
  38. kernel_size=kernel_size,
  39. stride=stride,
  40. padding=padding,
  41. dilation=dilation,
  42. groups=groups,
  43. bias=bias,
  44. padding_mode=padding_mode,
  45. use_normalization=use_normalization,
  46. eps=eps,
  47. momentum=momentum,
  48. affine=affine,
  49. track_running_stats=track_running_stats,
  50. device=device,
  51. dtype=dtype,
  52. activation_type=nn.ReLU if use_activation else None,
  53. activation_kwargs=dict(inplace=inplace),
  54. )
# Public API of this module: re-exported building blocks plus the legacy ConvBNReLU factory.
__all__ = ["ConvBNAct", "RepVGGBlock", "SEBlock", "EffectiveSEBlock", "ConvBNReLU"]
Discard
Tip!

Press p or to see the previous file or, n or to see the next file