demo_pytorch2onnx.py (3.4 KB)

import sys
import onnx
import os
import argparse
import numpy as np
import cv2
import onnxruntime
import torch

from tool.utils import *
from models import Yolov4
from demo_darknet2onnx import detect


def transform_to_onnx(weight_file, batch_size, n_classes, IN_IMAGE_H, IN_IMAGE_W):
    model = Yolov4(n_classes=n_classes, inference=True)

    pretrained_dict = torch.load(weight_file, map_location=torch.device('cuda'))
    model.load_state_dict(pretrained_dict)

    input_names = ["input"]
    output_names = ['boxes', 'confs']

    dynamic = False
    if batch_size <= 0:
        dynamic = True

    if dynamic:
        x = torch.randn((1, 3, IN_IMAGE_H, IN_IMAGE_W), requires_grad=True)
        onnx_file_name = "yolov4_-1_3_{}_{}_dynamic.onnx".format(IN_IMAGE_H, IN_IMAGE_W)
        dynamic_axes = {"input": {0: "batch_size"}, "boxes": {0: "batch_size"}, "confs": {0: "batch_size"}}
        # Export the model
        print('Export the onnx model ...')
        torch.onnx.export(model,
                          x,
                          onnx_file_name,
                          export_params=True,
                          opset_version=11,
                          do_constant_folding=True,
                          input_names=input_names, output_names=output_names,
                          dynamic_axes=dynamic_axes)
        print('Onnx model exporting done')
        return onnx_file_name
    else:
        x = torch.randn((batch_size, 3, IN_IMAGE_H, IN_IMAGE_W), requires_grad=True)
        onnx_file_name = "yolov4_{}_3_{}_{}_static.onnx".format(batch_size, IN_IMAGE_H, IN_IMAGE_W)
        # Export the model
        print('Export the onnx model ...')
        torch.onnx.export(model,
                          x,
                          onnx_file_name,
                          export_params=True,
                          opset_version=11,
                          do_constant_folding=True,
                          input_names=input_names, output_names=output_names,
                          dynamic_axes=None)
        print('Onnx model exporting done')
        return onnx_file_name


def main(weight_file, image_path, batch_size, n_classes, IN_IMAGE_H, IN_IMAGE_W):
    if batch_size <= 0:
        onnx_path_demo = transform_to_onnx(weight_file, batch_size, n_classes, IN_IMAGE_H, IN_IMAGE_W)
    else:
        # Transform to onnx at the specified batch size
        transform_to_onnx(weight_file, batch_size, n_classes, IN_IMAGE_H, IN_IMAGE_W)
        # Transform to onnx for the demo (batch size 1)
        onnx_path_demo = transform_to_onnx(weight_file, 1, n_classes, IN_IMAGE_H, IN_IMAGE_W)

    session = onnxruntime.InferenceSession(onnx_path_demo)
    # session = onnx.load(onnx_path)
    print("The model expects input shape: ", session.get_inputs()[0].shape)

    image_src = cv2.imread(image_path)
    detect(session, image_src)


if __name__ == '__main__':
    print("Converting to onnx and running demo ...")
    if len(sys.argv) == 7:
        weight_file = sys.argv[1]
        image_path = sys.argv[2]
        batch_size = int(sys.argv[3])
        n_classes = int(sys.argv[4])
        IN_IMAGE_H = int(sys.argv[5])
        IN_IMAGE_W = int(sys.argv[6])
        main(weight_file, image_path, batch_size, n_classes, IN_IMAGE_H, IN_IMAGE_W)
    else:
        print('Please run this way:\n')
        print('  python demo_pytorch2onnx.py <weight_file> <image_path> <batch_size> <n_classes> <IN_IMAGE_H> <IN_IMAGE_W>')
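A usage note, not part of the original file: the script expects six positional arguments, so a typical invocation (the file names below are placeholders, not paths from the repository) would look like:

python demo_pytorch2onnx.py yolov4.pth data/dog.jpg 4 80 608 608

For sanity-checking the exported file without the detect() helper from demo_darknet2onnx, here is a minimal sketch that validates the ONNX graph and runs one forward pass with onnxruntime. The input name "input" and output names "boxes" and "confs" come from the export call above; the preprocessing (resize, BGR to RGB, scale to [0, 1], NCHW layout) is an assumption about what the YOLOv4 graph expects, not the repository's own pipeline.

import cv2
import numpy as np
import onnx
import onnxruntime

# Name produced by transform_to_onnx for batch_size=1 and a 608x608 input; adjust to your export.
onnx_path = "yolov4_1_3_608_608_static.onnx"
onnx.checker.check_model(onnx.load(onnx_path))  # raises if the exported graph is malformed

session = onnxruntime.InferenceSession(onnx_path)
_, _, in_h, in_w = session.get_inputs()[0].shape  # static export, so these are plain ints

img = cv2.imread("data/dog.jpg")  # hypothetical test image
blob = cv2.cvtColor(cv2.resize(img, (in_w, in_h)), cv2.COLOR_BGR2RGB)
blob = blob.astype(np.float32) / 255.0
blob = np.transpose(blob, (2, 0, 1))[np.newaxis, ...]  # HWC -> NCHW, plus batch dimension

# Outputs are returned in the order declared at export time: boxes, confs.
boxes, confs = session.run(None, {"input": blob})
print("boxes:", boxes.shape, "confs:", confs.shape)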