ls_model_server.py

import cloudpickle
import hashlib
import io
import os
import random

import mlflow
import requests
from PIL import Image
from requests.auth import HTTPBasicAuth
from urllib.parse import urlparse

from label_studio_ml.model import LabelStudioMLBase
from label_studio_tools.core.utils.io import get_cache_dir, logger


class SquirrelDetectorLSModel(LabelStudioMLBase):

    def __init__(self, **kwargs):
        super(SquirrelDetectorLSModel, self).__init__(**kwargs)

        # Pre-initialize variables from the parsed labeling config.
        from_name, schema = list(self.parsed_label_config.items())[0]
        self.from_name = from_name
        self.to_name = schema['to_name'][0]
        self.labels = schema['labels']

        # DagsHub / MLflow credentials come from the environment.
        mlflow.set_tracking_uri(os.getenv("MLFLOW_TRACKING_URI"))
        self.user = os.getenv("DAGSHUB_USER_NAME")
        self.token = os.getenv("DAGSHUB_TOKEN")
        self.repo = os.getenv("DAGSHUB_REPO_NAME")

        # Load the latest registered version of the model from the MLflow Model Registry.
        client = mlflow.MlflowClient()
        name = 'SquirrelDetector'
        version = client.get_latest_versions(name=name)[0].version
        self.model_version = f'{name}:{version}'
        model_uri = f'models:/{name}/{version}'
        self.model = mlflow.pyfunc.load_model(model_uri)

    def image_uri_to_https(self, uri):
        # Resolve a task's image URI to an HTTPS URL; repo:// URIs point at files in the DagsHub repo.
        if uri.startswith('http'):
            return uri
        elif uri.startswith('repo://'):
            link_data = uri.split("repo://")[-1].split("/")
            commit, tree_path = link_data[0], "/".join(link_data[1:])
            return f"https://dagshub.com/api/v1/repos/{self.user}/{self.repo}/raw/{commit}/{tree_path}"
        raise FileNotFoundError(f'Unknown URI {uri}')

    def download_image(self, url):
        # Download the image to the local cache directory, keyed by a hash of the URL.
        cache_dir = get_cache_dir()
        parsed_url = urlparse(url)
        url_filename = os.path.basename(parsed_url.path)
        url_hash = hashlib.md5(url.encode()).hexdigest()[:6]
        filepath = os.path.join(cache_dir, url_hash + '__' + url_filename)
        if not os.path.exists(filepath):
            logger.info('Download {url} to {filepath}'.format(url=url, filepath=filepath))
            auth = HTTPBasicAuth(self.user, self.token)
            r = requests.get(url, stream=True, auth=auth)
            r.raise_for_status()
            with io.open(filepath, mode='wb') as fout:
                fout.write(r.content)
        return filepath

    def predict_task(self, task):
        uri = task['data']['image']
        url = self.image_uri_to_https(uri)
        image_path = self.download_image(url)

        img = Image.open(image_path)
        img_w, img_h = img.size
        objs = self.model.predict(img)

        # Track the lowest confidence across detections to use as the overall prediction score.
        lowest_conf = 2.0
        img_results = []
        for obj in objs:
            x, y, w, h, conf, cls = obj
            cls = int(cls)
            conf = float(conf)

            # Convert center-based pixel coordinates to Label Studio's top-left percentages.
            x = 100 * float(x - w / 2) / img_w
            y = 100 * float(y - h / 2) / img_h
            w = 100 * float(w) / img_w
            h = 100 * float(h) / img_h

            if conf < lowest_conf:
                lowest_conf = conf

            label = self.labels[cls]
            img_results.append({
                'from_name': self.from_name,
                'to_name': self.to_name,
                'type': 'rectanglelabels',
                'value': {
                    'rectanglelabels': [label],
                    'x': x,
                    'y': y,
                    'width': w,
                    'height': h,
                },
                'score': conf
            })

        result = {
            'result': img_results,
            'model_version': self.model_version,
            'task': task['id']
        }
        if lowest_conf <= 1.0:
            result['score'] = lowest_conf

        # Push the prediction back to the DagsHub annotations API for this repo.
        url = f'https://dagshub.com/{self.user}/{self.repo}/annotations/git/api/predictions/'
        auth = HTTPBasicAuth(self.user, self.token)
        res = requests.post(url, auth=auth, json=result)
        if res.status_code != 200:
            print(res)

    def predict(self, tasks, **kwargs):
        """ This is where inference happens:
            model returns the list of predictions based on input list of tasks
            :param tasks: Label Studio tasks in JSON format
        """
        for task in tasks:
            self.predict_task(task)
        return []

    def fit(self, completions, workdir=None, **kwargs):
        """ This is where training happens: train your model given list of completions,
            then returns dict with created links and resources
            :param completions: aka annotations, the labeling results from Label Studio
            :param workdir: current working directory for ML backend
        """
        # save some training outputs to the job result
        return {'random': random.randint(1, 10)}
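For a quick local smoke test outside the Label Studio ML backend server, the class can be instantiated directly. This is only a sketch: it assumes the DagsHub/MLflow environment variables read in `__init__` are set, that `LabelStudioMLBase` accepts a `label_config` keyword argument it parses into `parsed_label_config` (as in label-studio-ml 1.x), and the labeling config, task id, commit hash, and image path below are made-up placeholders.

# Hypothetical local smoke test -- not part of the original file.
import os

# Assumed environment; replace the placeholders with real values before running.
os.environ["MLFLOW_TRACKING_URI"] = "https://dagshub.com/<user>/<repo>.mlflow"
os.environ["DAGSHUB_USER_NAME"] = "<user>"
os.environ["DAGSHUB_TOKEN"] = "<token>"
os.environ["DAGSHUB_REPO_NAME"] = "<repo>"

from ls_model_server import SquirrelDetectorLSModel

# A minimal rectangle-labels config of the shape __init__ expects to find after parsing.
LABEL_CONFIG = """
<View>
  <Image name="image" value="$image"/>
  <RectangleLabels name="label" toName="image">
    <Label value="Squirrel"/>
  </RectangleLabels>
</View>
"""

model = SquirrelDetectorLSModel(label_config=LABEL_CONFIG)

# Hypothetical task; the commit hash and image path are placeholders.
task = {
    "id": 1,
    "data": {"image": "repo://<commit>/data/images/example.jpg"},
}
model.predict([task])

Note that `predict_task` downloads the image and posts the resulting prediction back to the DagsHub annotations API, so this sketch exercises the full round trip rather than a purely offline call.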