1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
|
- import dagshub
- import mlflow
- import os
- from label_studio_ml.model import LabelStudioMLBase
- from ultralytics import YOLO
class YoloLS(LabelStudioMLBase):
    """Label Studio ML backend serving YOLOv8 segmentation predictions.

    Reads DagsHub credentials/connection settings from the environment,
    resolves task image URIs to fetchable HTTPS URLs, runs a YOLO
    segmentation model, and converts the predicted masks into Label Studio
    ``polygonlabels`` results.
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Label Studio parses the labeling config into (from_name, schema)
        # pairs; this backend assumes a single control tag.
        from_name, schema = list(self.parsed_label_config.items())[0]
        self.from_name = from_name
        self.to_name = schema['to_name'][0]
        self.labels = schema['labels']

        # DagsHub connection settings come from the environment.
        self.user = os.getenv("DAGSHUB_USER_NAME")
        self.token = os.getenv("DAGSHUB_TOKEN")
        self.repo = os.getenv("DAGSHUB_REPO_NAME")
        self.host = os.getenv("DAGSHUB_CLIENT_HOST")

        dagshub.auth.add_app_token(token=self.token, host=self.host)
        dagshub.init(repo_name=self.repo, repo_owner=self.user)

        # Weights path is overridable via env var; defaults to the original
        # hard-coded pretrained checkpoint, so behavior is unchanged unless
        # YOLO_MODEL_PATH is set.
        self.model = YOLO(os.getenv("YOLO_MODEL_PATH", "yolov8n-seg.pt"),
                          task='segment')

    def image_uri_to_https(self, uri):
        """Resolve a Label Studio task image URI to an HTTPS URL.

        :param uri: an absolute http(s) URL, a ``repo://<commit>/<path>``
            DagsHub reference, or a host-relative path starting with ``/``.
        :return: an HTTPS URL the model can fetch.
        :raises FileNotFoundError: if the URI matches none of the schemes.
        """
        if uri.startswith('http'):
            return uri
        if uri.startswith('repo://'):
            commit, _, tree_path = uri.split("repo://")[-1].partition("/")
            return f"{self.host}/api/v1/repos/{self.user}/{self.repo}/raw/{commit}/{tree_path}"
        if uri.startswith('/'):
            return f"{self.host}{uri}"
        raise FileNotFoundError(f'Unknown URI {uri}')

    def predict(self, tasks, **kwargs):
        """Run inference and return predictions for a list of tasks.

        :param tasks: Label Studio tasks in JSON format.
        :return: one prediction dict per task, each with a ``result`` list of
            polygon annotations and a ``score`` equal to the lowest detection
            confidence for that image (0.0 when nothing was detected — the
            previous implementation leaked its 2.0 sentinel in that case).
        """
        results = []

        for task in tasks:
            url = self.image_uri_to_https(task['data']['image'])

            # Self-hosted DagsHub instances require basic-auth credentials
            # embedded in the URL. NOTE(review): this puts the token into the
            # URL, which may leak into logs — confirm this is acceptable.
            if self.host != "https://dagshub.com":
                url = url.replace("https://", f"https://{self.user}:{self.token}@")

            preds = self.model.predict(url)[0]

            boxes = preds.boxes.cpu().numpy()
            masks = preds.masks

            img_results = []
            confidences = []

            for i in range(len(boxes.cls)):
                conf = float(boxes.conf[i])
                confidences.append(conf)

                img_results.append({
                    'type': 'polygonlabels',
                    'to_name': self.to_name,
                    'from_name': self.from_name,
                    'image_rotation': 0,
                    'original_height': preds.orig_shape[0],
                    'original_width': preds.orig_shape[1],
                    'value': {
                        'closed': True,
                        # xyn is normalized [0, 1]; Label Studio expects
                        # percent coordinates, hence the * 100.
                        'points': (masks.xyn[i] * 100).astype(float).tolist(),
                        'polygonlabels': [self.labels[int(boxes.cls[i])]],
                    },
                    'score': conf,
                })

            results.append({
                'result': img_results,
                # Bug fix: the old code initialized lowest_conf to 2.0 and
                # returned it unchanged for images with no detections,
                # producing an out-of-range score.
                'score': min(confidences) if confidences else 0.0,
            })

        return results

    def fit(self, event, data, **kwargs):
        """Training hook — currently a no-op placeholder.

        :param event: Label Studio webhook event name that triggered training.
        :param data: event payload (annotations and project metadata).
        :return: dict of training outputs stored on the job result.
        """
        return {'random': 1}
|