Skip to content

Commit c54c857

Browse files
Vaghinak Basentsyan
authored and committed
2 parents 16d1ac0 + 877d513 commit c54c857

File tree

16 files changed

+839
-102
lines changed

16 files changed

+839
-102
lines changed

src/superannotate/lib/app/interface/sdk_interface.py

Lines changed: 12 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -2445,23 +2445,24 @@ def upload_annotations_from_folder_to_project(
24452445
"""
24462446

24472447
project_name, folder_name = extract_project_folder(project)
2448+
project_folder_name = project_name + (f"/{folder_name}" if folder_name else "")
24482449

24492450
if recursive_subfolders:
24502451
logger.info(
24512452
"When using recursive subfolder parsing same name annotations in different "
24522453
"subfolders will overwrite each other.",
24532454
)
2454-
logger.info("The JSON files should follow a specific naming convention, matching file names already present "
2455-
"on the platform. Existing annotations will be overwritten")
2455+
logger.info(
2456+
"The JSON files should follow a specific naming convention, matching file names already present "
2457+
"on the platform. Existing annotations will be overwritten"
2458+
)
24562459

24572460
annotation_paths = get_annotation_paths(
24582461
folder_path, from_s3_bucket, recursive_subfolders
24592462
)
2460-
if not annotation_paths:
2461-
raise AppException("Could not find annotations matching existing items on the platform.")
24622463

24632464
logger.info(
2464-
"Uploading %s annotations to project %s.", len(annotation_paths), project_name
2465+
f"Uploading {len(annotation_paths)} annotations from {folder_path} to the project {project_folder_name}."
24652466
)
24662467
response = controller.upload_annotations_from_folder(
24672468
project_name=project_name,
@@ -2506,31 +2507,28 @@ def upload_preannotations_from_folder_to_project(
25062507
:rtype: tuple of list of strs
25072508
"""
25082509
project_name, folder_name = extract_project_folder(project)
2510+
project_folder_name = project_name + (f"/{folder_name}" if folder_name else "")
25092511
project = controller.get_project_metadata(project_name).data
25102512
if project["project"].project_type in [
25112513
constances.ProjectType.VIDEO.value,
25122514
constances.ProjectType.DOCUMENT.value,
25132515
]:
25142516
raise AppException(LIMITED_FUNCTIONS[project["project"].project_type])
2515-
25162517
if recursive_subfolders:
25172518
logger.info(
2518-
"When using recursive subfolder parsing same name annotations in different subfolders will overwrite each other.",
2519+
"When using recursive subfolder parsing same name annotations in different "
2520+
"subfolders will overwrite each other.",
25192521
)
2520-
25212522
logger.info(
2522-
"The JSON files should follow specific naming convention. For Vector projects they should be named '<image_name>___objects.json', for Pixel projects JSON file should be names '<image_name>___pixel.json' and also second mask image file should be present with the name '<image_name>___save.png'. In both cases image with <image_name> should be already present on the platform."
2523+
"The JSON files should follow a specific naming convention, matching file names already present "
2524+
"on the platform. Existing annotations will be overwritten"
25232525
)
25242526
logger.info("Existing annotations will be overwritten.",)
2525-
logger.info(
2526-
"Uploading all annotations from %s to project %s.", folder_path, project_name
2527-
)
2528-
25292527
annotation_paths = get_annotation_paths(
25302528
folder_path, from_s3_bucket, recursive_subfolders
25312529
)
25322530
logger.info(
2533-
"Uploading %s annotations to project %s.", len(annotation_paths), project_name
2531+
f"Uploading {len(annotation_paths)} annotations from {folder_path} to the project {project_folder_name}."
25342532
)
25352533
response = controller.upload_annotations_from_folder(
25362534
project_name=project_name,

src/superannotate/lib/core/helpers.py

Lines changed: 77 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
def map_annotation_classes_name(annotation_classes, reporter: Reporter) -> dict:
99
classes_data = defaultdict(dict)
1010
for annotation_class in annotation_classes:
11-
class_info = {"id": annotation_class.uuid}
11+
class_info = {"id": annotation_class.uuid, "attribute_groups": {}}
1212
if annotation_class.attribute_groups:
1313
for attribute_group in annotation_class.attribute_groups:
1414
attribute_group_data = defaultdict(dict)
@@ -27,12 +27,11 @@ def map_annotation_classes_name(annotation_classes, reporter: Reporter) -> dict:
2727
" Only one of the annotation class attribute groups will be used."
2828
" This will result in errors in annotation upload."
2929
)
30-
class_info["attribute_groups"] = {
31-
attribute_group["name"]: {
32-
"id": attribute_group["id"],
33-
"attributes": attribute_group_data,
34-
}
30+
class_info["attribute_groups"][attribute_group["name"]] = {
31+
"id": attribute_group["id"],
32+
"attributes": attribute_group_data,
3533
}
34+
3635
if annotation_class.name in classes_data.keys():
3736
reporter.log_warning(
3837
f"Duplicate annotation class name {annotation_class.name}."
@@ -43,6 +42,16 @@ def map_annotation_classes_name(annotation_classes, reporter: Reporter) -> dict:
4342
return classes_data
4443

4544

45+
def fill_document_tags(
    annotations: dict, annotation_classes: dict,
):
    """Replace tag names in *annotations* with their class IDs.

    Tags that have no entry in *annotation_classes* are dropped.
    Mutates ``annotations["tags"]`` in place; returns None.
    """
    annotations["tags"] = [
        annotation_classes[tag]["id"]
        for tag in annotations["tags"]
        if annotation_classes.get(tag)
    ]
53+
54+
4655
def fill_annotation_ids(
4756
annotations: dict,
4857
annotation_classes_name_maps: dict,
@@ -95,7 +104,10 @@ def fill_annotation_ids(
95104
reporter.log_warning(
96105
f"Couldn't find annotation group {attribute['groupName']}."
97106
)
98-
reporter.store_message("missing_attribute_groups", f"{annotation['className']}.{attribute['groupName']}")
107+
reporter.store_message(
108+
"missing_attribute_groups",
109+
f"{annotation['className']}.{attribute['groupName']}",
110+
)
99111
continue
100112
attribute["groupId"] = annotation_classes_name_maps[annotation_class_name][
101113
"attribute_groups"
@@ -118,12 +130,16 @@ def fill_annotation_ids(
118130
][attribute["groupName"]]["attributes"][attribute["name"]]
119131

120132

121-
def convert_to_video_editor_json(data: dict, class_name_mapper: dict, reporter: Reporter):
133+
def convert_to_video_editor_json(
134+
data: dict, class_name_mapper: dict, reporter: Reporter
135+
):
136+
id_generator = ClassIdGenerator()
137+
122138
def safe_time(timestamp):
123139
return "0" if str(timestamp) == "0.0" else timestamp
124140

125141
def convert_timestamp(timestamp):
126-
return timestamp / 10 ** 6
142+
return timestamp / 10 ** 6 if timestamp else "0"
127143

128144
editor_data = {
129145
"instances": [],
@@ -132,20 +148,26 @@ def convert_timestamp(timestamp):
132148
"metadata": {
133149
"duration": convert_timestamp(data["metadata"]["duration"]),
134150
"name": data["metadata"]["name"],
135-
"width": data["metadata"]["width"],
136-
"height": data["metadata"]["height"],
151+
"width": data["metadata"].get("width"),
152+
"height": data["metadata"].get("height"),
137153
},
138154
}
139155
for instance in data["instances"]:
140156
meta = instance["meta"]
141-
class_name = meta["className"]
157+
class_name = meta.get("className")
142158
editor_instance = {
143159
"attributes": [],
144160
"timeline": {},
145161
"type": meta["type"],
146-
"classId": class_name_mapper.get(class_name, {}).get("id", -1),
147-
"locked": True,
162+
# TODO check
163+
"locked": False,
148164
}
165+
if class_name:
166+
editor_instance["classId"] = class_name_mapper.get(class_name, {}).get(
167+
"id", id_generator.send(class_name)
168+
)
169+
else:
170+
editor_instance["classId"] = id_generator.send("unknown_class")
149171
if meta.get("pointLabels", None):
150172
editor_instance["pointLabels"] = meta["pointLabels"]
151173
active_attributes = set()
@@ -168,20 +190,41 @@ def convert_timestamp(timestamp):
168190
editor_instance["timeline"][timestamp]["points"] = timestamp_data[
169191
"points"
170192
]
171-
172-
if not class_name_mapper.get(meta["className"], None):
193+
if not class_name:
194+
continue
195+
elif not class_name_mapper.get(class_name):
173196
reporter.store_message("missing_classes", meta["className"])
174197
continue
175198

176199
existing_attributes_in_current_instance = set()
177200
for attribute in timestamp_data["attributes"]:
178-
group_name, attr_name = attribute.get("groupName"), attribute.get("name")
179-
if not class_name_mapper[class_name].get("attribute_groups", {}).get(group_name):
180-
reporter.store_message("missing_attribute_groups", f"{class_name}.{group_name}")
181-
elif not class_name_mapper[class_name]["attribute_groups"][group_name].get("attributes", {}).get(attr_name):
182-
reporter.store_message("missing_attributes", f"{class_name}.{group_name}.{attr_name}")
201+
group_name, attr_name = (
202+
attribute.get("groupName"),
203+
attribute.get("name"),
204+
)
205+
if (
206+
not class_name_mapper[class_name]
207+
.get("attribute_groups", {})
208+
.get(group_name)
209+
):
210+
reporter.store_message(
211+
"missing_attribute_groups", f"{class_name}.{group_name}"
212+
)
213+
elif (
214+
not class_name_mapper[class_name]["attribute_groups"][
215+
group_name
216+
]
217+
.get("attributes", {})
218+
.get(attr_name)
219+
):
220+
reporter.store_message(
221+
"missing_attributes",
222+
f"{class_name}.{group_name}.{attr_name}",
223+
)
183224
else:
184-
existing_attributes_in_current_instance.add((group_name, attr_name))
225+
existing_attributes_in_current_instance.add(
226+
(group_name, attr_name)
227+
)
185228
attributes_to_add = (
186229
existing_attributes_in_current_instance - active_attributes
187230
)
@@ -227,3 +270,15 @@ def default(self, obj):
227270
if isinstance(obj, set):
228271
return list(obj)
229272
return json.JSONEncoder.default(self, obj)
273+
274+
275+
class ClassIdGenerator:
    """Allocate stable negative IDs for class names missing from the mapper.

    Each distinct name receives the next unused negative integer
    (-1, -2, ...); repeated calls with the same name return the same ID.
    """

    def __init__(self):
        # Plain dict instead of defaultdict(int): send() always guards with
        # a membership test, so a zero-producing default factory could only
        # mask bugs by silently inserting 0 on an unguarded lookup.
        self.classes = {}
        # Next ID to hand out; decremented after each new allocation.
        self.idx = -1

    def send(self, class_name: str) -> int:
        """Return the ID for *class_name*, allocating a new one if unseen."""
        if class_name not in self.classes:
            self.classes[class_name] = self.idx
            self.idx -= 1
        return self.classes[class_name]

src/superannotate/lib/core/plugin.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -259,7 +259,7 @@ def get_extractable_frames(
259259
total_with_fps = sum(
260260
1
261261
for _ in VideoPlugin.frames_generator(
262-
video_path,start_time, end_time, target_fps, log=False
262+
video_path, start_time, end_time, target_fps, log=False
263263
)
264264
)
265265
zero_fill_count = len(str(total))
@@ -288,7 +288,7 @@ def extract_frames(
288288
for frame in VideoPlugin.frames_generator(
289289
video_path, start_time, end_time, target_fps
290290
):
291-
if len(extracted_frames_paths) > limit:
291+
if len(extracted_frames_paths) >= limit:
292292
break
293293
path = str(
294294
Path(extract_path)

src/superannotate/lib/core/types.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ class Metadata(BaseModel):
4848

4949

5050
class PointLabels(BaseModel):
51-
__root__: Dict[constr(regex=r'^[0-9]*$'), str]
51+
__root__: Dict[constr(regex=r"^[0-9]*$"), str]
5252

5353

5454
class BaseInstance(BaseModel):
@@ -227,10 +227,10 @@ class VideoInstance(BaseModel):
227227

228228
class VideoAnnotation(BaseModel):
229229
metadata: VideoMetaData
230-
instances: List[VideoInstance]
231-
tags: List[str]
230+
instances: Optional[List[VideoInstance]]
231+
tags: Optional[List[str]]
232232

233233

234234
class DocumentAnnotation(BaseModel):
235235
instances: list
236-
tags: List[str]
236+
tags: Optional[List[str]]

0 commit comments

Comments
 (0)