Commit e219efb

Vaghinak Basentsyan authored and committed
Added limitation validation
1 parent 00b224c commit e219efb

File tree

11 files changed: +687 −320 lines


.pre-commit-config.yaml

Lines changed: 4 additions & 4 deletions
@@ -36,9 +36,9 @@ repos:
     hooks:
       - id: black
         name: Uncompromising Code Formatter (black)
-  # - repo: 'https://github.com/asottile/dead'
-  #   rev: v1.3.0
-  #   hooks:
-  #     - id: dead
+  - repo: 'https://github.com/asottile/dead'
+    rev: v1.3.0
+    hooks:
+      - id: dead
         files: src/
         exclude: src/lib/app/analytics | src/lib/app/converters | src/lib/app/input_convertors

src/superannotate/lib/app/common.py

Lines changed: 0 additions & 13 deletions
@@ -180,12 +180,6 @@ def hex_to_rgb(hex_string):
     return tuple(int(h[i : i + 2], 16) for i in (0, 2, 4))


-def rgb_to_hex(rgb_tuple):
-    """Converts RGB values to HEX values
-    """
-    return "#%02x%02x%02x" % rgb_tuple
-
-
 def blue_color_generator(n, hex_values=True):
     """ Blue colors generator for SuperAnnotate blue mask.
     """
@@ -292,13 +286,6 @@ def save_web_format(output_dir, classes, files_dict):
         json.dump(classes, fw)


-def dump_output(output_dir, platform, classes, files_dict):
-    if platform == "Web":
-        save_web_format(output_dir, classes, files_dict)
-    else:
-        save_desktop_format(output_dir, classes, files_dict)
-
-
 def write_to_json(output_path, json_data):
     with open(output_path, "w") as fw:
         json.dump(json_data, fw, indent=2)
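For reference, hex_to_rgb is the colour helper this commit keeps in common.py; a minimal runnable sketch, assuming the lines omitted from the hunk strip a leading "#" into h:

def hex_to_rgb(hex_string):
    """Converts HEX values to RGB values"""
    h = hex_string.lstrip("#")  # assumed preprocessing; not shown in the hunk above
    return tuple(int(h[i : i + 2], 16) for i in (0, 2, 4))

print(hex_to_rgb("#0080ff"))  # -> (0, 128, 255)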

src/superannotate/lib/app/interface/sdk_interface.py

Lines changed: 39 additions & 208 deletions
@@ -6,9 +6,6 @@
 import tempfile
 import time
 import uuid
-from collections import Counter
-from collections import namedtuple
-from io import BytesIO
 from pathlib import Path
 from typing import Iterable
 from typing import List
@@ -44,7 +41,6 @@
 from lib.app.serializers import TeamSerializer
 from lib.core.enums import ImageQuality
 from lib.core.exceptions import AppException
-from lib.core.exceptions import AppValidationException
 from lib.infrastructure.controller import Controller
 from plotly.subplots import make_subplots
 from pydantic import EmailStr
@@ -595,6 +591,7 @@ def copy_image(
     )


+# TODO refactor
 @Trackable
 @validate_arguments
 def upload_images_from_public_urls_to_project(
@@ -625,100 +622,22 @@ def upload_images_from_public_urls_to_project(
     :rtype: tuple of list of strs
     """

-    if img_names is not None and len(img_names) != len(img_urls):
-        raise AppException("Not all image URLs have corresponding names.")
-
     project_name, folder_name = extract_project_folder(project)

-    images_to_upload = []
-    ProcessedImage = namedtuple("ProcessedImage", ["url", "uploaded", "path", "entity"])
-
-    def _upload_image(image_url, image_name=None) -> ProcessedImage:
-        download_response = controller.download_image_from_public_url(
-            project_name=project_name, image_url=image_url
-        )
-        if not download_response.errors:
-            content, content_name = download_response.data
-            image_name = image_name if image_name else content_name
-            duplicated_images = [
-                image.name
-                for image in controller.get_duplicated_images(
-                    project_name=project_name,
-                    folder_name=folder_name,
-                    images=[image_name],
-                )
-            ]
-            if image_name not in duplicated_images:
-                upload_response = controller.upload_image_to_s3(
-                    project_name=project_name,
-                    image_path=image_name,
-                    image_bytes=content,
-                    folder_name=folder_name,
-                    image_quality_in_editor=image_quality_in_editor,
-                )
-                if upload_response.errors:
-                    logger.warning(upload_response.errors)
-                else:
-                    return ProcessedImage(
-                        url=image_url,
-                        uploaded=True,
-                        path=image_url,
-                        entity=upload_response.data,
-                    )
-        logger.warning(download_response.errors)
-        return ProcessedImage(
-            url=image_url, uploaded=False, path=image_name, entity=None
-        )
-
-    logger.info("Downloading %s images", len(img_urls))
-    with tqdm(total=len(img_urls), desc="Downloading") as progress_bar:
-        with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
-            failed_images = []
-            if img_names:
-                results = [
-                    executor.submit(_upload_image, url, img_urls[idx])
-                    for idx, url in enumerate(img_urls)
-                ]
-            else:
-                results = [executor.submit(_upload_image, url) for url in img_urls]
-            for future in concurrent.futures.as_completed(results):
-                processed_image = future.result()
-                if processed_image.uploaded and processed_image.entity:
-                    images_to_upload.append(processed_image)
-                else:
-                    failed_images.append(processed_image)
-                progress_bar.update(1)
-
-    uploaded = []
-    duplicates = []
-    for i in range(0, len(images_to_upload), 500):
-        response = controller.upload_images(
-            project_name=project_name,
-            folder_name=folder_name,
-            images=[
-                image.entity for image in images_to_upload[i : i + 500]  # noqa: E203
-            ],
-            annotation_status=annotation_status,
-        )
-
-        attachments, duplications = response.data
-        uploaded.extend([attachment["name"] for attachment in attachments])
-        duplicates.extend([duplication["name"] for duplication in duplications])
-    uploaded_image_urls = list(
-        {
-            image.entity.name
-            for image in images_to_upload
-            if image.entity.name in uploaded
-        }
-    )
-    failed_image_urls = [image.url for image in failed_images]
-
-    return (
-        uploaded_image_urls,
-        uploaded,
-        duplicates,
-        failed_image_urls,
+    use_case = controller.upload_images_from_public_urls_to_project(
+        project_name=project_name,
+        folder_name=folder_name,
+        image_urls=img_urls,
+        image_names=img_names,
+        annotation_status=annotation_status,
+        image_quality_in_editor=image_quality_in_editor,
     )
+    if use_case.is_valid():
+        with tqdm(total=len(img_urls), desc="Uploading images") as progress_bar:
+            for _ in use_case.execute():
+                progress_bar.update(1)
+        return use_case.data
+    raise AppException(use_case.response.errors)


 @Trackable
@@ -3320,33 +3239,17 @@ def upload_image_to_project(
     :type image_quality_in_editor: str
     """
     project_name, folder_name = extract_project_folder(project)
-
-    project = controller.get_project_metadata(project_name).data
-    if project["project"].project_type == constances.ProjectType.VIDEO.value:
-        raise AppException(
-            "The function does not support projects containing videos attached with URLs"
-        )
-
-    if not isinstance(img, io.BytesIO):
-        if from_s3_bucket:
-            image_bytes = controller.get_image_from_s3(from_s3_bucket, image_name)
-        else:
-            image_bytes = io.BytesIO(open(img, "rb").read())
-    else:
-        image_bytes = img
-    upload_response = controller.upload_image_to_s3(
+    response = controller.upload_image_to_project(
         project_name=project_name,
-        image_path=image_name if image_name else Path(img).name,
-        image_bytes=image_bytes,
         folder_name=folder_name,
-        image_quality_in_editor=image_quality_in_editor,
-    )
-    controller.upload_images(
-        project_name=project_name,
-        folder_name=folder_name,
-        images=[upload_response.data],  # noqa: E203
+        image_name=image_name,
+        image=img,
         annotation_status=annotation_status,
+        image_quality_in_editor=image_quality_in_editor,
+        from_s3_bucket=from_s3_bucket,
     )
+    if response.errors:
+        raise AppException(response.errors)


 def search_models(
@@ -3408,103 +3311,31 @@ def upload_images_to_project(
     :return: uploaded, could-not-upload, existing-images filepaths
     :rtype: tuple (3 members) of list of strs
     """
-    uploaded_image_entities = []
-    failed_images = []
     project_name, folder_name = extract_project_folder(project)
-    project = controller.get_project_metadata(project_name).data
-    if project["project"].project_type == constances.ProjectType.VIDEO.value:
-        raise AppException(
-            "The function does not support projects containing videos attached with URLs"
-        )
-
-    ProcessedImage = namedtuple("ProcessedImage", ["uploaded", "path", "entity"])
-
-    def _upload_local_image(image_path: str):
-        try:
-            with open(image_path, "rb") as image:
-                image_bytes = BytesIO(image.read())
-            upload_response = controller.upload_image_to_s3(
-                project_name=project_name,
-                image_path=image_path,
-                image_bytes=image_bytes,
-                folder_name=folder_name,
-                image_quality_in_editor=image_quality_in_editor,
-            )

-            if not upload_response.errors and upload_response.data:
-                entity = upload_response.data
-                return ProcessedImage(
-                    uploaded=True, path=entity.path, entity=entity
-                )
-            else:
-                return ProcessedImage(uploaded=False, path=image_path, entity=None)
-        except FileNotFoundError:
-            return ProcessedImage(uploaded=False, path=image_path, entity=None)
-
-    def _upload_s3_image(image_path: str):
-        try:
-            image_bytes = controller.get_image_from_s3(
-                s3_bucket=from_s3_bucket, image_path=image_path
-            ).data
-        except AppValidationException as e:
-            logger.warning(e)
-            return image_path
-        upload_response = controller.upload_image_to_s3(
-            project_name=project_name,
-            image_path=image_path,
-            image_bytes=image_bytes,
-            folder_name=folder_name,
-            image_quality_in_editor=image_quality_in_editor,
-        )
-        if not upload_response.errors and upload_response.data:
-            entity = upload_response.data
-            return ProcessedImage(uploaded=True, path=entity.path, entity=entity)
-        else:
-            return ProcessedImage(uploaded=False, path=image_path, entity=None)
-
-    filtered_paths = img_paths
-    duplication_counter = Counter(filtered_paths)
-    images_to_upload, duplicated_images = (
-        set(filtered_paths),
-        [item for item in duplication_counter if duplication_counter[item] > 1],
+    project_folder_name = project_name + (f"/{folder_name}" if folder_name else "")
+    use_case = controller.upload_images_to_project(
+        project_name=project_name,
+        folder_name=folder_name,
+        annotation_status=annotation_status,
+        image_quality_in_editor=image_quality_in_editor,
+        paths=img_paths,
+        from_s3_bucket=from_s3_bucket,
     )
-    upload_method = _upload_s3_image if from_s3_bucket else _upload_local_image
-
-    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
-        results = [
-            executor.submit(upload_method, image_path)
-            for image_path in images_to_upload
-        ]
-        with tqdm(total=len(images_to_upload), desc="Uploading images") as progress_bar:
-            for future in concurrent.futures.as_completed(results):
-                processed_image = future.result()
-                if processed_image.uploaded and processed_image.entity:
-                    uploaded_image_entities.append(processed_image.entity)
-                else:
-                    failed_images.append(processed_image.path)
-                progress_bar.update(1)
-    uploaded = []
-    duplicates = []
-
-    logger.info("Uploading %s images to project.", len(images_to_upload))
-
-    for i in range(0, len(uploaded_image_entities), 500):
-        response = controller.upload_images(
-            project_name=project_name,
-            folder_name=folder_name,
-            images=uploaded_image_entities[i : i + 500],  # noqa: E203
-            annotation_status=annotation_status,
-        )
-        attachments, duplications = response.data
-        uploaded.extend(attachments)
-        duplicates.extend(duplications)
-
+    images_to_upload, duplicates = use_case.images_to_upload
     if len(duplicates):
         logger.warning(
             "%s already existing images found that won't be uploaded.", len(duplicates)
         )
-
-    return uploaded, failed_images, duplicates
+    logger.info(
+        "Uploading %s images to project %s.", len(images_to_upload), project_folder_name
+    )
+    if use_case.is_valid():
+        with tqdm(total=len(images_to_upload), desc="Uploading images") as progress_bar:
+            for _ in use_case.execute():
+                progress_bar.update(1)
+        return use_case.data
+    raise AppException(use_case.response.errors)


 @Trackable
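The rewritten SDK functions above now delegate to a controller use case and share one shape: build the use case, check is_valid() — presumably the point where the newly added limitation checks can reject the call — drive execute() under a tqdm progress bar, then return use_case.data or raise AppException with the accumulated errors. From the caller's side the public behaviour is unchanged; a hedged usage sketch (project, folder, and file names are invented):

import superannotate as sa

# Upload local images into the "batch1" folder of "Example Project".
# Returns the (uploaded, could-not-upload, existing-images) filepath lists
# described in the docstring; raises AppException if validation fails.
uploaded, failed, duplicated = sa.upload_images_to_project(
    "Example Project/batch1",
    ["./images/cat.jpg", "./images/dog.jpg"],
    annotation_status="NotStarted",
)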

src/superannotate/lib/core/__init__.py

Lines changed: 15 additions & 1 deletion
@@ -55,7 +55,21 @@
 TOKEN_UUID = "token"


-DEPRECATED_VIDEO_PROJECTS_MESSAGE = "The function does not support projects containing videos attached with URLs"
+DEPRECATED_VIDEO_PROJECTS_MESSAGE = (
+    "The function does not support projects containing videos attached with URLs"
+)
+
+UPLOAD_FOLDER_LIMIT_ERROR_MESSAGE = "The number of items you want to upload exceeds the limit of 50 000 items per folder."
+UPLOAD_PROJECT_LIMIT_ERROR_MESSAGE = "The number of items you want to upload exceeds the limit of 500 000 items per project."
+UPLOAD_USER_LIMIT_ERROR_MESSAGE = "The number of items you want to upload exceeds the limit of your subscription plan."
+
+ATTACH_FOLDER_LIMIT_ERROR_MESSAGE = "The number of items you want to attach exceeds the limit of 50 000 items per folder."
+ATTACH_PROJECT_LIMIT_ERROR_MESSAGE = "The number of items you want to attach exceeds the limit of 500 000 items per project."
+ATTACH_USER_LIMIT_ERROR_MESSAGE = "The number of items you want to attach exceeds the limit of your subscription plan."
+
+COPY_FOLDER_LIMIT_ERROR_MESSAGE = (
+    "The number of items you want to copy exceeds the limit of 50 000 items per folder."
+)

 __version__ = "?"
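These messages back the commit's limitation validation: before an upload, attach, or copy use case executes, the requested item count is presumably checked against the 50 000-per-folder, 500 000-per-project, and subscription-plan limits, and the matching message is surfaced through use_case.response.errors. A minimal sketch of such a check with a hypothetical helper (the real validator lives in the use-case layer, which is not part of this diff):

from lib.core import (
    UPLOAD_FOLDER_LIMIT_ERROR_MESSAGE,
    UPLOAD_PROJECT_LIMIT_ERROR_MESSAGE,
    UPLOAD_USER_LIMIT_ERROR_MESSAGE,
)


def validate_upload_count(to_upload, folder_count, project_count, user_limit_left):
    """Hypothetical helper mirroring the limit checks implied by the new messages."""
    errors = []
    if folder_count + to_upload > 50_000:  # per-folder limit
        errors.append(UPLOAD_FOLDER_LIMIT_ERROR_MESSAGE)
    if project_count + to_upload > 500_000:  # per-project limit
        errors.append(UPLOAD_PROJECT_LIMIT_ERROR_MESSAGE)
    if to_upload > user_limit_left:  # subscription-plan limit
        errors.append(UPLOAD_USER_LIMIT_ERROR_MESSAGE)
    return errors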