diff --git a/.github/workflows/prerelease.yml b/.github/workflows/prerelease.yml
index 5acd20007c1..8f51f27994d 100644
--- a/.github/workflows/prerelease.yml
+++ b/.github/workflows/prerelease.yml
@@ -62,22 +62,6 @@ jobs:
       - name: "🖨️ Print changelog to console"
         if: steps.version_type.outputs.type != 'skip'
         run: cat CHANGELOG.md
-
-      - name: 💾 Commit and Tag
-        id: git_commit
-        if: steps.version_type.outputs.type != 'skip'
-        run: |
-          git config user.email ${{ secrets.CI_EMAIL }}
-          git config user.name ${{ secrets.CI_USER }}
-          cd repos/avalon-core
-          git checkout main
-          git pull
-          cd ../..
-          git add .
-          git commit -m "[Automated] Bump version"
-          tag_name="CI/${{ steps.version.outputs.next_tag }}"
-          echo $tag_name
-          git tag -a $tag_name -m "nightly build"

       - name: Push to protected main branch
         uses: CasperWA/push-protected@v2.10.0
diff --git a/.gitignore b/.gitignore
index 28cfb4b1e9e..e18c94a1f4b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -102,3 +102,6 @@
 website/.docusaurus
 .poetry/
 .python-version
+.editorconfig
+.pre-commit-config.yaml
+mypy.ini
diff --git a/.hound.yml b/.hound.yml
index df9cdab64af..de5adb3154a 100644
--- a/.hound.yml
+++ b/.hound.yml
@@ -1,3 +1,3 @@
-flake8:
-  enabled: true
-  config_file: setup.cfg
+flake8:
+  enabled: true
+  config_file: setup.cfg
diff --git a/openpype/hosts/fusion/plugins/load/actions.py b/openpype/hosts/fusion/plugins/load/actions.py
index bc59cec77f9..819c9272fd0 100644
--- a/openpype/hosts/fusion/plugins/load/actions.py
+++ b/openpype/hosts/fusion/plugins/load/actions.py
@@ -6,7 +6,7 @@


 class FusionSetFrameRangeLoader(load.LoaderPlugin):
-    """Specific loader of Alembic for the avalon.animation family"""
+    """Set frame range excluding pre- and post-handles"""

     families = ["animation",
                 "camera",
@@ -40,7 +40,7 @@ def load(self, context, name, namespace, data):


 class FusionSetFrameRangeWithHandlesLoader(load.LoaderPlugin):
-    """Specific loader of Alembic for the avalon.animation family"""
+    """Set frame range including pre- and post-handles"""

     families = ["animation",
                 "camera",
diff --git a/openpype/hosts/harmony/plugins/publish/collect_farm_render.py b/openpype/hosts/harmony/plugins/publish/collect_farm_render.py
index f5bf0512434..3e9e680efd9 100644
--- a/openpype/hosts/harmony/plugins/publish/collect_farm_render.py
+++ b/openpype/hosts/harmony/plugins/publish/collect_farm_render.py
@@ -144,6 +144,7 @@ def get_instances(self, context):
                 label=node.split("/")[1],
                 subset=subset_name,
                 asset=legacy_io.Session["AVALON_ASSET"],
+                task=task_name,
                 attachTo=False,
                 setMembers=[node],
                 publish=info[4],
diff --git a/openpype/hosts/hiero/api/otio/hiero_export.py b/openpype/hosts/hiero/api/otio/hiero_export.py
index 1e4088d9c06..64fb81aed4d 100644
--- a/openpype/hosts/hiero/api/otio/hiero_export.py
+++ b/openpype/hosts/hiero/api/otio/hiero_export.py
@@ -151,7 +151,7 @@ def create_otio_reference(clip):
     padding = media_source.filenamePadding()
     file_head = media_source.filenameHead()
     is_sequence = not media_source.singleFile()
-    frame_duration = media_source.duration()
+    frame_duration = media_source.duration() - 1
     fps = utils.get_rate(clip) or self.project_fps
     extension = os.path.splitext(path)[-1]

diff --git a/openpype/hosts/hiero/plugins/publish/precollect_instances.py b/openpype/hosts/hiero/plugins/publish/precollect_instances.py
index 4eac6a008a2..46f0b2440e3 100644
--- a/openpype/hosts/hiero/plugins/publish/precollect_instances.py
+++ b/openpype/hosts/hiero/plugins/publish/precollect_instances.py
@@ -296,6 +296,8 @@ def get_otio_clip_instance_data(self, track_item):
continue if otio_clip.name not in track_item.name(): continue + self.log.debug("__ parent_range: {}".format(parent_range)) + self.log.debug("__ timeline_range: {}".format(timeline_range)) if openpype.lib.is_overlapping_otio_ranges( parent_range, timeline_range, strict=True): diff --git a/openpype/hosts/houdini/plugins/load/actions.py b/openpype/hosts/houdini/plugins/load/actions.py index 63d74c39a59..637be1513db 100644 --- a/openpype/hosts/houdini/plugins/load/actions.py +++ b/openpype/hosts/houdini/plugins/load/actions.py @@ -6,7 +6,7 @@ class SetFrameRangeLoader(load.LoaderPlugin): - """Set Houdini frame range""" + """Set frame range excluding pre- and post-handles""" families = [ "animation", @@ -44,7 +44,7 @@ def load(self, context, name, namespace, data): class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): - """Set Maya frame range including pre- and post-handles""" + """Set frame range including pre- and post-handles""" families = [ "animation", diff --git a/openpype/hosts/houdini/plugins/load/load_alembic.py b/openpype/hosts/houdini/plugins/load/load_alembic.py index 0214229d5ab..96e666b2552 100644 --- a/openpype/hosts/houdini/plugins/load/load_alembic.py +++ b/openpype/hosts/houdini/plugins/load/load_alembic.py @@ -7,7 +7,7 @@ class AbcLoader(load.LoaderPlugin): - """Specific loader of Alembic for the avalon.animation family""" + """Load Alembic""" families = ["model", "animation", "pointcache", "gpuCache"] label = "Load Alembic" diff --git a/openpype/hosts/houdini/plugins/load/load_bgeo.py b/openpype/hosts/houdini/plugins/load/load_bgeo.py new file mode 100644 index 00000000000..a463d513832 --- /dev/null +++ b/openpype/hosts/houdini/plugins/load/load_bgeo.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- +import os +import re + +from openpype.pipeline import ( + load, + get_representation_path, +) +from openpype.hosts.houdini.api import pipeline + + +class BgeoLoader(load.LoaderPlugin): + """Load bgeo files to Houdini.""" + + label = "Load bgeo" + families = ["model", "pointcache", "bgeo"] + representations = [ + "bgeo", "bgeosc", "bgeogz", + "bgeo.sc", "bgeo.gz", "bgeo.lzma", "bgeo.bz2"] + order = -10 + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, data=None): + + import hou + + # Get the root node + obj = hou.node("/obj") + + # Define node name + namespace = namespace if namespace else context["asset"]["name"] + node_name = "{}_{}".format(namespace, name) if namespace else name + + # Create a new geo node + container = obj.createNode("geo", node_name=node_name) + is_sequence = bool(context["representation"]["context"].get("frame")) + + # Remove the file node, it only loads static meshes + # Houdini 17 has removed the file node from the geo node + file_node = container.node("file1") + if file_node: + file_node.destroy() + + # Explicitly create a file node + file_node = container.createNode("file", node_name=node_name) + file_node.setParms({"file": self.format_path(self.fname, is_sequence)}) + + # Set display on last node + file_node.setDisplayFlag(True) + + nodes = [container, file_node] + self[:] = nodes + + return pipeline.containerise( + node_name, + namespace, + nodes, + context, + self.__class__.__name__, + suffix="", + ) + + @staticmethod + def format_path(path, is_sequence): + """Format file path correctly for single bgeo or bgeo sequence.""" + if not os.path.exists(path): + raise RuntimeError("Path does not exist: %s" % path) + + # The path is either a single file or sequence in a folder. 
+ if not is_sequence: + filename = path + print("single") + else: + filename = re.sub(r"(.*)\.(\d+)\.(bgeo.*)", "\\1.$F4.\\3", path) + + filename = os.path.join(path, filename) + + filename = os.path.normpath(filename) + filename = filename.replace("\\", "/") + + return filename + + def update(self, container, representation): + + node = container["node"] + try: + file_node = next( + n for n in node.children() if n.type().name() == "file" + ) + except StopIteration: + self.log.error("Could not find node of type `alembic`") + return + + # Update the file path + file_path = get_representation_path(representation) + file_path = self.format_path(file_path) + + file_node.setParms({"fileName": file_path}) + + # Update attribute + node.setParms({"representation": str(representation["_id"])}) + + def remove(self, container): + + node = container["node"] + node.destroy() diff --git a/openpype/hosts/houdini/plugins/load/load_camera.py b/openpype/hosts/houdini/plugins/load/load_camera.py index ef57d115da6..059ad11a764 100644 --- a/openpype/hosts/houdini/plugins/load/load_camera.py +++ b/openpype/hosts/houdini/plugins/load/load_camera.py @@ -78,7 +78,7 @@ def transfer_non_default_values(src, dest, ignore=None): class CameraLoader(load.LoaderPlugin): - """Specific loader of Alembic for the avalon.animation family""" + """Load camera from an Alembic file""" families = ["camera"] label = "Load Camera (abc)" diff --git a/openpype/hosts/houdini/plugins/load/load_image.py b/openpype/hosts/houdini/plugins/load/load_image.py index 671f08f18ff..928c2ee734b 100644 --- a/openpype/hosts/houdini/plugins/load/load_image.py +++ b/openpype/hosts/houdini/plugins/load/load_image.py @@ -42,9 +42,9 @@ def get_image_avalon_container(): class ImageLoader(load.LoaderPlugin): - """Specific loader of Alembic for the avalon.animation family""" + """Load images into COP2""" - families = ["colorbleed.imagesequence"] + families = ["imagesequence"] label = "Load Image (COP2)" representations = ["*"] order = -10 diff --git a/openpype/hosts/houdini/plugins/load/load_vdb.py b/openpype/hosts/houdini/plugins/load/load_vdb.py index 06bb9e45e47..bff0f8b0bfd 100644 --- a/openpype/hosts/houdini/plugins/load/load_vdb.py +++ b/openpype/hosts/houdini/plugins/load/load_vdb.py @@ -9,7 +9,7 @@ class VdbLoader(load.LoaderPlugin): - """Specific loader of Alembic for the avalon.animation family""" + """Load VDB""" families = ["vdbcache"] label = "Load VDB" diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 5956cc482c1..ff04fa7aa28 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -77,6 +77,7 @@ "arnold": "defaultRenderGlobals.imageFilePrefix", "renderman": "rmanGlobals.imageFileFormat", "redshift": "defaultRenderGlobals.imageFilePrefix", + "mayahardware2": "defaultRenderGlobals.imageFilePrefix" } RENDERMAN_IMAGE_DIR = "maya//" @@ -155,7 +156,8 @@ class Instance(object): "arnold": RenderProductsArnold, "vray": RenderProductsVray, "redshift": RenderProductsRedshift, - "renderman": RenderProductsRenderman + "renderman": RenderProductsRenderman, + "mayahardware2": RenderProductsMayaHardware }.get(renderer_name.lower(), None) if renderer is None: raise UnsupportedRendererException( @@ -1124,6 +1126,67 @@ def get_files(self, product): return new_files +class RenderProductsMayaHardware(ARenderProducts): + """Expected files for MayaHardware renderer.""" + + renderer = "mayahardware2" + + extensions = [ + {"label": "JPEG", "index": 
8, "extension": "jpg"}, + {"label": "PNG", "index": 32, "extension": "png"}, + {"label": "EXR(exr)", "index": 40, "extension": "exr"} + ] + + def _get_extension(self, value): + result = None + if isinstance(value, int): + extensions = { + extension["index"]: extension["extension"] + for extension in self.extensions + } + try: + result = extensions[value] + except KeyError: + raise NotImplementedError( + "Could not find extension for {}".format(value) + ) + + if isinstance(value, six.string_types): + extensions = { + extension["label"]: extension["extension"] + for extension in self.extensions + } + try: + result = extensions[value] + except KeyError: + raise NotImplementedError( + "Could not find extension for {}".format(value) + ) + + if not result: + raise NotImplementedError( + "Could not find extension for {}".format(value) + ) + + return result + + def get_render_products(self): + """Get all AOVs. + See Also: + :func:`ARenderProducts.get_render_products()` + """ + ext = self._get_extension( + self._get_attr("defaultRenderGlobals.imageFormat") + ) + + products = [] + for cam in self.get_renderable_cameras(): + product = RenderProduct(productName="beauty", ext=ext, camera=cam) + products.append(product) + + return products + + class AOVError(Exception): """Custom exception for determining AOVs.""" diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 8e9bd0e22b4..93ee6679e57 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -77,7 +77,8 @@ class CreateRender(plugin.Creator): 'vray': 'vraySettings.fileNamePrefix', 'arnold': 'defaultRenderGlobals.imageFilePrefix', 'renderman': 'rmanGlobals.imageFileFormat', - 'redshift': 'defaultRenderGlobals.imageFilePrefix' + 'redshift': 'defaultRenderGlobals.imageFilePrefix', + 'mayahardware2': 'defaultRenderGlobals.imageFilePrefix', } _image_prefixes = { @@ -87,7 +88,8 @@ class CreateRender(plugin.Creator): # this needs `imageOutputDir` # (/renders/maya/) set separately 'renderman': '_..', - 'redshift': 'maya///' # noqa + 'redshift': 'maya///', # noqa + 'mayahardware2': 'maya///', # noqa } _aov_chars = { diff --git a/openpype/hosts/maya/plugins/load/actions.py b/openpype/hosts/maya/plugins/load/actions.py index 483ad324020..253dae1e431 100644 --- a/openpype/hosts/maya/plugins/load/actions.py +++ b/openpype/hosts/maya/plugins/load/actions.py @@ -1,7 +1,7 @@ """A module containing generic loader actions that will display in the Loader. """ - +import qargparse from openpype.pipeline import load from openpype.hosts.maya.api.lib import ( maintained_selection, @@ -10,7 +10,7 @@ class SetFrameRangeLoader(load.LoaderPlugin): - """Specific loader of Alembic for the avalon.animation family""" + """Set frame range excluding pre- and post-handles""" families = ["animation", "camera", @@ -44,7 +44,7 @@ def load(self, context, name, namespace, data): class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): - """Specific loader of Alembic for the avalon.animation family""" + """Set frame range including pre- and post-handles""" families = ["animation", "camera", @@ -98,6 +98,15 @@ class ImportMayaLoader(load.LoaderPlugin): icon = "arrow-circle-down" color = "#775555" + options = [ + qargparse.Boolean( + "clean_import", + label="Clean import", + default=False, + help="Should all occurences of cbId be purged?" 
+ ) + ] + def load(self, context, name=None, namespace=None, data=None): import maya.cmds as cmds @@ -114,13 +123,22 @@ def load(self, context, name=None, namespace=None, data=None): ) with maintained_selection(): - cmds.file(self.fname, - i=True, - preserveReferences=True, - namespace=namespace, - returnNewNodes=True, - groupReference=True, - groupName="{}:{}".format(namespace, name)) + nodes = cmds.file(self.fname, + i=True, + preserveReferences=True, + namespace=namespace, + returnNewNodes=True, + groupReference=True, + groupName="{}:{}".format(namespace, name)) + + if data.get("clean_import", False): + remove_attributes = ["cbId"] + for node in nodes: + for attr in remove_attributes: + if cmds.attributeQuery(attr, node=node, exists=True): + full_attr = "{}.{}".format(node, attr) + print("Removing {}".format(full_attr)) + cmds.deleteAttr(full_attr) # We do not containerize imported content, it remains unmanaged return diff --git a/openpype/hosts/maya/plugins/load/load_ass.py b/openpype/hosts/maya/plugins/load/load_ass.py index 18de4df3b14..a284b7ec1fe 100644 --- a/openpype/hosts/maya/plugins/load/load_ass.py +++ b/openpype/hosts/maya/plugins/load/load_ass.py @@ -16,7 +16,7 @@ class AssProxyLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): - """Load the Proxy""" + """Load Arnold Proxy as reference""" families = ["ass"] representations = ["ass"] diff --git a/openpype/hosts/maya/plugins/load/load_gpucache.py b/openpype/hosts/maya/plugins/load/load_gpucache.py index 591e568e4c2..6d5e945508e 100644 --- a/openpype/hosts/maya/plugins/load/load_gpucache.py +++ b/openpype/hosts/maya/plugins/load/load_gpucache.py @@ -8,7 +8,7 @@ class GpuCacheLoader(load.LoaderPlugin): - """Load model Alembic as gpuCache""" + """Load Alembic as gpuCache""" families = ["model"] representations = ["abc"] diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py index a8875cf2165..d65b5a2c1ec 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -12,7 +12,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): - """Load the model""" + """Reference file""" families = ["model", "pointcache", diff --git a/openpype/hosts/maya/plugins/load/load_vdb_to_vray.py b/openpype/hosts/maya/plugins/load/load_vdb_to_vray.py index 4f14235bfbb..3a16264ec02 100644 --- a/openpype/hosts/maya/plugins/load/load_vdb_to_vray.py +++ b/openpype/hosts/maya/plugins/load/load_vdb_to_vray.py @@ -74,6 +74,7 @@ def _fix_duplicate_vvg_callbacks(): class LoadVDBtoVRay(load.LoaderPlugin): + """Load OpenVDB in a V-Ray Volume Grid""" families = ["vdbcache"] representations = ["vdb"] diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index 912fe179dd4..e66983780e5 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -326,8 +326,8 @@ def process(self, context): "byFrameStep": int( self.get_render_attribute("byFrameStep", layer=layer_name)), - "renderer": self.get_render_attribute("currentRenderer", - layer=layer_name), + "renderer": self.get_render_attribute( + "currentRenderer", layer=layer_name).lower(), # instance subset "family": "renderlayer", "families": ["renderlayer"], diff --git a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py index 0838b4fbf8d..e6c6ef6c9e8 
100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py @@ -12,7 +12,8 @@ 'vray': 'vraySettings.fileNamePrefix', 'arnold': 'defaultRenderGlobals.imageFilePrefix', 'renderman': 'defaultRenderGlobals.imageFilePrefix', - 'redshift': 'defaultRenderGlobals.imageFilePrefix' + 'redshift': 'defaultRenderGlobals.imageFilePrefix', + 'mayahardware2': 'defaultRenderGlobals.imageFilePrefix', } diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index 023e27de17a..ba6c1397ab0 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -50,15 +50,17 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): 'vray': 'vraySettings.fileNamePrefix', 'arnold': 'defaultRenderGlobals.imageFilePrefix', 'renderman': 'rmanGlobals.imageFileFormat', - 'redshift': 'defaultRenderGlobals.imageFilePrefix' + 'redshift': 'defaultRenderGlobals.imageFilePrefix', + 'mayahardware2': 'defaultRenderGlobals.imageFilePrefix', } ImagePrefixTokens = { - - 'arnold': 'maya///{aov_separator}', # noqa + 'mentalray': 'maya///{aov_separator}', # noqa: E501 + 'arnold': 'maya///{aov_separator}', # noqa: E501 'redshift': 'maya///', 'vray': 'maya///', - 'renderman': '{aov_separator}..' # noqa + 'renderman': '{aov_separator}..', + 'mayahardware2': 'maya///', } _aov_chars = { @@ -234,7 +236,7 @@ def get_invalid(cls, instance): # load validation definitions from settings validation_settings = ( instance.context.data["project_settings"]["maya"]["publish"]["ValidateRenderSettings"].get( # noqa: E501 - "{}_render_attributes".format(renderer)) + "{}_render_attributes".format(renderer)) or [] ) # go through definitions and test if such node.attribute exists. 
diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index e13fed35050..ba8aa7a8dba 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -1,4 +1,5 @@ import os +from pprint import pformat import re import six import platform @@ -364,17 +365,15 @@ def fix_data_for_node_create(data): return data -def add_write_node(name, **kwarg): +def add_write_node_legacy(name, **kwarg): """Adding nuke write node - Arguments: name (str): nuke node name kwarg (attrs): data for nuke knobs - Returns: node (obj): nuke write node """ - frame_range = kwarg.get("frame_range", None) + frame_range = kwarg.get("use_range_limit", None) w = nuke.createNode( "Write", @@ -400,6 +399,35 @@ def add_write_node(name, **kwarg): return w +def add_write_node(name, file_path, knobs, **kwarg): + """Adding nuke write node + + Arguments: + name (str): nuke node name + kwarg (attrs): data for nuke knobs + + Returns: + node (obj): nuke write node + """ + frame_range = kwarg.get("use_range_limit", None) + + w = nuke.createNode( + "Write", + "name {}".format(name)) + + w["file"].setValue(file_path) + + # finally add knob overrides + set_node_knobs_from_settings(w, knobs, **kwarg) + + if frame_range: + w["use_limit"].setValue(True) + w["first"].setValue(frame_range[0]) + w["last"].setValue(frame_range[1]) + + return w + + def read_avalon_data(node): """Return user-defined knobs from given `node` @@ -500,13 +528,9 @@ def get_nuke_imageio_settings(): return get_anatomy_settings(Context.project_name)["imageio"]["nuke"] -def get_created_node_imageio_setting(**kwarg): +def get_created_node_imageio_setting_legacy(nodeclass, creator, subset): ''' Get preset data for dataflow (fileType, compression, bitDepth) ''' - log.debug(kwarg) - nodeclass = kwarg.get("nodeclass", None) - creator = kwarg.get("creator", None) - subset = kwarg.get("subset", None) assert any([creator, nodeclass]), nuke.message( "`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__)) @@ -578,6 +602,97 @@ def get_created_node_imageio_setting(**kwarg): return imageio_node +def get_imageio_node_setting(node_class, plugin_name, subset): + ''' Get preset data for dataflow (fileType, compression, bitDepth) + ''' + imageio_nodes = get_nuke_imageio_settings()["nodes"] + required_nodes = imageio_nodes["requiredNodes"] + + imageio_node = None + for node in required_nodes: + log.info(node) + if ( + node_class in node["nukeNodeClass"] + and plugin_name in node["plugins"] + ): + imageio_node = node + break + + log.debug("__ imageio_node: {}".format(imageio_node)) + + if not imageio_node: + return + + # find overrides and update knobs with them + get_imageio_node_override_setting( + node_class, + plugin_name, + subset, + imageio_node["knobs"] + ) + + log.info("ImageIO node: {}".format(imageio_node)) + return imageio_node + + +def get_imageio_node_override_setting( + node_class, plugin_name, subset, knobs_settings +): + ''' Get imageio node overrides from settings + ''' + imageio_nodes = get_nuke_imageio_settings()["nodes"] + override_nodes = imageio_nodes["overrideNodes"] + + # find matching override node + override_imageio_node = None + for onode in override_nodes: + log.info(onode) + if node_class not in onode["nukeNodeClass"]: + continue + + if plugin_name not in onode["plugins"]: + continue + + if ( + onode["subsets"] + and not any(re.search(s, subset) for s in onode["subsets"]) + ): + continue + + override_imageio_node = onode + break + + log.debug("__ override_imageio_node: {}".format(override_imageio_node)) + # add 
overrides to imageio_node + if override_imageio_node: + # get all knob names in imageio_node + knob_names = [k["name"] for k in knobs_settings] + + for oknob in override_imageio_node["knobs"]: + for knob in knobs_settings: + # override matching knob name + if oknob["name"] == knob["name"]: + log.debug( + "_ overriding knob: `{}` > `{}`".format( + knob, oknob + )) + if not oknob["value"]: + # remove original knob if no value found in oknob + knobs_settings.remove(knob) + else: + # override knob value with oknob's + knob["value"] = oknob["value"] + + # add missing knobs into imageio_node + if oknob["name"] not in knob_names: + log.debug( + "_ adding knob: `{}`".format(oknob)) + knobs_settings.append(oknob) + knob_names.append(oknob["name"]) + + return knobs_settings + + def get_imageio_input_colorspace(filename): ''' Get input file colorspace based on regex in settings. ''' @@ -725,15 +840,14 @@ def check_subsetname_exists(nodes, subset_name): def get_render_path(node): ''' Generate Render path from presets regarding avalon knob data ''' - data = {'avalon': read_avalon_data(node)} - data_preset = { - "nodeclass": data["avalon"]["family"], - "families": [data["avalon"]["families"]], - "creator": data["avalon"]["creator"], - "subset": data["avalon"]["subset"] - } - - nuke_imageio_writes = get_created_node_imageio_setting(**data_preset) + avalon_knob_data = read_avalon_data(node) + data = {'avalon': avalon_knob_data} + + nuke_imageio_writes = get_imageio_node_setting( + node_class=avalon_knob_data["family"], + plugin_name=avalon_knob_data["creator"], + subset=avalon_knob_data["subset"] + ) host_name = os.environ.get("AVALON_APP") data.update({ @@ -825,8 +939,282 @@ def add_button_clear_rendered(node, path): node.addKnob(knob) -def create_write_node(name, data, input=None, prenodes=None, - review=True, linked_knobs=None, farm=True): +def create_prenodes( + prev_node, + nodes_setting, + plugin_name=None, + subset=None, + **kwargs +): + last_node = None + for_dependency = {} + for name, node in nodes_setting.items(): + # get attributes + nodeclass = node["nodeclass"] + knobs = node["knobs"] + + # create node + now_node = nuke.createNode( + nodeclass, "name {}".format(name)) + now_node.hideControlPanel() + + # add for dependency linking + for_dependency[name] = { + "node": now_node, + "dependent": node["dependent"] + } + + if all([plugin_name, subset]): + # find imageio overrides + get_imageio_node_override_setting( + now_node.Class(), + plugin_name, + subset, + knobs + ) + + # add data to knob + set_node_knobs_from_settings(now_node, knobs, **kwargs) + + # switch actual node to previous + last_node = now_node + + for _node_name, node_prop in for_dependency.items(): + if not node_prop["dependent"]: + node_prop["node"].setInput( + 0, prev_node) + elif node_prop["dependent"] in for_dependency: + _prev_node = for_dependency[node_prop["dependent"]]["node"] + node_prop["node"].setInput( + 0, _prev_node) + else: + log.warning("Dependency has wrong name of node: {}".format( + node_prop + )) + + return last_node + + +def create_write_node( + name, + data, + input=None, + prenodes=None, + review=True, + farm=True, + linked_knobs=None, + **kwargs +): + ''' Creating write node which is group node + + Arguments: + name (str): name of node + data (dict): creator write instance data + input (node)[optional]: selected node to connect to + prenodes (dict)[optional]: + nodes to be created before write with dependency + review (bool)[optional]: adding review knob + farm (bool)[optional]: rendering workflow target + 
kwargs (dict)[optional]: additional key arguments for formating + + Example: + prenodes = { + "nodeName": { + "nodeclass": "Reformat", + "dependent": [ + following_node_01, + ... + ], + "knobs": [ + { + "type": "text", + "name": "knobname", + "value": "knob value" + }, + ... + ] + }, + ... + } + + + Return: + node (obj): group node with avalon data as Knobs + ''' + prenodes = prenodes or {} + + # group node knob overrides + knob_overrides = data.pop("knobs", []) + + # filtering variables + plugin_name = data["creator"] + subset = data["subset"] + + # get knob settings for write node + imageio_writes = get_imageio_node_setting( + node_class=data["nodeclass"], + plugin_name=plugin_name, + subset=subset + ) + + for knob in imageio_writes["knobs"]: + if knob["name"] == "file_type": + representation = knob["value"] + + host_name = os.environ.get("AVALON_APP") + try: + data.update({ + "app": host_name, + "imageio_writes": imageio_writes, + "representation": representation, + }) + anatomy_filled = format_anatomy(data) + + except Exception as e: + msg = "problem with resolving anatomy template: {}".format(e) + log.error(msg) + nuke.message(msg) + + # build file path to workfiles + fdir = str(anatomy_filled["work"]["folder"]).replace("\\", "/") + fpath = data["fpath_template"].format( + work=fdir, + version=data["version"], + subset=data["subset"], + frame=data["frame"], + ext=representation + ) + + # create directory + if not os.path.isdir(os.path.dirname(fpath)): + log.warning("Path does not exist! I am creating it.") + os.makedirs(os.path.dirname(fpath)) + + GN = nuke.createNode("Group", "name {}".format(name)) + + prev_node = None + with GN: + if input: + input_name = str(input.name()).replace(" ", "") + # if connected input node was defined + prev_node = nuke.createNode( + "Input", "name {}".format(input_name)) + else: + # generic input node connected to nothing + prev_node = nuke.createNode( + "Input", "name {}".format("rgba")) + prev_node.hideControlPanel() + + # creating pre-write nodes `prenodes` + last_prenode = create_prenodes( + prev_node, + prenodes, + plugin_name, + subset, + **kwargs + ) + if last_prenode: + prev_node = last_prenode + + # creating write node + write_node = now_node = add_write_node( + "inside_{}".format(name), + fpath, + imageio_writes["knobs"], + **data + ) + write_node.hideControlPanel() + # connect to previous node + now_node.setInput(0, prev_node) + + # switch actual node to previous + prev_node = now_node + + now_node = nuke.createNode("Output", "name Output1") + now_node.hideControlPanel() + + # connect to previous node + now_node.setInput(0, prev_node) + + # imprinting group node + set_avalon_knob_data(GN, data["avalon"]) + add_publish_knob(GN) + add_rendering_knobs(GN, farm) + + if review: + add_review_knob(GN) + + # add divider + GN.addKnob(nuke.Text_Knob('', 'Rendering')) + + # Add linked knobs. 
+ linked_knob_names = [] + + # add input linked knobs and create group only if any input + if linked_knobs: + linked_knob_names.append("_grp-start_") + linked_knob_names.extend(linked_knobs) + linked_knob_names.append("_grp-end_") + + linked_knob_names.append("Render") + + for _k_name in linked_knob_names: + if "_grp-start_" in _k_name: + knob = nuke.Tab_Knob( + "rnd_attr", "Rendering attributes", nuke.TABBEGINCLOSEDGROUP) + GN.addKnob(knob) + elif "_grp-end_" in _k_name: + knob = nuke.Tab_Knob( + "rnd_attr_end", "Rendering attributes", nuke.TABENDGROUP) + GN.addKnob(knob) + else: + if "___" in _k_name: + # add divider + GN.addKnob(nuke.Text_Knob("")) + else: + # add linked knob by _k_name + link = nuke.Link_Knob("") + link.makeLink(write_node.name(), _k_name) + link.setName(_k_name) + + # make render + if "Render" in _k_name: + link.setLabel("Render Local") + link.setFlag(0x1000) + GN.addKnob(link) + + # adding write to read button + add_button_write_to_read(GN) + + # adding write to read button + add_button_clear_rendered(GN, os.path.dirname(fpath)) + + # Deadline tab. + add_deadline_tab(GN) + + # open the our Tab as default + GN[_NODE_TAB_NAME].setFlag(0) + + # set tile color + tile_color = next( + iter( + k["value"] for k in imageio_writes["knobs"] + if "tile_color" in k["name"] + ), [255, 0, 0, 255] + ) + GN["tile_color"].setValue( + color_gui_to_int(tile_color)) + + # finally add knob overrides + set_node_knobs_from_settings(GN, knob_overrides, **kwargs) + + return GN + + +def create_write_node_legacy( + name, data, input=None, prenodes=None, + review=True, linked_knobs=None, farm=True +): ''' Creating write node which is group node Arguments: @@ -859,8 +1247,13 @@ def create_write_node(name, data, input=None, prenodes=None, node (obj): group node with avalon data as Knobs ''' knob_overrides = data.get("knobs", []) + nodeclass = data["nodeclass"] + creator = data["creator"] + subset = data["subset"] - imageio_writes = get_created_node_imageio_setting(**data) + imageio_writes = get_created_node_imageio_setting_legacy( + nodeclass, creator, subset + ) for knob in imageio_writes["knobs"]: if knob["name"] == "file_type": representation = knob["value"] @@ -982,7 +1375,8 @@ def create_write_node(name, data, input=None, prenodes=None, prev_node = now_node # creating write node - write_node = now_node = add_write_node( + + write_node = now_node = add_write_node_legacy( "inside_{}".format(name), **_data ) @@ -1089,6 +1483,79 @@ def create_write_node(name, data, input=None, prenodes=None, return GN +def set_node_knobs_from_settings(node, knob_settings, **kwargs): + """ Overriding knob values from settings + + Using `schema_nuke_knob_inputs` for knob type definitions. + + Args: + node (nuke.Node): nuke node + knob_settings (list): list of dict. 
Keys are `type`, `name`, `value` + kwargs (dict)[optional]: keys for formatable knob settings + """ + for knob in knob_settings: + log.debug("__ knob: {}".format(pformat(knob))) + knob_type = knob["type"] + knob_name = knob["name"] + + if knob_name not in node.knobs(): + continue + + # first deal with formatable knob settings + if knob_type == "formatable": + template = knob["template"] + to_type = knob["to_type"] + try: + _knob_value = template.format( + **kwargs + ) + log.debug("__ knob_value0: {}".format(_knob_value)) + except KeyError as msg: + log.warning("__ msg: {}".format(msg)) + raise KeyError(msg) + + # convert value to correct type + if to_type == "2d_vector": + knob_value = _knob_value.split(";").split(",") + else: + knob_value = _knob_value + + knob_type = to_type + + else: + knob_value = knob["value"] + + if not knob_value: + continue + + # first convert string types to string + # just to ditch unicode + if isinstance(knob_value, six.text_type): + knob_value = str(knob_value) + + # set correctly knob types + if knob_type == "bool": + knob_value = bool(knob_value) + elif knob_type == "decimal_number": + knob_value = float(knob_value) + elif knob_type == "number": + knob_value = int(knob_value) + elif knob_type == "text": + knob_value = knob_value + elif knob_type == "color_gui": + knob_value = color_gui_to_int(knob_value) + elif knob_type in ["2d_vector", "3d_vector", "color"]: + knob_value = [float(v) for v in knob_value] + + node[knob_name].setValue(knob_value) + + +def color_gui_to_int(color_gui): + hex_value = ( + "0x{0:0>2x}{1:0>2x}{2:0>2x}{3:0>2x}").format(*color_gui) + return int(hex_value, 16) + + def add_rendering_knobs(node, farm=True): ''' Adds additional rendering knobs to given node @@ -1389,15 +1856,11 @@ def set_writes_colorspace(self): if avalon_knob_data.get("families"): families.append(avalon_knob_data.get("families")) - data_preset = { - "nodeclass": avalon_knob_data["family"], - "families": families, - "creator": avalon_knob_data["creator"], - "subset": avalon_knob_data["subset"] - } - - nuke_imageio_writes = get_created_node_imageio_setting( - **data_preset) + nuke_imageio_writes = get_imageio_node_setting( + node_class=avalon_knob_data["family"], + plugin_name=avalon_knob_data["creator"], + subset=avalon_knob_data["subset"] + ) log.debug("nuke_imageio_writes: `{}`".format(nuke_imageio_writes)) @@ -1712,17 +2175,13 @@ def get_write_node_template_attr(node): ''' # get avalon data from node - data = {"avalon": read_avalon_data(node)} - - data_preset = { - "nodeclass": data["avalon"]["family"], - "families": [data["avalon"]["families"]], - "creator": data["avalon"]["creator"], - "subset": data["avalon"]["subset"] - } - + avalon_knob_data = read_avalon_data(node) # get template data - nuke_imageio_writes = get_created_node_imageio_setting(**data_preset) + nuke_imageio_writes = get_imageio_node_setting( + node_class=avalon_knob_data["family"], + plugin_name=avalon_knob_data["creator"], + subset=avalon_knob_data["subset"] + ) # collecting correct data correct_data = OrderedDict({ diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 37c3633d2c5..2bad6f2c788 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -17,7 +17,8 @@ reset_selection, maintained_selection, set_avalon_knob_data, - add_publish_knob + add_publish_knob, + get_nuke_imageio_settings ) @@ -27,9 +28,6 @@ class OpenPypeCreator(LegacyCreator): def __init__(self, *args, **kwargs): super(OpenPypeCreator, self).__init__(*args, 
**kwargs) - self.presets = get_current_project_settings()["nuke"]["create"].get( - self.__class__.__name__, {} - ) if check_subsetname_exists( nuke.allNodes(), self.data["subset"]): @@ -606,6 +604,7 @@ class AbstractWriteRender(OpenPypeCreator): icon = "sign-out" defaults = ["Main", "Mask"] knobs = [] + prenodes = {} def __init__(self, *args, **kwargs): super(AbstractWriteRender, self).__init__(*args, **kwargs) @@ -682,21 +681,12 @@ def process(self): self.data.update(creator_data) write_data.update(creator_data) - if self.presets.get('fpath_template'): - self.log.info("Adding template path from preset") - write_data.update( - {"fpath_template": self.presets["fpath_template"]} - ) - else: - self.log.info("Adding template path from plugin") - write_data.update({ - "fpath_template": - ("{work}/" + self.family + "s/nuke/{subset}" - "/{subset}.{frame}.{ext}")}) - - write_node = self._create_write_node(selected_node, - inputs, outputs, - write_data) + write_node = self._create_write_node( + selected_node, + inputs, + outputs, + write_data + ) # relinking to collected connections for i, input in enumerate(inputs): @@ -711,6 +701,28 @@ def process(self): return write_node + def is_legacy(self): + """Check if it needs to run legacy code + + In case where `type` key is missing in singe + knob it is legacy project anatomy. + + Returns: + bool: True if legacy + """ + imageio_nodes = get_nuke_imageio_settings()["nodes"] + node = imageio_nodes["requiredNodes"][0] + if "type" not in node["knobs"][0]: + # if type is not yet in project anatomy + return True + elif next(iter( + _k for _k in node["knobs"] + if _k.get("type") == "__legacy__" + ), None): + # in case someone re-saved anatomy + # with old configuration + return True + @abstractmethod def _create_write_node(self, selected_node, inputs, outputs, write_data): """Family dependent implementation of Write node creation diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py index 7297f74c136..32ee1fd86ff 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py +++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py @@ -1,7 +1,8 @@ import nuke from openpype.hosts.nuke.api import plugin -from openpype.hosts.nuke.api.lib import create_write_node +from openpype.hosts.nuke.api.lib import ( + create_write_node, create_write_node_legacy) class CreateWritePrerender(plugin.AbstractWriteRender): @@ -12,22 +13,37 @@ class CreateWritePrerender(plugin.AbstractWriteRender): n_class = "Write" family = "prerender" icon = "sign-out" + + # settings + fpath_template = "{work}/render/nuke/{subset}/{subset}.{frame}.{ext}" defaults = ["Key01", "Bg01", "Fg01", "Branch01", "Part01"] + reviewable = False + use_range_limit = True def __init__(self, *args, **kwargs): super(CreateWritePrerender, self).__init__(*args, **kwargs) def _create_write_node(self, selected_node, inputs, outputs, write_data): - reviewable = self.presets.get("reviewable") - write_node = create_write_node( - self.data["subset"], - write_data, - input=selected_node, - prenodes=[], - review=reviewable, - linked_knobs=["channels", "___", "first", "last", "use_limit"]) - - return write_node + # add fpath_template + write_data["fpath_template"] = self.fpath_template + write_data["use_range_limit"] = self.use_range_limit + + if not self.is_legacy(): + return create_write_node( + self.data["subset"], + write_data, + input=selected_node, + review=self.reviewable, + linked_knobs=["channels", "___", "first", 
"last", "use_limit"] + ) + else: + return create_write_node_legacy( + self.data["subset"], + write_data, + input=selected_node, + review=self.reviewable, + linked_knobs=["channels", "___", "first", "last", "use_limit"] + ) def _modify_write_node(self, write_node): # open group node @@ -38,7 +54,7 @@ def _modify_write_node(self, write_node): w_node = n write_node.end() - if self.presets.get("use_range_limit"): + if self.use_range_limit: w_node["use_limit"].setValue(True) w_node["first"].setValue(nuke.root()["first_frame"].value()) w_node["last"].setValue(nuke.root()["last_frame"].value()) diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 36a7b5c33f6..23846c03325 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -1,7 +1,8 @@ import nuke from openpype.hosts.nuke.api import plugin -from openpype.hosts.nuke.api.lib import create_write_node +from openpype.hosts.nuke.api.lib import ( + create_write_node, create_write_node_legacy) class CreateWriteRender(plugin.AbstractWriteRender): @@ -12,13 +13,36 @@ class CreateWriteRender(plugin.AbstractWriteRender): n_class = "Write" family = "render" icon = "sign-out" + + # settings + fpath_template = "{work}/render/nuke/{subset}/{subset}.{frame}.{ext}" defaults = ["Main", "Mask"] - knobs = [] + prenodes = { + "Reformat01": { + "nodeclass": "Reformat", + "dependent": None, + "knobs": [ + { + "type": "text", + "name": "resize", + "value": "none" + }, + { + "type": "bool", + "name": "black_outside", + "value": True + } + ] + } + } def __init__(self, *args, **kwargs): super(CreateWriteRender, self).__init__(*args, **kwargs) def _create_write_node(self, selected_node, inputs, outputs, write_data): + # add fpath_template + write_data["fpath_template"] = self.fpath_template + # add reformat node to cut off all outside of format bounding box # get width and height try: @@ -27,24 +51,36 @@ def _create_write_node(self, selected_node, inputs, outputs, write_data): actual_format = nuke.root().knob('format').value() width, height = (actual_format.width(), actual_format.height()) - _prenodes = [ - { - "name": "Reformat01", - "class": "Reformat", - "knobs": [ - ("resize", 0), - ("black_outside", 1), - ], - "dependent": None - } - ] - - return create_write_node( - self.data["subset"], - write_data, - input=selected_node, - prenodes=_prenodes - ) + if not self.is_legacy(): + return create_write_node( + self.data["subset"], + write_data, + input=selected_node, + prenodes=self.prenodes, + **{ + "width": width, + "height": height + } + ) + else: + _prenodes = [ + { + "name": "Reformat01", + "class": "Reformat", + "knobs": [ + ("resize", 0), + ("black_outside", 1), + ], + "dependent": None + } + ] + + return create_write_node_legacy( + self.data["subset"], + write_data, + input=selected_node, + prenodes=_prenodes + ) def _modify_write_node(self, write_node): return write_node diff --git a/openpype/hosts/nuke/plugins/create/create_write_still.py b/openpype/hosts/nuke/plugins/create/create_write_still.py index d22b5eab3f5..4007ccf51e1 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_still.py +++ b/openpype/hosts/nuke/plugins/create/create_write_still.py @@ -1,7 +1,8 @@ import nuke from openpype.hosts.nuke.api import plugin -from openpype.hosts.nuke.api.lib import create_write_node +from openpype.hosts.nuke.api.lib import ( + create_write_node, create_write_node_legacy) class 
CreateWriteStill(plugin.AbstractWriteRender): @@ -12,42 +13,69 @@ class CreateWriteStill(plugin.AbstractWriteRender): n_class = "Write" family = "still" icon = "image" + + # settings + fpath_template = "{work}/render/nuke/{subset}/{subset}.{ext}" defaults = [ - "ImageFrame{:0>4}".format(nuke.frame()), - "MPFrame{:0>4}".format(nuke.frame()), - "LayoutFrame{:0>4}".format(nuke.frame()) + "ImageFrame", + "MPFrame", + "LayoutFrame" ] + prenodes = { + "FrameHold01": { + "nodeclass": "FrameHold", + "dependent": None, + "knobs": [ + { + "type": "formatable", + "name": "first_frame", + "template": "{frame}", + "to_type": "number" + } + ] + } + } def __init__(self, *args, **kwargs): super(CreateWriteStill, self).__init__(*args, **kwargs) def _create_write_node(self, selected_node, inputs, outputs, write_data): - # explicitly reset template to 'renders', not same as other 2 writes - write_data.update({ - "fpath_template": ( - "{work}/renders/nuke/{subset}/{subset}.{ext}")}) - - _prenodes = [ - { - "name": "FrameHold01", - "class": "FrameHold", - "knobs": [ - ("first_frame", nuke.frame()) - ], - "dependent": None - } - ] + # add fpath_template + write_data["fpath_template"] = self.fpath_template - write_node = create_write_node( - self.name, - write_data, - input=selected_node, - review=False, - prenodes=_prenodes, - farm=False, - linked_knobs=["channels", "___", "first", "last", "use_limit"]) - - return write_node + if not self.is_legacy(): + return create_write_node( + self.name, + write_data, + input=selected_node, + review=False, + prenodes=self.prenodes, + farm=False, + linked_knobs=["channels", "___", "first", "last", "use_limit"], + **{ + "frame": nuke.frame() + } + ) + else: + _prenodes = [ + { + "name": "FrameHold01", + "class": "FrameHold", + "knobs": [ + ("first_frame", nuke.frame()) + ], + "dependent": None + } + ] + return create_write_node_legacy( + self.name, + write_data, + input=selected_node, + review=False, + prenodes=_prenodes, + farm=False, + linked_knobs=["channels", "___", "first", "last", "use_limit"] + ) def _modify_write_node(self, write_node): write_node.begin() diff --git a/openpype/hosts/nuke/plugins/load/actions.py b/openpype/hosts/nuke/plugins/load/actions.py index 81840b3a38f..d364a4f3a1f 100644 --- a/openpype/hosts/nuke/plugins/load/actions.py +++ b/openpype/hosts/nuke/plugins/load/actions.py @@ -9,7 +9,7 @@ class SetFrameRangeLoader(load.LoaderPlugin): - """Specific loader of Alembic for the avalon.animation family""" + """Set frame range excluding pre- and post-handles""" families = ["animation", "camera", @@ -43,7 +43,7 @@ def load(self, context, name, namespace, data): class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): - """Specific loader of Alembic for the avalon.animation family""" + """Set frame range including pre- and post-handles""" families = ["animation", "camera", diff --git a/openpype/hosts/photoshop/api/ws_stub.py b/openpype/hosts/photoshop/api/ws_stub.py index fa076ecc7e4..b49bf1c73fb 100644 --- a/openpype/hosts/photoshop/api/ws_stub.py +++ b/openpype/hosts/photoshop/api/ws_stub.py @@ -29,6 +29,16 @@ class PSItem(object): color_code = attr.ib(default=None) # color code of layer instance_id = attr.ib(default=None) + @property + def clean_name(self): + """Returns layer name without publish icon highlight + + Returns: + (str) + """ + return (self.name.replace(PhotoshopServerStub.PUBLISH_ICON, '') + .replace(PhotoshopServerStub.LOADED_ICON, '')) + class PhotoshopServerStub: """ diff --git 
a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py index 448493d370f..2881ef0ea65 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py @@ -39,6 +39,9 @@ class CollectBatchData(pyblish.api.ContextPlugin): def process(self, context): self.log.info("CollectBatchData") batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA") + if os.environ.get("IS_TEST"): + self.log.debug("Automatic testing, no batch data, skipping") + return assert batch_dir, ( "Missing `OPENPYPE_PUBLISH_DATA`") diff --git a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py index 122428eea0b..ae025fc61dd 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py @@ -5,6 +5,7 @@ from openpype.lib import prepare_template_data from openpype.hosts.photoshop import api as photoshop +from openpype.settings import get_project_settings class CollectColorCodedInstances(pyblish.api.ContextPlugin): @@ -49,6 +50,12 @@ def process(self, context): asset_name = context.data["asset"] task_name = context.data["task"] variant = context.data["variant"] + project_name = context.data["projectEntity"]["name"] + + naming_conventions = get_project_settings(project_name).get( + "photoshop", {}).get( + "publish", {}).get( + "ValidateNaming", {}) stub = photoshop.stub() layers = stub.get_layers() @@ -83,6 +90,9 @@ def process(self, context): subset = resolved_subset_template.format( **prepare_template_data(fill_pairs)) + subset = self._clean_subset_name(stub, naming_conventions, + subset, layer) + if subset in existing_subset_names: self.log.info( "Subset {} already created, skipping.".format(subset)) @@ -141,6 +151,7 @@ def _create_instance(self, context, layer, family, instance.data["task"] = task_name instance.data["subset"] = subset instance.data["layer"] = layer + instance.data["families"] = [] return instance @@ -186,3 +197,21 @@ def _resolve_mapping(self, layer): self.log.debug("resolved_subset_template {}".format( resolved_subset_template)) return family, resolved_subset_template + + def _clean_subset_name(self, stub, naming_conventions, subset, layer): + """Cleans invalid characters from subset name and layer name.""" + if re.search(naming_conventions["invalid_chars"], subset): + subset = re.sub( + naming_conventions["invalid_chars"], + naming_conventions["replace_char"], + subset + ) + layer_name = re.sub( + naming_conventions["invalid_chars"], + naming_conventions["replace_char"], + layer.clean_name + ) + layer.name = layer_name + stub.rename_layer(layer.id, layer_name) + + return subset diff --git a/openpype/hosts/photoshop/plugins/publish/validate_naming.py b/openpype/hosts/photoshop/plugins/publish/validate_naming.py index bcae24108cd..b53f4e8198f 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_naming.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_naming.py @@ -42,7 +42,8 @@ def process(self, context, plugin): layer_name = re.sub(invalid_chars, replace_char, - current_layer_state.name) + current_layer_state.clean_name) + layer_name = stub.PUBLISH_ICON + layer_name stub.rename_layer(current_layer_state.id, layer_name) @@ -73,13 +74,17 @@ class ValidateNaming(pyblish.api.InstancePlugin): def process(self, instance): help_msg = ' Use Repair action 
(A) in Pyblish to fix it.' - msg = "Name \"{}\" is not allowed.{}".format(instance.data["name"], - help_msg) - formatting_data = {"msg": msg} - if re.search(self.invalid_chars, instance.data["name"]): - raise PublishXmlValidationError(self, msg, - formatting_data=formatting_data) + layer = instance.data.get("layer") + if layer: + msg = "Name \"{}\" is not allowed.{}".format(layer.clean_name, + help_msg) + + formatting_data = {"msg": msg} + if re.search(self.invalid_chars, layer.clean_name): + raise PublishXmlValidationError(self, msg, + formatting_data=formatting_data + ) msg = "Subset \"{}\" is not allowed.{}".format(instance.data["subset"], help_msg) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_batch_instances.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_batch_instances.py deleted file mode 100644 index 4ca1f72cc41..00000000000 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_batch_instances.py +++ /dev/null @@ -1,70 +0,0 @@ -import copy -import pyblish.api -from pprint import pformat - - -class CollectBatchInstances(pyblish.api.InstancePlugin): - """Collect all available instances for batch publish.""" - - label = "Collect Batch Instances" - order = pyblish.api.CollectorOrder + 0.489 - hosts = ["standalonepublisher"] - families = ["background_batch"] - - # presets - default_subset_task = { - "background_batch": "background" - } - subsets = { - "background_batch": { - "backgroundLayout": { - "task": "background", - "family": "backgroundLayout" - }, - "backgroundComp": { - "task": "background", - "family": "backgroundComp" - }, - "workfileBackground": { - "task": "background", - "family": "workfile" - } - } - } - unchecked_by_default = [] - - def process(self, instance): - context = instance.context - asset_name = instance.data["asset"] - family = instance.data["family"] - - default_task_name = self.default_subset_task.get(family) - for subset_name, subset_data in self.subsets[family].items(): - instance_name = f"{asset_name}_{subset_name}" - task_name = subset_data.get("task") or default_task_name - - # create new instance - new_instance = context.create_instance(instance_name) - - # add original instance data except name key - for key, value in instance.data.items(): - if key not in ["name"]: - # Make sure value is copy since value may be object which - # can be shared across all new created objects - new_instance.data[key] = copy.deepcopy(value) - - # add subset data from preset - new_instance.data.update(subset_data) - - new_instance.data["label"] = instance_name - new_instance.data["subset"] = subset_name - new_instance.data["task"] = task_name - - if subset_name in self.unchecked_by_default: - new_instance.data["publish"] = False - - self.log.info(f"Created new instance: {instance_name}") - self.log.debug(f"_ inst_data: {pformat(new_instance.data)}") - - # delete original instance - context.remove(instance) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_for_compositing.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_for_compositing.py deleted file mode 100644 index 9621d707397..00000000000 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_for_compositing.py +++ /dev/null @@ -1,243 +0,0 @@ -import os -import json -import copy - -import openpype.api -from openpype.pipeline import legacy_io - -PSDImage = None - - -class ExtractBGForComp(openpype.api.Extractor): - label = "Extract Background for Compositing" - families = ["backgroundComp"] - hosts = 
["standalonepublisher"] - - new_instance_family = "background" - - # Presetable - allowed_group_names = [ - "OL", "BG", "MG", "FG", "SB", "UL", "SKY", "Field Guide", "Field_Guide", - "ANIM" - ] - - def process(self, instance): - # Check if python module `psd_tools` is installed - try: - global PSDImage - from psd_tools import PSDImage - except Exception: - raise AssertionError( - "BUG: Python module `psd-tools` is not installed!" - ) - - self.allowed_group_names = [ - name.lower() - for name in self.allowed_group_names - ] - - self.redo_global_plugins(instance) - - repres = instance.data.get("representations") - if not repres: - self.log.info("There are no representations on instance.") - return - - if not instance.data.get("transfers"): - instance.data["transfers"] = [] - - # Prepare staging dir - staging_dir = self.staging_dir(instance) - if not os.path.exists(staging_dir): - os.makedirs(staging_dir) - - for repre in tuple(repres): - # Skip all files without .psd extension - repre_ext = repre["ext"].lower() - if repre_ext.startswith("."): - repre_ext = repre_ext[1:] - - if repre_ext != "psd": - continue - - # Prepare publish dir for transfers - publish_dir = instance.data["publishDir"] - - # Prepare json filepath where extracted metadata are stored - json_filename = "{}.json".format(instance.name) - json_full_path = os.path.join(staging_dir, json_filename) - - self.log.debug(f"`staging_dir` is \"{staging_dir}\"") - - # Prepare new repre data - new_repre = { - "name": "json", - "ext": "json", - "files": json_filename, - "stagingDir": staging_dir - } - - # TODO add check of list - psd_filename = repre["files"] - psd_folder_path = repre["stagingDir"] - psd_filepath = os.path.join(psd_folder_path, psd_filename) - self.log.debug(f"psd_filepath: \"{psd_filepath}\"") - psd_object = PSDImage.open(psd_filepath) - - json_data, transfers = self.export_compositing_images( - psd_object, staging_dir, publish_dir - ) - self.log.info("Json file path: {}".format(json_full_path)) - with open(json_full_path, "w") as json_filestream: - json.dump(json_data, json_filestream, indent=4) - - instance.data["transfers"].extend(transfers) - instance.data["representations"].remove(repre) - instance.data["representations"].append(new_repre) - - def export_compositing_images(self, psd_object, output_dir, publish_dir): - json_data = { - "__schema_version__": 1, - "children": [] - } - transfers = [] - for main_idx, main_layer in enumerate(psd_object): - if ( - not main_layer.is_visible() - or main_layer.name.lower() not in self.allowed_group_names - or not main_layer.is_group - ): - continue - - export_layers = [] - layers_idx = 0 - for layer in main_layer: - # TODO this way may be added also layers next to "ADJ" - if layer.name.lower() == "adj": - for _layer in layer: - export_layers.append((layers_idx, _layer)) - layers_idx += 1 - - else: - export_layers.append((layers_idx, layer)) - layers_idx += 1 - - if not export_layers: - continue - - main_layer_data = { - "index": main_idx, - "name": main_layer.name, - "children": [] - } - - for layer_idx, layer in export_layers: - has_size = layer.width > 0 and layer.height > 0 - if not has_size: - self.log.debug(( - "Skipping layer \"{}\" because does " - "not have any content." 
- ).format(layer.name)) - continue - - main_layer_name = main_layer.name.replace(" ", "_") - layer_name = layer.name.replace(" ", "_") - - filename = "{:0>2}_{}_{:0>2}_{}.png".format( - main_idx + 1, main_layer_name, layer_idx + 1, layer_name - ) - layer_data = { - "index": layer_idx, - "name": layer.name, - "filename": filename - } - output_filepath = os.path.join(output_dir, filename) - dst_filepath = os.path.join(publish_dir, filename) - transfers.append((output_filepath, dst_filepath)) - - pil_object = layer.composite(viewport=psd_object.viewbox) - pil_object.save(output_filepath, "PNG") - - main_layer_data["children"].append(layer_data) - - if main_layer_data["children"]: - json_data["children"].append(main_layer_data) - - return json_data, transfers - - def redo_global_plugins(self, instance): - # TODO do this in collection phase - # Copy `families` and check if `family` is not in current families - families = instance.data.get("families") or list() - if families: - families = list(set(families)) - - if self.new_instance_family in families: - families.remove(self.new_instance_family) - - self.log.debug( - "Setting new instance families {}".format(str(families)) - ) - instance.data["families"] = families - - # Override instance data with new information - instance.data["family"] = self.new_instance_family - - subset_name = instance.data["anatomyData"]["subset"] - asset_doc = instance.data["assetEntity"] - latest_version = self.find_last_version(subset_name, asset_doc) - version_number = 1 - if latest_version is not None: - version_number += latest_version - - instance.data["latestVersion"] = latest_version - instance.data["version"] = version_number - - # Same data apply to anatomy data - instance.data["anatomyData"].update({ - "family": self.new_instance_family, - "version": version_number - }) - - # Redo publish and resources dir - anatomy = instance.context.data["anatomy"] - template_data = copy.deepcopy(instance.data["anatomyData"]) - template_data.update({ - "frame": "FRAME_TEMP", - "representation": "TEMP" - }) - anatomy_filled = anatomy.format(template_data) - if "folder" in anatomy.templates["publish"]: - publish_folder = anatomy_filled["publish"]["folder"] - else: - publish_folder = os.path.dirname(anatomy_filled["publish"]["path"]) - - publish_folder = os.path.normpath(publish_folder) - resources_folder = os.path.join(publish_folder, "resources") - - instance.data["publishDir"] = publish_folder - instance.data["resourcesDir"] = resources_folder - - self.log.debug("publishDir: \"{}\"".format(publish_folder)) - self.log.debug("resourcesDir: \"{}\"".format(resources_folder)) - - def find_last_version(self, subset_name, asset_doc): - subset_doc = legacy_io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset_doc["_id"] - }) - - if subset_doc is None: - self.log.debug("Subset entity does not exist yet.") - else: - version_doc = legacy_io.find_one( - { - "type": "version", - "parent": subset_doc["_id"] - }, - sort=[("name", -1)] - ) - if version_doc: - return int(version_doc["name"]) - return None diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_main_groups.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_main_groups.py deleted file mode 100644 index b45f04e5741..00000000000 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_main_groups.py +++ /dev/null @@ -1,248 +0,0 @@ -import os -import copy -import json - -import pyblish.api - -import openpype.api -from openpype.pipeline import legacy_io - 
-PSDImage = None - - -class ExtractBGMainGroups(openpype.api.Extractor): - label = "Extract Background Layout" - order = pyblish.api.ExtractorOrder + 0.02 - families = ["backgroundLayout"] - hosts = ["standalonepublisher"] - - new_instance_family = "background" - - # Presetable - allowed_group_names = [ - "OL", "BG", "MG", "FG", "UL", "SB", "SKY", "Field Guide", "Field_Guide", - "ANIM" - ] - - def process(self, instance): - # Check if python module `psd_tools` is installed - try: - global PSDImage - from psd_tools import PSDImage - except Exception: - raise AssertionError( - "BUG: Python module `psd-tools` is not installed!" - ) - - self.allowed_group_names = [ - name.lower() - for name in self.allowed_group_names - ] - repres = instance.data.get("representations") - if not repres: - self.log.info("There are no representations on instance.") - return - - self.redo_global_plugins(instance) - - repres = instance.data.get("representations") - if not repres: - self.log.info("There are no representations on instance.") - return - - if not instance.data.get("transfers"): - instance.data["transfers"] = [] - - # Prepare staging dir - staging_dir = self.staging_dir(instance) - if not os.path.exists(staging_dir): - os.makedirs(staging_dir) - - # Prepare publish dir for transfers - publish_dir = instance.data["publishDir"] - - for repre in tuple(repres): - # Skip all files without .psd extension - repre_ext = repre["ext"].lower() - if repre_ext.startswith("."): - repre_ext = repre_ext[1:] - - if repre_ext != "psd": - continue - - # Prepare json filepath where extracted metadata are stored - json_filename = "{}.json".format(instance.name) - json_full_path = os.path.join(staging_dir, json_filename) - - self.log.debug(f"`staging_dir` is \"{staging_dir}\"") - - # Prepare new repre data - new_repre = { - "name": "json", - "ext": "json", - "files": json_filename, - "stagingDir": staging_dir - } - - # TODO add check of list - psd_filename = repre["files"] - psd_folder_path = repre["stagingDir"] - psd_filepath = os.path.join(psd_folder_path, psd_filename) - self.log.debug(f"psd_filepath: \"{psd_filepath}\"") - psd_object = PSDImage.open(psd_filepath) - - json_data, transfers = self.export_compositing_images( - psd_object, staging_dir, publish_dir - ) - self.log.info("Json file path: {}".format(json_full_path)) - with open(json_full_path, "w") as json_filestream: - json.dump(json_data, json_filestream, indent=4) - - instance.data["transfers"].extend(transfers) - instance.data["representations"].remove(repre) - instance.data["representations"].append(new_repre) - - def export_compositing_images(self, psd_object, output_dir, publish_dir): - json_data = { - "__schema_version__": 1, - "children": [] - } - output_ext = ".png" - - to_export = [] - for layer_idx, layer in enumerate(psd_object): - layer_name = layer.name.replace(" ", "_") - if ( - not layer.is_visible() - or layer_name.lower() not in self.allowed_group_names - ): - continue - - has_size = layer.width > 0 and layer.height > 0 - if not has_size: - self.log.debug(( - "Skipping layer \"{}\" because does not have any content." 
- ).format(layer.name)) - continue - - filebase = "{:0>2}_{}".format(layer_idx, layer_name) - if layer_name.lower() == "anim": - if not layer.is_group: - self.log.warning("ANIM layer is not a group layer.") - continue - - children = [] - for anim_idx, anim_layer in enumerate(layer): - anim_layer_name = anim_layer.name.replace(" ", "_") - filename = "{}_{:0>2}_{}{}".format( - filebase, anim_idx, anim_layer_name, output_ext - ) - children.append({ - "index": anim_idx, - "name": anim_layer.name, - "filename": filename - }) - to_export.append((anim_layer, filename)) - - json_data["children"].append({ - "index": layer_idx, - "name": layer.name, - "children": children - }) - continue - - filename = filebase + output_ext - json_data["children"].append({ - "index": layer_idx, - "name": layer.name, - "filename": filename - }) - to_export.append((layer, filename)) - - transfers = [] - for layer, filename in to_export: - output_filepath = os.path.join(output_dir, filename) - dst_filepath = os.path.join(publish_dir, filename) - transfers.append((output_filepath, dst_filepath)) - - pil_object = layer.composite(viewport=psd_object.viewbox) - pil_object.save(output_filepath, "PNG") - - return json_data, transfers - - def redo_global_plugins(self, instance): - # TODO do this in collection phase - # Copy `families` and check if `family` is not in current families - families = instance.data.get("families") or list() - if families: - families = list(set(families)) - - if self.new_instance_family in families: - families.remove(self.new_instance_family) - - self.log.debug( - "Setting new instance families {}".format(str(families)) - ) - instance.data["families"] = families - - # Override instance data with new information - instance.data["family"] = self.new_instance_family - - subset_name = instance.data["anatomyData"]["subset"] - asset_doc = instance.data["assetEntity"] - latest_version = self.find_last_version(subset_name, asset_doc) - version_number = 1 - if latest_version is not None: - version_number += latest_version - - instance.data["latestVersion"] = latest_version - instance.data["version"] = version_number - - # Same data apply to anatomy data - instance.data["anatomyData"].update({ - "family": self.new_instance_family, - "version": version_number - }) - - # Redo publish and resources dir - anatomy = instance.context.data["anatomy"] - template_data = copy.deepcopy(instance.data["anatomyData"]) - template_data.update({ - "frame": "FRAME_TEMP", - "representation": "TEMP" - }) - anatomy_filled = anatomy.format(template_data) - if "folder" in anatomy.templates["publish"]: - publish_folder = anatomy_filled["publish"]["folder"] - else: - publish_folder = os.path.dirname(anatomy_filled["publish"]["path"]) - - publish_folder = os.path.normpath(publish_folder) - resources_folder = os.path.join(publish_folder, "resources") - - instance.data["publishDir"] = publish_folder - instance.data["resourcesDir"] = resources_folder - - self.log.debug("publishDir: \"{}\"".format(publish_folder)) - self.log.debug("resourcesDir: \"{}\"".format(resources_folder)) - - def find_last_version(self, subset_name, asset_doc): - subset_doc = legacy_io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset_doc["_id"] - }) - - if subset_doc is None: - self.log.debug("Subset entity does not exist yet.") - else: - version_doc = legacy_io.find_one( - { - "type": "version", - "parent": subset_doc["_id"] - }, - sort=[("name", -1)] - ) - if version_doc: - return int(version_doc["name"]) - return None diff --git 
a/openpype/hosts/standalonepublisher/plugins/publish/extract_images_from_psd.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_images_from_psd.py deleted file mode 100644 index 8485fa0915b..00000000000 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_images_from_psd.py +++ /dev/null @@ -1,171 +0,0 @@ -import os -import copy -import pyblish.api - -import openpype.api -from openpype.pipeline import legacy_io - -PSDImage = None - - -class ExtractImagesFromPSD(openpype.api.Extractor): - # PLUGIN is not currently enabled because was decided to use different - # approach - enabled = False - active = False - label = "Extract Images from PSD" - order = pyblish.api.ExtractorOrder + 0.02 - families = ["backgroundLayout"] - hosts = ["standalonepublisher"] - - new_instance_family = "image" - ignored_instance_data_keys = ("name", "label", "stagingDir", "version") - # Presetable - allowed_group_names = [ - "OL", "BG", "MG", "FG", "UL", "SKY", "Field Guide", "Field_Guide", - "ANIM" - ] - - def process(self, instance): - # Check if python module `psd_tools` is installed - try: - global PSDImage - from psd_tools import PSDImage - except Exception: - raise AssertionError( - "BUG: Python module `psd-tools` is not installed!" - ) - - self.allowed_group_names = [ - name.lower() - for name in self.allowed_group_names - ] - repres = instance.data.get("representations") - if not repres: - self.log.info("There are no representations on instance.") - return - - for repre in tuple(repres): - # Skip all files without .psd extension - repre_ext = repre["ext"].lower() - if repre_ext.startswith("."): - repre_ext = repre_ext[1:] - - if repre_ext != "psd": - continue - - # TODO add check of list of "files" value - psd_filename = repre["files"] - psd_folder_path = repre["stagingDir"] - psd_filepath = os.path.join(psd_folder_path, psd_filename) - self.log.debug(f"psd_filepath: \"{psd_filepath}\"") - psd_object = PSDImage.open(psd_filepath) - - self.create_new_instances(instance, psd_object) - - # Remove the instance from context - instance.context.remove(instance) - - def create_new_instances(self, instance, psd_object): - asset_doc = instance.data["assetEntity"] - for layer in psd_object: - if ( - not layer.is_visible() - or layer.name.lower() not in self.allowed_group_names - ): - continue - - has_size = layer.width > 0 and layer.height > 0 - if not has_size: - self.log.debug(( - "Skipping layer \"{}\" because does " - "not have any content." 
- ).format(layer.name)) - continue - - layer_name = layer.name.replace(" ", "_") - instance_name = subset_name = f"image{layer_name}" - self.log.info( - f"Creating new instance with name \"{instance_name}\"" - ) - new_instance = instance.context.create_instance(instance_name) - for key, value in instance.data.items(): - if key not in self.ignored_instance_data_keys: - new_instance.data[key] = copy.deepcopy(value) - - new_instance.data["label"] = " ".join( - (new_instance.data["asset"], instance_name) - ) - - # Find latest version - latest_version = self.find_last_version(subset_name, asset_doc) - version_number = 1 - if latest_version is not None: - version_number += latest_version - - self.log.info( - "Next version of instance \"{}\" will be {}".format( - instance_name, version_number - ) - ) - - # Set family and subset - new_instance.data["family"] = self.new_instance_family - new_instance.data["subset"] = subset_name - new_instance.data["version"] = version_number - new_instance.data["latestVersion"] = latest_version - - new_instance.data["anatomyData"].update({ - "subset": subset_name, - "family": self.new_instance_family, - "version": version_number - }) - - # Copy `families` and check if `family` is not in current families - families = new_instance.data.get("families") or list() - if families: - families = list(set(families)) - - if self.new_instance_family in families: - families.remove(self.new_instance_family) - new_instance.data["families"] = families - - # Prepare staging dir for new instance - staging_dir = self.staging_dir(new_instance) - - output_filename = "{}.png".format(layer_name) - output_filepath = os.path.join(staging_dir, output_filename) - pil_object = layer.composite(viewport=psd_object.viewbox) - pil_object.save(output_filepath, "PNG") - - new_repre = { - "name": "png", - "ext": "png", - "files": output_filename, - "stagingDir": staging_dir - } - self.log.debug( - "Creating new representation: {}".format(new_repre) - ) - new_instance.data["representations"] = [new_repre] - - def find_last_version(self, subset_name, asset_doc): - subset_doc = legacy_io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset_doc["_id"] - }) - - if subset_doc is None: - self.log.debug("Subset entity does not exist yet.") - else: - version_doc = legacy_io.find_one( - { - "type": "version", - "parent": subset_doc["_id"] - }, - sort=[("name", -1)] - ) - if version_doc: - return int(version_doc["name"]) - return None diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_thumbnail.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_thumbnail.py index 23f0b104c8a..3ee2f70809a 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_thumbnail.py @@ -2,7 +2,11 @@ import tempfile import pyblish.api import openpype.api -import openpype.lib +from openpype.lib import ( + get_ffmpeg_tool_path, + get_ffprobe_streams, + path_to_subprocess_arg, +) class ExtractThumbnailSP(pyblish.api.InstancePlugin): @@ -34,85 +38,78 @@ def process(self, instance): if not thumbnail_repre: return + thumbnail_repre.pop("thumbnail") files = thumbnail_repre.get("files") if not files: return if isinstance(files, list): - files_len = len(files) - file = str(files[0]) + first_filename = str(files[0]) else: - files_len = 1 - file = files + first_filename = files staging_dir = None - is_jpeg = False - if file.endswith(".jpeg") or file.endswith(".jpg"): - is_jpeg = True - - if is_jpeg and 
files_len == 1: - # skip if already is single jpeg file - return - elif is_jpeg: - # use first frame as thumbnail if is sequence of jpegs - full_thumbnail_path = os.path.join( - thumbnail_repre["stagingDir"], file - ) - self.log.info( - "For thumbnail is used file: {}".format(full_thumbnail_path) - ) + # Convert to jpeg if not yet + full_input_path = os.path.join( + thumbnail_repre["stagingDir"], first_filename + ) + self.log.info("input {}".format(full_input_path)) + with tempfile.NamedTemporaryFile(suffix=".jpg") as tmp: + full_thumbnail_path = tmp.name - else: - # Convert to jpeg if not yet - full_input_path = os.path.join(thumbnail_repre["stagingDir"], file) - self.log.info("input {}".format(full_input_path)) + self.log.info("output {}".format(full_thumbnail_path)) - full_thumbnail_path = tempfile.mkstemp(suffix=".jpg")[1] - self.log.info("output {}".format(full_thumbnail_path)) + instance.context.data["cleanupFullPaths"].append(full_thumbnail_path) - ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg") + ffmpeg_path = get_ffmpeg_tool_path("ffmpeg") - ffmpeg_args = self.ffmpeg_args or {} + ffmpeg_args = self.ffmpeg_args or {} - jpeg_items = [ - "\"{}\"".format(ffmpeg_path), - # override file if already exists - "-y" - ] + jpeg_items = [ + path_to_subprocess_arg(ffmpeg_path), + # override file if already exists + "-y" + ] - # add input filters from peresets - jpeg_items.extend(ffmpeg_args.get("input") or []) - # input file - jpeg_items.append("-i \"{}\"".format(full_input_path)) + # add input filters from peresets + jpeg_items.extend(ffmpeg_args.get("input") or []) + # input file + jpeg_items.extend([ + "-i", path_to_subprocess_arg(full_input_path), # extract only single file - jpeg_items.append("-frames:v 1") + "-frames:v", "1", # Add black background for transparent images - jpeg_items.append(( - "-filter_complex" - " \"color=black,format=rgb24[c]" + "-filter_complex", ( + "\"color=black,format=rgb24[c]" ";[c][0]scale2ref[c][i]" ";[c][i]overlay=format=auto:shortest=1,setsar=1\"" - )) + ), + ]) - jpeg_items.extend(ffmpeg_args.get("output") or []) + jpeg_items.extend(ffmpeg_args.get("output") or []) - # output file - jpeg_items.append("\"{}\"".format(full_thumbnail_path)) + # output file + jpeg_items.append(path_to_subprocess_arg(full_thumbnail_path)) - subprocess_jpeg = " ".join(jpeg_items) + subprocess_jpeg = " ".join(jpeg_items) - # run subprocess - self.log.debug("Executing: {}".format(subprocess_jpeg)) - openpype.api.run_subprocess( - subprocess_jpeg, shell=True, logger=self.log - ) + # run subprocess + self.log.debug("Executing: {}".format(subprocess_jpeg)) + openpype.api.run_subprocess( + subprocess_jpeg, shell=True, logger=self.log + ) # remove thumbnail key from origin repre - thumbnail_repre.pop("thumbnail") + streams = get_ffprobe_streams(full_thumbnail_path) + width = height = None + for stream in streams: + if "width" in stream and "height" in stream: + width = stream["width"] + height = stream["height"] + break - filename = os.path.basename(full_thumbnail_path) - staging_dir = staging_dir or os.path.dirname(full_thumbnail_path) + staging_dir, filename = os.path.split(full_thumbnail_path) # create new thumbnail representation representation = { @@ -120,12 +117,11 @@ def process(self, instance): 'ext': 'jpg', 'files': filename, "stagingDir": staging_dir, - "tags": ["thumbnail"], + "tags": ["thumbnail", "delete"], } - - # # add Delete tag when temp file was rendered - if not is_jpeg: - representation["tags"].append("delete") + if width and height: + 
representation["width"] = width + representation["height"] = height self.log.info(f"New representation {representation}") instance.data["representations"].append(representation) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 813641a7d2c..202664cfc6c 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -2,10 +2,7 @@ Creator, CreatedInstance ) -from openpype.lib import ( - FileDef, - BoolDef, -) +from openpype.lib import FileDef from .pipeline import ( list_instances, @@ -17,6 +14,7 @@ class TrayPublishCreator(Creator): create_allow_context_change = True + host_name = "traypublisher" def collect_instances(self): for instance_data in list_instances(): @@ -43,7 +41,6 @@ def get_pre_create_attr_defs(self): class SettingsCreator(TrayPublishCreator): create_allow_context_change = True - enable_review = False extensions = [] def collect_instances(self): @@ -67,19 +64,15 @@ def create(self, subset_name, data, pre_create_data): self._add_instance_to_context(new_instance) def get_instance_attr_defs(self): - output = [] - - file_def = FileDef( - "filepath", - folders=False, - extensions=self.extensions, - allow_sequences=self.allow_sequences, - label="Filepath", - ) - output.append(file_def) - if self.enable_review: - output.append(BoolDef("review", label="Review")) - return output + return [ + FileDef( + "filepath", + folders=False, + extensions=self.extensions, + allow_sequences=self.allow_sequences, + label="Filepath", + ) + ] @classmethod def from_settings(cls, item_data): @@ -97,7 +90,6 @@ def from_settings(cls, item_data): "icon": item_data["icon"], "description": item_data["description"], "detailed_description": item_data["detailed_description"], - "enable_review": item_data["enable_review"], "extensions": item_data["extensions"], "allow_sequences": item_data["allow_sequences"], "default_variants": item_data["default_variants"] diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_review_family.py b/openpype/hosts/traypublisher/plugins/publish/collect_review_family.py new file mode 100644 index 00000000000..965e2515278 --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/collect_review_family.py @@ -0,0 +1,31 @@ +import pyblish.api +from openpype.lib import BoolDef +from openpype.pipeline import OpenPypePyblishPluginMixin + + +class CollectReviewFamily( + pyblish.api.InstancePlugin, OpenPypePyblishPluginMixin +): + """Add review family.""" + + label = "Collect Review Family" + order = pyblish.api.CollectorOrder - 0.49 + + hosts = ["traypublisher"] + families = [ + "image", + "render", + "plate", + "review" + ] + + def process(self, instance): + values = self.get_attr_values_from_data(instance.data) + if values.get("add_review_family"): + instance.data["families"].append("review") + + @classmethod + def get_attribute_defs(cls): + return [ + BoolDef("add_review_family", label="Review", default=True) + ] diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index 5fc66084d61..b2be43c701a 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -22,10 +22,6 @@ def process(self, instance): repres = instance.data["representations"] creator_attributes = instance.data["creator_attributes"] - - if creator_attributes.get("review"): - 
instance.data["families"].append("review") - filepath_item = creator_attributes["filepath"] self.log.info(filepath_item) filepaths = [ @@ -34,9 +30,11 @@ def process(self, instance): ] instance.data["sourceFilepaths"] = filepaths + instance.data["stagingDir"] = filepath_item["directory"] filenames = filepath_item["filenames"] - ext = os.path.splitext(filenames[0])[-1] + _, ext = os.path.splitext(filenames[0]) + ext = ext[1:] if len(filenames) == 1: filenames = filenames[0] @@ -46,3 +44,7 @@ def process(self, instance): "stagingDir": filepath_item["directory"], "files": filenames }) + + self.log.debug("Created Simple Settings instance {}".format( + instance.data + )) diff --git a/openpype/hosts/unreal/api/rendering.py b/openpype/hosts/unreal/api/rendering.py index 376e1b75cec..b2732506fce 100644 --- a/openpype/hosts/unreal/api/rendering.py +++ b/openpype/hosts/unreal/api/rendering.py @@ -1,5 +1,8 @@ +import os + import unreal +from openpype.api import Anatomy from openpype.hosts.unreal.api import pipeline @@ -46,6 +49,15 @@ def start_rendering(): if data["family"] == "render": inst_data.append(data) + try: + project = os.environ.get("AVALON_PROJECT") + anatomy = Anatomy(project) + root = anatomy.roots['renders'] + except Exception: + raise Exception("Could not find render root in anatomy settings.") + + render_dir = f"{root}/{project}" + # subsystem = unreal.get_editor_subsystem( # unreal.MoviePipelineQueueSubsystem) # queue = subsystem.get_queue() @@ -105,7 +117,7 @@ def start_rendering(): settings.custom_end_frame = r.get("frame_range")[1] settings.use_custom_playback_range = True settings.file_name_format = "{sequence_name}.{frame_number}" - settings.output_directory.path += r.get('output') + settings.output_directory.path = f"{render_dir}/{r.get('output')}" renderPass = job.get_configuration().find_or_add_setting_by_class( unreal.MoviePipelineDeferredPassBase) diff --git a/openpype/hosts/unreal/plugins/load/load_animation.py b/openpype/hosts/unreal/plugins/load/load_animation.py index c7581e0cdd3..60c1526d3d1 100644 --- a/openpype/hosts/unreal/plugins/load/load_animation.py +++ b/openpype/hosts/unreal/plugins/load/load_animation.py @@ -134,7 +134,6 @@ def load(self, context, name, namespace, options=None): Returns: list(str): list of container content """ - # Create directory for asset and avalon container hierarchy = context.get('asset').get('data').get('parents') root = "/Game/OpenPype" @@ -149,11 +148,30 @@ def load(self, context, name, namespace, options=None): asset_dir, container_name = tools.create_unique_asset_name( f"{root}/Animations/{asset}/{name}", suffix="") + ar = unreal.AssetRegistryHelpers.get_asset_registry() + + filter = unreal.ARFilter( + class_names=["World"], + package_paths=[f"{root}/{hierarchy[0]}"], + recursive_paths=False) + levels = ar.get_assets(filter) + master_level = levels[0].get_editor_property('object_path') + hierarchy_dir = root for h in hierarchy: hierarchy_dir = f"{hierarchy_dir}/{h}" hierarchy_dir = f"{hierarchy_dir}/{asset}" + filter = unreal.ARFilter( + class_names=["World"], + package_paths=[f"{hierarchy_dir}/"], + recursive_paths=True) + levels = ar.get_assets(filter) + level = levels[0].get_editor_property('object_path') + + unreal.EditorLevelLibrary.save_all_dirty_levels() + unreal.EditorLevelLibrary.load_level(level) + container_name += suffix EditorAssetLibrary.make_directory(asset_dir) @@ -165,7 +183,7 @@ def load(self, context, name, namespace, options=None): instance_name = data.get("instance_name") - animation = self._process(asset_dir, 
container_name, instance_name) + animation = self._process(asset_dir, asset_name, instance_name) asset_content = EditorAssetLibrary.list_assets( hierarchy_dir, recursive=True, include_folder=False) @@ -224,6 +242,9 @@ def load(self, context, name, namespace, options=None): for a in imported_content: EditorAssetLibrary.save_asset(a) + unreal.EditorLevelLibrary.save_current_level() + unreal.EditorLevelLibrary.load_level(master_level) + def update(self, container, representation): name = container["asset_name"] source_path = get_representation_path(representation) diff --git a/openpype/hosts/unreal/plugins/load/load_camera.py b/openpype/hosts/unreal/plugins/load/load_camera.py index ea896f6c44f..b33e45b6e96 100644 --- a/openpype/hosts/unreal/plugins/load/load_camera.py +++ b/openpype/hosts/unreal/plugins/load/load_camera.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- """Load camera from FBX.""" -import os +from pathlib import Path import unreal from unreal import EditorAssetLibrary @@ -268,68 +268,242 @@ def load(self, context, name, namespace, data): return asset_content def update(self, container, representation): - path = container["namespace"] - ar = unreal.AssetRegistryHelpers.get_asset_registry() - tools = unreal.AssetToolsHelpers().get_asset_tools() - asset_content = EditorAssetLibrary.list_assets( - path, recursive=False, include_folder=False - ) - asset_name = "" - for a in asset_content: - asset = ar.get_asset_by_object_path(a) - if a.endswith("_CON"): - loaded_asset = EditorAssetLibrary.load_asset(a) - EditorAssetLibrary.set_metadata_tag( - loaded_asset, "representation", str(representation["_id"]) - ) - EditorAssetLibrary.set_metadata_tag( - loaded_asset, "parent", str(representation["parent"]) - ) - asset_name = EditorAssetLibrary.get_metadata_tag( - loaded_asset, "asset_name" - ) - elif asset.asset_class == "LevelSequence": - EditorAssetLibrary.delete_asset(a) + root = "/Game/OpenPype" - sequence = tools.create_asset( - asset_name=asset_name, - package_path=path, - asset_class=unreal.LevelSequence, - factory=unreal.LevelSequenceFactoryNew() + asset_dir = container.get('namespace') + + context = representation.get("context") + + hierarchy = context.get('hierarchy').split("/") + h_dir = f"{root}/{hierarchy[0]}" + h_asset = hierarchy[0] + master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map" + + EditorLevelLibrary.save_current_level() + + filter = unreal.ARFilter( + class_names=["LevelSequence"], + package_paths=[asset_dir], + recursive_paths=False) + sequences = ar.get_assets(filter) + filter = unreal.ARFilter( + class_names=["World"], + package_paths=[str(Path(asset_dir).parent.as_posix())], + recursive_paths=True) + maps = ar.get_assets(filter) + + # There should be only one map in the list + EditorLevelLibrary.load_level(maps[0].get_full_name()) + + level_sequence = sequences[0].get_asset() + + display_rate = level_sequence.get_display_rate() + playback_start = level_sequence.get_playback_start() + playback_end = level_sequence.get_playback_end() + + sequence_name = f"{container.get('asset')}_camera" + + # Get the actors in the level sequence. 
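Several of the Unreal loaders touched in this diff now locate their level and sequence assets through the asset registry instead of hard-coded object paths. The lookup is always the same three steps: get the registry, build an ARFilter for a class and package path, and take the first hit. A minimal helper in that spirit, meant for Unreal's editor Python; the function name is illustrative and not part of the diff:

    import unreal

    def find_first_asset(class_name, package_path, recursive=False):
        """Return the first asset of `class_name` under `package_path`, or None."""
        ar = unreal.AssetRegistryHelpers.get_asset_registry()
        asset_filter = unreal.ARFilter(
            class_names=[class_name],
            package_paths=[package_path],
            recursive_paths=recursive,
        )
        assets = ar.get_assets(asset_filter)
        if not assets:
            return None
        # `get_assets` returns AssetData entries; `get_asset()` loads the asset.
        return assets[0].get_asset()

    # e.g. the master level of a hierarchy, as the loaders above do:
    # master_level = find_first_asset("World", "/Game/OpenPype/MyShow")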
+ objs = unreal.SequencerTools.get_bound_objects( + unreal.EditorLevelLibrary.get_editor_world(), + level_sequence, + level_sequence.get_bindings(), + unreal.SequencerScriptingRange( + has_start_value=True, + has_end_value=True, + inclusive_start=level_sequence.get_playback_start(), + exclusive_end=level_sequence.get_playback_end() + ) ) - io_asset = legacy_io.Session["AVALON_ASSET"] - asset_doc = legacy_io.find_one({ - "type": "asset", - "name": io_asset - }) + # Delete actors from the map + for o in objs: + if o.bound_objects[0].get_class().get_name() == "CineCameraActor": + actor_path = o.bound_objects[0].get_path_name().split(":")[-1] + actor = EditorLevelLibrary.get_actor_reference(actor_path) + EditorLevelLibrary.destroy_actor(actor) + + # Remove the Level Sequence from the parent. + # We need to traverse the hierarchy from the master sequence to find + # the level sequence. + root = "/Game/OpenPype" + namespace = container.get('namespace').replace(f"{root}/", "") + ms_asset = namespace.split('/')[0] + filter = unreal.ARFilter( + class_names=["LevelSequence"], + package_paths=[f"{root}/{ms_asset}"], + recursive_paths=False) + sequences = ar.get_assets(filter) + master_sequence = sequences[0].get_asset() + + sequences = [master_sequence] + + parent = None + sub_scene = None + for s in sequences: + tracks = s.get_master_tracks() + subscene_track = None + for t in tracks: + if t.get_class() == unreal.MovieSceneSubTrack.static_class(): + subscene_track = t + break + if subscene_track: + sections = subscene_track.get_sections() + for ss in sections: + if ss.get_sequence().get_name() == sequence_name: + parent = s + sub_scene = ss + # subscene_track.remove_section(ss) + break + sequences.append(ss.get_sequence()) + # Update subscenes indexes. + i = 0 + for ss in sections: + ss.set_row_index(i) + i += 1 + + if parent: + break - data = asset_doc.get("data") + assert parent, "Could not find the parent sequence" - if data: - sequence.set_display_rate(unreal.FrameRate(data.get("fps"), 1.0)) - sequence.set_playback_start(data.get("frameStart")) - sequence.set_playback_end(data.get("frameEnd")) + EditorAssetLibrary.delete_asset(level_sequence.get_path_name()) settings = unreal.MovieSceneUserImportFBXSettings() settings.set_editor_property('reduce_keys', False) + tools = unreal.AssetToolsHelpers().get_asset_tools() + new_sequence = tools.create_asset( + asset_name=sequence_name, + package_path=asset_dir, + asset_class=unreal.LevelSequence, + factory=unreal.LevelSequenceFactoryNew() + ) + + new_sequence.set_display_rate(display_rate) + new_sequence.set_playback_start(playback_start) + new_sequence.set_playback_end(playback_end) + + sub_scene.set_sequence(new_sequence) + unreal.SequencerTools.import_fbx( EditorLevelLibrary.get_editor_world(), - sequence, - sequence.get_bindings(), + new_sequence, + new_sequence.get_bindings(), settings, str(representation["data"]["path"]) ) + data = { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]) + } + unreal_pipeline.imprint( + "{}/{}".format(asset_dir, container.get('container_name')), data) + + EditorLevelLibrary.save_current_level() + + asset_content = EditorAssetLibrary.list_assets( + asset_dir, recursive=True, include_folder=False) + + for a in asset_content: + EditorAssetLibrary.save_asset(a) + + EditorLevelLibrary.load_level(master_level) + def remove(self, container): - path = container["namespace"] - parent_path = os.path.dirname(path) + path = Path(container.get("namespace")) + parent_path = 
str(path.parent.as_posix()) + + ar = unreal.AssetRegistryHelpers.get_asset_registry() + filter = unreal.ARFilter( + class_names=["LevelSequence"], + package_paths=[f"{str(path.as_posix())}"], + recursive_paths=False) + sequences = ar.get_assets(filter) + + if not sequences: + raise Exception("Could not find sequence.") + + world = ar.get_asset_by_object_path( + EditorLevelLibrary.get_editor_world().get_path_name()) + + filter = unreal.ARFilter( + class_names=["World"], + package_paths=[f"{parent_path}"], + recursive_paths=True) + maps = ar.get_assets(filter) + + # There should be only one map in the list + if not maps: + raise Exception("Could not find map.") + + map = maps[0] + + EditorLevelLibrary.save_all_dirty_levels() + EditorLevelLibrary.load_level(map.get_full_name()) + + # Remove the camera from the level. + actors = EditorLevelLibrary.get_all_level_actors() + + for a in actors: + if a.__class__ == unreal.CineCameraActor: + EditorLevelLibrary.destroy_actor(a) + + EditorLevelLibrary.save_all_dirty_levels() + EditorLevelLibrary.load_level(world.get_full_name()) + + # There should be only one sequence in the path. + sequence_name = sequences[0].asset_name + + # Remove the Level Sequence from the parent. + # We need to traverse the hierarchy from the master sequence to find + # the level sequence. + root = "/Game/OpenPype" + namespace = container.get('namespace').replace(f"{root}/", "") + ms_asset = namespace.split('/')[0] + filter = unreal.ARFilter( + class_names=["LevelSequence"], + package_paths=[f"{root}/{ms_asset}"], + recursive_paths=False) + sequences = ar.get_assets(filter) + master_sequence = sequences[0].get_asset() + + sequences = [master_sequence] + + parent = None + for s in sequences: + tracks = s.get_master_tracks() + subscene_track = None + for t in tracks: + if t.get_class() == unreal.MovieSceneSubTrack.static_class(): + subscene_track = t + break + if subscene_track: + sections = subscene_track.get_sections() + for ss in sections: + if ss.get_sequence().get_name() == sequence_name: + parent = s + subscene_track.remove_section(ss) + break + sequences.append(ss.get_sequence()) + # Update subscenes indexes. + i = 0 + for ss in sections: + ss.set_row_index(i) + i += 1 + + if parent: + break + + assert parent, "Could not find the parent sequence" - EditorAssetLibrary.delete_directory(path) + EditorAssetLibrary.delete_directory(str(path.as_posix())) + # Check if there isn't any more assets in the parent folder, and + # delete it if not. 
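Both the camera update and remove paths above walk the sequence hierarchy the same way: start at the master LevelSequence, look at its MovieSceneSubTrack, and descend through the subscene sections until one references the target sequence. A stripped-down version of that traversal using an explicit queue instead of the diff's growing list (editor Python; `find_subsequence_section` is an illustrative name):

    import unreal

    def find_subsequence_section(master_sequence, target_name):
        """Search the subscene hierarchy for the section holding `target_name`.

        Returns (parent_sequence, section) or (None, None) if not found.
        """
        queue = [master_sequence]
        while queue:
            sequence = queue.pop(0)
            for track in sequence.get_master_tracks():
                if track.get_class() != unreal.MovieSceneSubTrack.static_class():
                    continue
                for section in track.get_sections():
                    sub_sequence = section.get_sequence()
                    if sub_sequence is None:
                        continue
                    if sub_sequence.get_name() == target_name:
                        return sequence, section
                    # Keep descending through nested subscenes.
                    queue.append(sub_sequence)
        return None, None

The caller would then remove the returned section from its track and re-pack the remaining sections' row indices, as the diff above does.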
asset_content = EditorAssetLibrary.list_assets( parent_path, recursive=False, include_folder=True ) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index e09255d88cf..412f77e3a92 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -208,7 +208,14 @@ def _process_family( actors.append(actor) - binding = sequence.add_possessable(actor) + binding = None + for p in sequence.get_possessables(): + if p.get_name() == actor.get_name(): + binding = p + break + + if not binding: + binding = sequence.add_possessable(actor) bindings.append(binding) @@ -299,15 +306,101 @@ def _import_animation( # Add animation to the sequencer bindings = bindings_dict.get(instance_name) + ar = unreal.AssetRegistryHelpers.get_asset_registry() + for binding in bindings: - binding.add_track(unreal.MovieSceneSkeletalAnimationTrack) - for track in binding.get_tracks(): + tracks = binding.get_tracks() + track = None + if not tracks: + track = binding.add_track( + unreal.MovieSceneSkeletalAnimationTrack) + else: + track = tracks[0] + + sections = track.get_sections() + section = None + if not sections: section = track.add_section() - section.set_range( - sequence.get_playback_start(), - sequence.get_playback_end()) + else: + section = sections[0] + sec_params = section.get_editor_property('params') - sec_params.set_editor_property('animation', animation) + curr_anim = sec_params.get_editor_property('animation') + + if curr_anim: + # Checks if the animation path has a container. + # If it does, it means that the animation is already + # in the sequencer. + anim_path = str(Path( + curr_anim.get_path_name()).parent + ).replace('\\', '/') + + filter = unreal.ARFilter( + class_names=["AssetContainer"], + package_paths=[anim_path], + recursive_paths=False) + containers = ar.get_assets(filter) + + if len(containers) > 0: + return + + section.set_range( + sequence.get_playback_start(), + sequence.get_playback_end()) + sec_params = section.get_editor_property('params') + sec_params.set_editor_property('animation', animation) + + def _generate_sequence(self, h, h_dir): + tools = unreal.AssetToolsHelpers().get_asset_tools() + + sequence = tools.create_asset( + asset_name=h, + package_path=h_dir, + asset_class=unreal.LevelSequence, + factory=unreal.LevelSequenceFactoryNew() + ) + + asset_data = legacy_io.find_one({ + "type": "asset", + "name": h_dir.split('/')[-1] + }) + + id = asset_data.get('_id') + + start_frames = [] + end_frames = [] + + elements = list( + legacy_io.find({"type": "asset", "data.visualParent": id})) + for e in elements: + start_frames.append(e.get('data').get('clipIn')) + end_frames.append(e.get('data').get('clipOut')) + + elements.extend(legacy_io.find({ + "type": "asset", + "data.visualParent": e.get('_id') + })) + + min_frame = min(start_frames) + max_frame = max(end_frames) + + sequence.set_display_rate( + unreal.FrameRate(asset_data.get('data').get("fps"), 1.0)) + sequence.set_playback_start(min_frame) + sequence.set_playback_end(max_frame) + + tracks = sequence.get_master_tracks() + track = None + for t in tracks: + if (t.get_class() == + unreal.MovieSceneCameraCutTrack.static_class()): + track = t + break + if not track: + track = sequence.add_master_track( + unreal.MovieSceneCameraCutTrack) + + return sequence, (min_frame, max_frame) def _process(self, lib_path, asset_dir, sequence, loaded=None): ar = unreal.AssetRegistryHelpers.get_asset_registry() @@ -326,6 +419,8 @@ def 
_process(self, lib_path, asset_dir, sequence, loaded=None): actors_dict = {} bindings_dict = {} + loaded_assets = [] + for element in data: reference = None if element.get('reference_fbx'): @@ -360,7 +455,7 @@ def _process(self, lib_path, asset_dir, sequence, loaded=None): continue options = { - "asset_dir": asset_dir + # "asset_dir": asset_dir } assets = load_container( @@ -370,6 +465,17 @@ def _process(self, lib_path, asset_dir, sequence, loaded=None): options=options ) + container = None + + for asset in assets: + obj = ar.get_asset_by_object_path(asset).get_asset() + if obj.get_class().get_name() == 'AssetContainer': + container = obj + if obj.get_class().get_name() == 'Skeleton': + skeleton = obj + + loaded_assets.append(container.get_path_name()) + instances = [ item for item in data if (item.get('reference_fbx') == reference or @@ -390,15 +496,6 @@ def _process(self, lib_path, asset_dir, sequence, loaded=None): actors_dict[inst] = actors bindings_dict[inst] = bindings - if family == 'rig': - # Finds skeleton among the imported assets - for asset in assets: - obj = ar.get_asset_by_object_path(asset).get_asset() - if obj.get_class().get_name() == 'Skeleton': - skeleton = obj - if skeleton: - break - if skeleton: skeleton_dict[reference] = skeleton else: @@ -411,6 +508,8 @@ def _process(self, lib_path, asset_dir, sequence, loaded=None): asset_dir, path, instance_name, skeleton, actors_dict, animation_file, bindings_dict, sequence) + return loaded_assets + @staticmethod def _remove_family(assets, components, class_name, prop_name): ar = unreal.AssetRegistryHelpers.get_asset_registry() @@ -478,10 +577,10 @@ def load(self, context, name, namespace, options): hierarchy = context.get('asset').get('data').get('parents') root = self.ASSET_ROOT hierarchy_dir = root - hierarchy_list = [] + hierarchy_dir_list = [] for h in hierarchy: hierarchy_dir = f"{hierarchy_dir}/{h}" - hierarchy_list.append(hierarchy_dir) + hierarchy_dir_list.append(hierarchy_dir) asset = context.get('asset').get('name') suffix = "_CON" if asset: @@ -499,43 +598,31 @@ def load(self, context, name, namespace, options): # Create map for the shot, and create hierarchy of map. If the maps # already exist, we will use them. 
- maps = [] - for h in hierarchy_list: - a = h.split('/')[-1] - map = f"{h}/{a}_map.{a}_map" - new = False - - if not EditorAssetLibrary.does_asset_exist(map): - EditorLevelLibrary.new_level(f"{h}/{a}_map") - new = True - - maps.append({"map": map, "new": new}) + h_dir = hierarchy_dir_list[0] + h_asset = hierarchy[0] + master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map" + if not EditorAssetLibrary.does_asset_exist(master_level): + EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map") + level = f"{asset_dir}/{asset}_map.{asset}_map" EditorLevelLibrary.new_level(f"{asset_dir}/{asset}_map") - maps.append( - {"map": f"{asset_dir}/{asset}_map.{asset}_map", "new": True}) - - for i in range(0, len(maps) - 1): - for j in range(i + 1, len(maps)): - if maps[j].get('new'): - EditorLevelLibrary.load_level(maps[i].get('map')) - EditorLevelUtils.add_level_to_world( - EditorLevelLibrary.get_editor_world(), - maps[j].get('map'), - unreal.LevelStreamingDynamic - ) - EditorLevelLibrary.save_all_dirty_levels() - - EditorLevelLibrary.load_level(maps[-1].get('map')) + + EditorLevelLibrary.load_level(master_level) + EditorLevelUtils.add_level_to_world( + EditorLevelLibrary.get_editor_world(), + level, + unreal.LevelStreamingDynamic + ) + EditorLevelLibrary.save_all_dirty_levels() + EditorLevelLibrary.load_level(level) # Get all the sequences in the hierarchy. It will create them, if # they don't exist. sequences = [] frame_ranges = [] - i = 0 - for h in hierarchy_list: + for (h_dir, h) in zip(hierarchy_dir_list, hierarchy): root_content = EditorAssetLibrary.list_assets( - h, recursive=False, include_folder=False) + h_dir, recursive=False, include_folder=False) existing_sequences = [ EditorAssetLibrary.find_asset_data(asset) @@ -545,55 +632,10 @@ def load(self, context, name, namespace, options): ] if not existing_sequences: - sequence = tools.create_asset( - asset_name=hierarchy[i], - package_path=h, - asset_class=unreal.LevelSequence, - factory=unreal.LevelSequenceFactoryNew() - ) - - asset_data = legacy_io.find_one({ - "type": "asset", - "name": h.split('/')[-1] - }) - - id = asset_data.get('_id') - - start_frames = [] - end_frames = [] - - elements = list( - legacy_io.find({"type": "asset", "data.visualParent": id})) - for e in elements: - start_frames.append(e.get('data').get('clipIn')) - end_frames.append(e.get('data').get('clipOut')) - - elements.extend(legacy_io.find({ - "type": "asset", - "data.visualParent": e.get('_id') - })) - - min_frame = min(start_frames) - max_frame = max(end_frames) - - sequence.set_display_rate( - unreal.FrameRate(asset_data.get('data').get("fps"), 1.0)) - sequence.set_playback_start(min_frame) - sequence.set_playback_end(max_frame) + sequence, frame_range = self._generate_sequence(h, h_dir) sequences.append(sequence) - frame_ranges.append((min_frame, max_frame)) - - tracks = sequence.get_master_tracks() - track = None - for t in tracks: - if (t.get_class() == - unreal.MovieSceneCameraCutTrack.static_class()): - track = t - break - if not track: - track = sequence.add_master_track( - unreal.MovieSceneCameraCutTrack) + frame_ranges.append(frame_range) else: for e in existing_sequences: sequences.append(e.get_asset()) @@ -601,8 +643,6 @@ def load(self, context, name, namespace, options): e.get_asset().get_playback_start(), e.get_asset().get_playback_end())) - i += 1 - shot = tools.create_asset( asset_name=asset, package_path=asset_dir, @@ -612,15 +652,11 @@ def load(self, context, name, namespace, options): # sequences and frame_ranges have the same length for i in range(0, 
len(sequences) - 1): - maps_to_add = [] - for j in range(i + 1, len(maps)): - maps_to_add.append(maps[j].get('map')) - self._set_sequence_hierarchy( sequences[i], sequences[i + 1], frame_ranges[i][1], frame_ranges[i + 1][0], frame_ranges[i + 1][1], - maps_to_add) + [level]) data = self._get_data(asset) shot.set_display_rate( @@ -631,11 +667,11 @@ def load(self, context, name, namespace, options): sequences[-1], shot, frame_ranges[-1][1], data.get('clipIn'), data.get('clipOut'), - [maps[-1].get('map')]) + [level]) - EditorLevelLibrary.load_level(maps[-1].get('map')) + EditorLevelLibrary.load_level(level) - self._process(self.fname, asset_dir, shot) + loaded_assets = self._process(self.fname, asset_dir, shot) for s in sequences: EditorAssetLibrary.save_asset(s.get_full_name()) @@ -656,7 +692,8 @@ def load(self, context, name, namespace, options): "loader": str(self.__class__.__name__), "representation": context["representation"]["_id"], "parent": context["representation"]["parent"], - "family": context["representation"]["context"]["family"] + "family": context["representation"]["context"]["family"], + "loaded_assets": loaded_assets } unreal_pipeline.imprint( "{}/{}".format(asset_dir, container_name), data) @@ -667,148 +704,192 @@ def load(self, context, name, namespace, options): for a in asset_content: EditorAssetLibrary.save_asset(a) - EditorLevelLibrary.load_level(maps[0].get('map')) + EditorLevelLibrary.load_level(master_level) return asset_content def update(self, container, representation): ar = unreal.AssetRegistryHelpers.get_asset_registry() - source_path = get_representation_path(representation) - destination_path = container["namespace"] - lib_path = Path(get_representation_path(representation)) - - self._remove_actors(destination_path) - - # Delete old animations - anim_path = f"{destination_path}/animations/" - EditorAssetLibrary.delete_directory(anim_path) - - with open(source_path, "r") as fp: - data = json.load(fp) - - references = [e.get('reference_fbx') for e in data] - asset_containers = self._get_asset_containers(destination_path) - loaded = [] - - # Delete all the assets imported with the previous version of the - # layout, if they're not in the new layout. - for asset_container in asset_containers: - if asset_container.get_editor_property( - 'asset_name') == container["objectName"]: - continue - ref = EditorAssetLibrary.get_metadata_tag( - asset_container.get_asset(), 'representation') - ppath = asset_container.get_editor_property('package_path') - - if ref not in references: - # If the asset is not in the new layout, delete it. - # Also check if the parent directory is empty, and delete that - # as well, if it is. - EditorAssetLibrary.delete_directory(ppath) - - parent = os.path.dirname(str(ppath)) - parent_content = EditorAssetLibrary.list_assets( - parent, recursive=False, include_folder=True - ) - - if len(parent_content) == 0: - EditorAssetLibrary.delete_directory(parent) - else: - # If the asset is in the new layout, search the instances in - # the JSON file, and create actors for them. 
- - actors_dict = {} - skeleton_dict = {} - - for element in data: - reference = element.get('reference_fbx') - instance_name = element.get('instance_name') - - skeleton = None + root = "/Game/OpenPype" - if reference == ref and ref not in loaded: - loaded.append(ref) + asset_dir = container.get('namespace') - family = element.get('family') + context = representation.get("context") - assets = EditorAssetLibrary.list_assets( - ppath, recursive=True, include_folder=False) + hierarchy = context.get('hierarchy').split("/") + h_dir = f"{root}/{hierarchy[0]}" + h_asset = hierarchy[0] + master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map" - instances = [ - item for item in data - if item.get('reference_fbx') == reference] + # # Create a temporary level to delete the layout level. + # EditorLevelLibrary.save_all_dirty_levels() + # EditorAssetLibrary.make_directory(f"{root}/tmp") + # tmp_level = f"{root}/tmp/temp_map" + # if not EditorAssetLibrary.does_asset_exist(f"{tmp_level}.temp_map"): + # EditorLevelLibrary.new_level(tmp_level) + # else: + # EditorLevelLibrary.load_level(tmp_level) - for instance in instances: - transform = instance.get('transform') - inst = instance.get('instance_name') + # Get layout level + filter = unreal.ARFilter( + class_names=["World"], + package_paths=[asset_dir], + recursive_paths=False) + levels = ar.get_assets(filter) + filter = unreal.ARFilter( + class_names=["LevelSequence"], + package_paths=[asset_dir], + recursive_paths=False) + sequences = ar.get_assets(filter) - actors = [] + layout_level = levels[0].get_editor_property('object_path') - if family == 'model': - actors = self._process_family( - assets, 'StaticMesh', transform, inst) - elif family == 'rig': - actors = self._process_family( - assets, 'SkeletalMesh', transform, inst) - actors_dict[inst] = actors + EditorLevelLibrary.save_all_dirty_levels() + EditorLevelLibrary.load_level(layout_level) - if family == 'rig': - # Finds skeleton among the imported assets - for asset in assets: - obj = ar.get_asset_by_object_path( - asset).get_asset() - if obj.get_class().get_name() == 'Skeleton': - skeleton = obj - if skeleton: - break + # Delete all the actors in the level + actors = unreal.EditorLevelLibrary.get_all_level_actors() + for actor in actors: + unreal.EditorLevelLibrary.destroy_actor(actor) - if skeleton: - skeleton_dict[reference] = skeleton - else: - skeleton = skeleton_dict.get(reference) + EditorLevelLibrary.save_current_level() - animation_file = element.get('animation') + EditorAssetLibrary.delete_directory(f"{asset_dir}/animations/") - if animation_file and skeleton: - self._import_animation( - destination_path, lib_path, - instance_name, skeleton, - actors_dict, animation_file) + source_path = get_representation_path(representation) - self._process(source_path, destination_path, loaded) + loaded_assets = self._process( + source_path, asset_dir, sequences[0].get_asset()) - container_path = "{}/{}".format(container["namespace"], - container["objectName"]) - # update metadata + data = { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]), + "loaded_assets": loaded_assets + } unreal_pipeline.imprint( - container_path, - { - "representation": str(representation["_id"]), - "parent": str(representation["parent"]) - }) + "{}/{}".format(asset_dir, container.get('container_name')), data) + + EditorLevelLibrary.save_current_level() asset_content = EditorAssetLibrary.list_assets( - destination_path, recursive=True, include_folder=False) + asset_dir, recursive=True, 
include_folder=False) for a in asset_content: EditorAssetLibrary.save_asset(a) + EditorLevelLibrary.load_level(master_level) + def remove(self, container): """ - First, destroy all actors of the assets to be removed. Then, deletes - the asset's directory. + Delete the layout. First, check if the assets loaded with the layout + are used by other layouts. If not, delete the assets. """ - path = container["namespace"] - parent_path = os.path.dirname(path) + path = Path(container.get("namespace")) + + containers = unreal_pipeline.ls() + layout_containers = [ + c for c in containers + if (c.get('asset_name') != container.get('asset_name') and + c.get('family') == "layout")] + + # Check if the assets have been loaded by other layouts, and deletes + # them if they haven't. + for asset in container.get('loaded_assets'): + layouts = [ + lc for lc in layout_containers + if asset in lc.get('loaded_assets')] + + if len(layouts) == 0: + EditorAssetLibrary.delete_directory(str(Path(asset).parent)) + + # Remove the Level Sequence from the parent. + # We need to traverse the hierarchy from the master sequence to find + # the level sequence. + root = "/Game/OpenPype" + namespace = container.get('namespace').replace(f"{root}/", "") + ms_asset = namespace.split('/')[0] + ar = unreal.AssetRegistryHelpers.get_asset_registry() + filter = unreal.ARFilter( + class_names=["LevelSequence"], + package_paths=[f"{root}/{ms_asset}"], + recursive_paths=False) + sequences = ar.get_assets(filter) + master_sequence = sequences[0].get_asset() + filter = unreal.ARFilter( + class_names=["World"], + package_paths=[f"{root}/{ms_asset}"], + recursive_paths=False) + levels = ar.get_assets(filter) + master_level = levels[0].get_editor_property('object_path') + + sequences = [master_sequence] + + parent = None + for s in sequences: + tracks = s.get_master_tracks() + subscene_track = None + visibility_track = None + for t in tracks: + if t.get_class() == unreal.MovieSceneSubTrack.static_class(): + subscene_track = t + if (t.get_class() == + unreal.MovieSceneLevelVisibilityTrack.static_class()): + visibility_track = t + if subscene_track: + sections = subscene_track.get_sections() + for ss in sections: + if ss.get_sequence().get_name() == container.get('asset'): + parent = s + subscene_track.remove_section(ss) + break + sequences.append(ss.get_sequence()) + # Update subscenes indexes. + i = 0 + for ss in sections: + ss.set_row_index(i) + i += 1 + + if visibility_track: + sections = visibility_track.get_sections() + for ss in sections: + if (unreal.Name(f"{container.get('asset')}_map") + in ss.get_level_names()): + visibility_track.remove_section(ss) + # Update visibility sections indexes. + i = -1 + prev_name = [] + for ss in sections: + if prev_name != ss.get_level_names(): + i += 1 + ss.set_row_index(i) + prev_name = ss.get_level_names() + if parent: + break - self._remove_actors(path) + assert parent, "Could not find the parent sequence" - EditorAssetLibrary.delete_directory(path) + # Create a temporary level to delete the layout level. + EditorLevelLibrary.save_all_dirty_levels() + EditorAssetLibrary.make_directory(f"{root}/tmp") + tmp_level = f"{root}/tmp/temp_map" + if not EditorAssetLibrary.does_asset_exist(f"{tmp_level}.temp_map"): + EditorLevelLibrary.new_level(tmp_level) + else: + EditorLevelLibrary.load_level(tmp_level) + + # Delete the layout directory. 
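The new layout `remove` logic above is essentially reference counting over the `loaded_assets` list that `load` and `update` now imprint on each layout container: an asset directory is only deleted when no other layout container still lists it. Reduced to plain Python over the container dictionaries (the `containers` argument stands in for whatever `unreal_pipeline.ls()` returns):

    def assets_to_delete(containers, current_container):
        """Return the loaded assets that only `current_container` references."""
        other_layouts = [
            c for c in containers
            if c.get("family") == "layout"
            and c.get("asset_name") != current_container.get("asset_name")
        ]
        orphaned = []
        for asset in current_container.get("loaded_assets") or []:
            still_used = any(
                asset in (layout.get("loaded_assets") or [])
                for layout in other_layouts
            )
            if not still_used:
                orphaned.append(asset)
        return orphaned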
+ EditorAssetLibrary.delete_directory(str(path)) + + EditorLevelLibrary.load_level(master_level) + EditorAssetLibrary.delete_directory(f"{root}/tmp") + + EditorLevelLibrary.save_current_level() + # Delete the parent folder if there aren't any more layouts in it. asset_content = EditorAssetLibrary.list_assets( - parent_path, recursive=False, include_folder=True + str(path.parent), recursive=False, include_folder=True ) if len(asset_content) == 0: - EditorAssetLibrary.delete_directory(parent_path) + EditorAssetLibrary.delete_directory(str(path.parent)) diff --git a/openpype/hosts/unreal/plugins/load/load_rig.py b/openpype/hosts/unreal/plugins/load/load_rig.py index ff844a5e941..c27bd23aaf4 100644 --- a/openpype/hosts/unreal/plugins/load/load_rig.py +++ b/openpype/hosts/unreal/plugins/load/load_rig.py @@ -52,54 +52,55 @@ def load(self, context, name, namespace, options): asset_name = "{}_{}".format(asset, name) else: asset_name = "{}".format(name) + version = context.get('version').get('name') tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - "{}/{}/{}".format(root, asset, name), suffix="") + f"{root}/{asset}/{name}_v{version:03d}", suffix="") container_name += suffix - unreal.EditorAssetLibrary.make_directory(asset_dir) - - task = unreal.AssetImportTask() - - task.set_editor_property('filename', self.fname) - task.set_editor_property('destination_path', asset_dir) - task.set_editor_property('destination_name', asset_name) - task.set_editor_property('replace_existing', False) - task.set_editor_property('automated', True) - task.set_editor_property('save', False) - - # set import options here - options = unreal.FbxImportUI() - options.set_editor_property('import_as_skeletal', True) - options.set_editor_property('import_animations', False) - options.set_editor_property('import_mesh', True) - options.set_editor_property('import_materials', True) - options.set_editor_property('import_textures', True) - options.set_editor_property('skeleton', None) - options.set_editor_property('create_physics_asset', False) - - options.set_editor_property('mesh_type_to_import', - unreal.FBXImportType.FBXIT_SKELETAL_MESH) - - options.skeletal_mesh_import_data.set_editor_property( - 'import_content_type', - unreal.FBXImportContentType.FBXICT_ALL - ) - # set to import normals, otherwise Unreal will compute them - # and it will take a long time, depending on the size of the mesh - options.skeletal_mesh_import_data.set_editor_property( - 'normal_import_method', - unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS - ) - - task.options = options - unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 - - # Create Asset Container - unreal_pipeline.create_container( - container=container_name, path=asset_dir) + if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): + unreal.EditorAssetLibrary.make_directory(asset_dir) + + task = unreal.AssetImportTask() + + task.set_editor_property('filename', self.fname) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', False) + task.set_editor_property('automated', True) + task.set_editor_property('save', False) + + # set import options here + options = unreal.FbxImportUI() + options.set_editor_property('import_as_skeletal', True) + options.set_editor_property('import_animations', False) + options.set_editor_property('import_mesh', True) + 
options.set_editor_property('import_materials', False) + options.set_editor_property('import_textures', False) + options.set_editor_property('skeleton', None) + options.set_editor_property('create_physics_asset', False) + + options.set_editor_property( + 'mesh_type_to_import', + unreal.FBXImportType.FBXIT_SKELETAL_MESH) + + options.skeletal_mesh_import_data.set_editor_property( + 'import_content_type', + unreal.FBXImportContentType.FBXICT_ALL) + # set to import normals, otherwise Unreal will compute them + # and it will take a long time, depending on the size of the mesh + options.skeletal_mesh_import_data.set_editor_property( + 'normal_import_method', + unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS) + + task.options = options + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 + + # Create Asset Container + unreal_pipeline.create_container( + container=container_name, path=asset_dir) data = { "schema": "openpype:container-2.0", diff --git a/openpype/hosts/unreal/plugins/publish/collect_render_instances.py b/openpype/hosts/unreal/plugins/publish/collect_render_instances.py index 9d60b65d087..9fb45ea7a7e 100644 --- a/openpype/hosts/unreal/plugins/publish/collect_render_instances.py +++ b/openpype/hosts/unreal/plugins/publish/collect_render_instances.py @@ -1,8 +1,11 @@ +import os from pathlib import Path + import unreal -import pyblish.api +from openpype.api import Anatomy from openpype.hosts.unreal.api import pipeline +import pyblish.api class CollectRenderInstances(pyblish.api.InstancePlugin): @@ -77,9 +80,15 @@ def process(self, instance): self.log.debug(f"new instance data: {new_data}") - project_dir = unreal.Paths.project_dir() - render_dir = (f"{project_dir}/Saved/MovieRenders/" - f"{s.get('output')}") + try: + project = os.environ.get("AVALON_PROJECT") + anatomy = Anatomy(project) + root = anatomy.roots['renders'] + except Exception: + raise Exception( + "Could not find render root in anatomy settings.") + + render_dir = f"{root}/{project}/{s.get('output')}" render_path = Path(render_dir) frames = [] diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index bfac9da5cec..a1f7c1e0f4c 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -316,6 +316,7 @@ def __init__( self.is_sequence = False self.template = None self.frames = [] + self.is_empty = True self.set_filenames(filenames, frames, template) @@ -323,7 +324,9 @@ def __str__(self): return json.dumps(self.to_dict()) def __repr__(self): - if self.is_sequence: + if self.is_empty: + filename = "< empty >" + elif self.is_sequence: filename = self.template else: filename = self.filenames[0] @@ -335,6 +338,9 @@ def __repr__(self): @property def label(self): + if self.is_empty: + return None + if not self.is_sequence: return self.filenames[0] @@ -386,6 +392,8 @@ def split_sequence(self): @property def ext(self): + if self.is_empty: + return None _, ext = os.path.splitext(self.filenames[0]) if ext: return ext @@ -393,6 +401,9 @@ def ext(self): @property def is_dir(self): + if self.is_empty: + return False + # QUESTION a better way how to define folder (in init argument?) 
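Both Unreal render changes in this diff (rendering.py and collect_render_instances.py above) swap the hard-coded Saved/MovieRenders location for the project's configured 'renders' root. The resolution they perform is small enough to show in isolation; `get_render_dir` is an illustrative wrapper around the same calls, not an OpenPype API:

    import os

    from openpype.api import Anatomy

    def get_render_dir(output_name):
        """Resolve "<renders root>/<project>/<output>" as the plugins above do."""
        project = os.environ.get("AVALON_PROJECT")
        try:
            anatomy = Anatomy(project)
            root = anatomy.roots["renders"]
        except Exception:
            # Mirrors the diff: fail loudly if the 'renders' root is missing.
            raise Exception("Could not find render root in anatomy settings.")
        return f"{root}/{project}/{output_name}"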
if self.ext: return False @@ -411,6 +422,7 @@ def set_filenames(self, filenames, frames=None, template=None): if is_sequence and not template: raise ValueError("Missing template for sequence") + self.is_empty = len(filenames) == 0 self.filenames = filenames self.template = template self.frames = frames @@ -560,11 +572,7 @@ def __init__( # Change horizontal label is_label_horizontal = kwargs.get("is_label_horizontal") if is_label_horizontal is None: - if single_item: - is_label_horizontal = True - else: - is_label_horizontal = False - kwargs["is_label_horizontal"] = is_label_horizontal + kwargs["is_label_horizontal"] = False self.single_item = single_item self.folders = folders diff --git a/openpype/lib/editorial.py b/openpype/lib/editorial.py index bf868953ea8..1ee21deedc2 100644 --- a/openpype/lib/editorial.py +++ b/openpype/lib/editorial.py @@ -17,7 +17,7 @@ def otio_range_to_frame_range(otio_range): start = _ot.to_frames( otio_range.start_time, otio_range.start_time.rate) end = start + _ot.to_frames( - otio_range.duration, otio_range.duration.rate) - 1 + otio_range.duration, otio_range.duration.rate) return start, end @@ -254,7 +254,7 @@ def get_media_range_with_retimes(otio_clip, handle_start, handle_end): media_in + source_in + offset_in) media_out_trimmed = ( media_in + source_in + ( - ((source_range.duration.value - 1) * abs( + (source_range.duration.value * abs( time_scalar)) + offset_out)) # calculate available handles diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index f20bef3854c..adb9bb2c3ad 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -493,8 +493,9 @@ def convert_for_ffmpeg( erase_reason = "has too long value ({} chars).".format( len(attr_value) ) + erase_attribute = True - if erase_attribute: + if not erase_attribute: for char in NOT_ALLOWED_FFMPEG_CHARS: if char in attr_value: erase_attribute = True @@ -623,8 +624,9 @@ def convert_input_paths_for_ffmpeg( erase_reason = "has too long value ({} chars).".format( len(attr_value) ) + erase_attribute = True - if erase_attribute: + if not erase_attribute: for char in NOT_ALLOWED_FFMPEG_CHARS: if char in attr_value: erase_attribute = True diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 0dd512ee8b6..5b49649359d 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -702,6 +702,32 @@ def collect_plugin_paths(self): ).format(expected_keys, " | ".join(msg_items))) return output + def collect_creator_plugin_paths(self, host_name): + """Helper to collect creator plugin paths from modules. + + Args: + host_name (str): For which host are creators meants. + + Returns: + list: List of creator plugin paths. + """ + # Output structure + from openpype_interfaces import IPluginPaths + + output = [] + for module in self.get_enabled_modules(): + # Skip module that do not inherit from `IPluginPaths` + if not isinstance(module, IPluginPaths): + continue + + paths = module.get_creator_plugin_paths(host_name) + if paths: + # Convert to list if value is not list + if not isinstance(paths, (list, tuple, set)): + paths = [paths] + output.extend(paths) + return output + def collect_launch_hook_paths(self): """Helper to collect hooks from modules inherited ILaunchHookPaths. 
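The new `ModulesManager.collect_creator_plugin_paths` helper above pairs with the `install_openpype_plugins` change to `openpype/pipeline/context_tools.py` further down in this patch. The snippet below is a minimal sketch of how a caller could consume the helper; the import location of `register_creator_plugin_path` is an assumption, as it is not shown in this diff.

import os

from openpype.modules import ModulesManager
# Assumed import path for the registration helper; not shown in this diff.
from openpype.pipeline import register_creator_plugin_path


def register_module_creator_plugins(host_name=None):
    # Fall back to the current host, mirroring install_openpype_plugins.
    host_name = host_name or os.environ.get("AVALON_APP")

    manager = ModulesManager()
    # Every enabled module implementing IPluginPaths may contribute paths.
    for path in manager.collect_creator_plugin_paths(host_name):
        register_creator_plugin_path(path)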
diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 8f776a33710..d4a34fbd41d 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -440,7 +440,10 @@ def process(self, instance): output_filename_0 = filename_0 - dirname = os.path.dirname(output_filename_0) + # this is needed because renderman handles directory and file + # prefixes separately + if self._instance.data["renderer"] == "renderman": + dirname = os.path.dirname(output_filename_0) # Create render folder ---------------------------------------------- try: @@ -515,6 +518,7 @@ def process(self, instance): "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", + "OPENPYPE_SG_USER", "AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK", diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 306237c78c4..782f85c9d2d 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -883,8 +883,10 @@ def process(self, instance): new_i = copy(i) new_i["version"] = at.get("version") new_i["subset"] = at.get("subset") + new_i["family"] = at.get("family") new_i["append"] = True - new_i["families"].append(at.get("family")) + # don't set subsetGroup if we are attaching + new_i.pop("subsetGroup") new_instances.append(new_i) self.log.info(" - {} / v{}".format( at.get("subset"), at.get("version"))) diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py index f5addde8ae5..a0bf6622e9e 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py @@ -569,7 +569,7 @@ def path_from_represenation(self, representation, anatomy): context["frame"] = self.sequence_splitter sequence_path = os.path.normpath( StringTemplate.format_strict_template( - context, template + template, context ) ) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 64af8cb2083..c4f7b1f05de 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -12,6 +12,7 @@ import os import sys +import collections import six import pyblish.api import clique @@ -84,6 +85,7 @@ def process(self, instance): asset_types_by_short = self._ensure_asset_types_exists( session, component_list ) + self._fill_component_locations(session, component_list) asset_versions_data_by_id = {} used_asset_versions = [] @@ -193,6 +195,70 @@ def _set_task_status(self, instance, project_entity, task_entity, session): session._configure_locations() six.reraise(tp, value, tb) + def _fill_component_locations(self, session, component_list): + components_by_location_name = collections.defaultdict(list) + components_by_location_id = collections.defaultdict(list) + for component_item in component_list: + # Location entity can be prefilled + # - this is not recommended as connection to ftrack server may + # be lost and in that case the entity is not valid when gets + # to this plugin + location = component_item.get("component_location") + if location is not None: + 
continue + + # Collect location id + location_id = component_item.get("component_location_id") + if location_id: + components_by_location_id[location_id].append( + component_item + ) + continue + + location_name = component_item.get("component_location_name") + if location_name: + components_by_location_name[location_name].append( + component_item + ) + continue + + # Skip if there is nothing to do + if not components_by_location_name and not components_by_location_id: + return + + # Query locations + query_filters = [] + if components_by_location_id: + joined_location_ids = ",".join([ + '"{}"'.format(location_id) + for location_id in components_by_location_id + ]) + query_filters.append("id in ({})".format(joined_location_ids)) + + if components_by_location_name: + joined_location_names = ",".join([ + '"{}"'.format(location_name) + for location_name in components_by_location_name + ]) + query_filters.append("name in ({})".format(joined_location_names)) + + locations = session.query( + "select id, name from Location where {}".format( + " or ".join(query_filters) + ) + ).all() + # Fill locations in components + for location in locations: + location_id = location["id"] + location_name = location["name"] + if location_id in components_by_location_id: + for component in components_by_location_id[location_id]: + component["component_location"] = location + + if location_name in components_by_location_name: + for component in components_by_location_name[location_name]: + component["component_location"] = location + def _ensure_asset_types_exists(self, session, component_list): """Make sure that all AssetType entities exists for integration. diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 5eecf34c3d2..c8d9e4117d2 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -3,6 +3,7 @@ import copy import pyblish.api +from openpype.lib import get_ffprobe_streams from openpype.lib.profiles_filtering import filter_profiles @@ -105,11 +106,10 @@ def process(self, instance): # These must be changed for each component "component_data": None, "component_path": None, - "component_location": None + "component_location": None, + "component_location_name": None } - ft_session = instance.context.data["ftrackSession"] - # Filter types of representations review_representations = [] thumbnail_representations = [] @@ -127,12 +127,8 @@ def process(self, instance): other_representations.append(repre) # Prepare ftrack locations - unmanaged_location = ft_session.query( - "Location where name is \"ftrack.unmanaged\"" - ).one() - ftrack_server_location = ft_session.query( - "Location where name is \"ftrack.server\"" - ).one() + unmanaged_location_name = "ftrack.unmanaged" + ftrack_server_location_name = "ftrack.server" # Components data component_list = [] @@ -142,6 +138,7 @@ def process(self, instance): # Create thumbnail components # TODO what if there is multiple thumbnails? 
first_thumbnail_component = None + first_thumbnail_component_repre = None for repre in thumbnail_representations: published_path = repre.get("published_path") if not published_path: @@ -169,12 +166,49 @@ def process(self, instance): src_components_to_add.append(copy.deepcopy(thumbnail_item)) # Create copy of first thumbnail if first_thumbnail_component is None: - first_thumbnail_component = copy.deepcopy(thumbnail_item) + first_thumbnail_component_repre = repre + first_thumbnail_component = thumbnail_item # Set location - thumbnail_item["component_location"] = ftrack_server_location + thumbnail_item["component_location_name"] = ( + ftrack_server_location_name + ) + # Add item to component list component_list.append(thumbnail_item) + if ( + not review_representations + and first_thumbnail_component is not None + ): + width = first_thumbnail_component_repre.get("width") + height = first_thumbnail_component_repre.get("height") + if not width or not height: + component_path = first_thumbnail_component["component_path"] + streams = [] + try: + streams = get_ffprobe_streams(component_path) + except Exception: + self.log.debug(( + "Failed to retrieve information about intput {}" + ).format(component_path)) + + for stream in streams: + if "width" in stream and "height" in stream: + width = stream["width"] + height = stream["height"] + break + + if width and height: + component_data = first_thumbnail_component["component_data"] + component_data["name"] = "ftrackreview-image" + component_data["metadata"] = { + "ftr_meta": json.dumps({ + "width": width, + "height": height, + "format": "image" + }) + } + # Create review components # Change asset name of each new component for review is_first_review_repre = True @@ -260,7 +294,9 @@ def process(self, instance): src_components_to_add.append(copy.deepcopy(review_item)) # Set location - review_item["component_location"] = ftrack_server_location + review_item["component_location_name"] = ( + ftrack_server_location_name + ) # Add item to component list component_list.append(review_item) @@ -272,8 +308,8 @@ def process(self, instance): first_thumbnail_component ) new_thumbnail_component["asset_data"]["name"] = asset_name - new_thumbnail_component["component_location"] = ( - ftrack_server_location + new_thumbnail_component["component_location_name"] = ( + ftrack_server_location_name ) component_list.append(new_thumbnail_component) @@ -282,7 +318,7 @@ def process(self, instance): # Make sure thumbnail is disabled copy_src_item["thumbnail"] = False # Set location - copy_src_item["component_location"] = unmanaged_location + copy_src_item["component_location_name"] = unmanaged_location_name # Modify name of component to have suffix "_src" component_data = copy_src_item["component_data"] component_name = component_data["name"] @@ -307,7 +343,7 @@ def process(self, instance): other_item["component_data"] = { "name": repre["name"] } - other_item["component_location"] = unmanaged_location + other_item["component_location_name"] = unmanaged_location_name other_item["component_path"] = published_path component_list.append(other_item) diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py index 13cbea690be..334485cab2d 100644 --- a/openpype/modules/interfaces.py +++ b/openpype/modules/interfaces.py @@ -14,11 +14,38 @@ class IPluginPaths(OpenPypeInterface): "publish": ["path/to/publish_plugins"] } """ - # TODO validation of an output + @abstractmethod def get_plugin_paths(self): pass + def get_creator_plugin_paths(self, host_name): + """Retreive 
creator plugin paths.
+
+        Give addons the ability to add creator plugin paths based on host name.
+
+        NOTES:
+        - Default implementation uses 'get_plugin_paths' and always returns
+            all creator plugins.
+        - Host name may help to organize plugins by host, but each creator
+            may also have host filtering.
+
+        Args:
+            host_name (str): For which host are the plugins meant.
+        """
+
+        paths = self.get_plugin_paths()
+        if not paths or "create" not in paths:
+            return []
+
+        create_paths = paths["create"]
+        if not create_paths:
+            return []
+
+        if not isinstance(create_paths, (list, tuple, set)):
+            create_paths = [create_paths]
+        return create_paths
+
 
 class ILaunchHookPaths(OpenPypeInterface):
     """Module has launch hook paths to return.
diff --git a/openpype/modules/shotgrid/README.md b/openpype/modules/shotgrid/README.md
new file mode 100644
index 00000000000..cbee0e9bf44
--- /dev/null
+++ b/openpype/modules/shotgrid/README.md
@@ -0,0 +1,19 @@
+## Shotgrid Module
+
+### Pre-requisites
+
+Install and launch a [shotgrid leecher](https://github.com/Ellipsanime/shotgrid-leecher) server
+
+### Quickstart
+
+The goal of this tutorial is to synchronize an already existing shotgrid project with OpenPype.
+
+- Activate the shotgrid module in the **system settings** and enter the shotgrid leecher server API url
+
+- Create a new OpenPype project with the **project manager**
+
+- Enter the shotgrid authentication info (url, script name, api key) and the shotgrid project ID related to this OpenPype project in the **project settings**
+
+- Use the batch interface (Tray > shotgrid > Launch batch), select your project and click "batch"
+
+- You can now access your shotgrid entities within the **avalon launcher** and publish information to shotgrid with **pyblish**
diff --git a/openpype/modules/shotgrid/__init__.py b/openpype/modules/shotgrid/__init__.py
new file mode 100644
index 00000000000..f1337a94920
--- /dev/null
+++ b/openpype/modules/shotgrid/__init__.py
@@ -0,0 +1,5 @@
+from .shotgrid_module import (
+    ShotgridModule,
+)
+
+__all__ = ("ShotgridModule",)
diff --git a/openpype/modules/shotgrid/aop/patch.py b/openpype/modules/shotgrid/aop/patch.py
new file mode 100644
index 00000000000..56477f3bf97
--- /dev/null
+++ b/openpype/modules/shotgrid/aop/patch.py
@@ -0,0 +1,52 @@
+from copy import copy
+
+
+from openpype.api import Logger
+from openpype.modules.shotgrid.lib import (
+    credentials,
+    settings,
+    server,
+)
+
+_LOG = Logger().get_logger("ShotgridModule.patch")
+
+
+def _patched_projects(
+    self, projection=None, only_active=True
+):
+    all_projects = list(self._prev_projects(projection, only_active))
+    if (
+        not credentials.get_local_login()
+        or not settings.filter_projects_by_login()
+    ):
+        return all_projects
+    try:
+        linked_names = _fetch_linked_project_names() or set()
+        return [x for x in all_projects if _upper(x["name"]) in linked_names]
+    except Exception as e:
+        print(e)
+        return all_projects
+
+
+def _upper(x):
+    return str(x).strip().upper()
+
+
+def _fetch_linked_project_names():
+    return {
+        _upper(x["project_name"])
+        for x in server.find_linked_projects(credentials.get_local_login())
+    }
+
+
+def patch_avalon_db():
+    _LOG.debug("Run avalon patching")
+    try:
+        from avalon.api import AvalonMongoDB
+        if AvalonMongoDB.projects is _patched_projects:
+            return None
+        _LOG.debug("Patch Avalon.projects method")
+        AvalonMongoDB._prev_projects = copy(AvalonMongoDB.projects)
+        AvalonMongoDB.projects = _patched_projects
+    except Exception as e:
+        _LOG.error("Unable to patch avalon db {}".format(e))
diff
--git a/openpype/modules/shotgrid/hooks/post_shotgrid_changes.py b/openpype/modules/shotgrid/hooks/post_shotgrid_changes.py new file mode 100644 index 00000000000..e8369ad3cbc --- /dev/null +++ b/openpype/modules/shotgrid/hooks/post_shotgrid_changes.py @@ -0,0 +1,9 @@ +from openpype.lib import PostLaunchHook + + +class PostShotgridHook(PostLaunchHook): + order = None + + def execute(self, *args, **kwargs): + print(args, kwargs) + pass diff --git a/openpype/modules/shotgrid/lib/__init__.py b/openpype/modules/shotgrid/lib/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/openpype/modules/shotgrid/lib/const.py b/openpype/modules/shotgrid/lib/const.py new file mode 100644 index 00000000000..2a34800fac6 --- /dev/null +++ b/openpype/modules/shotgrid/lib/const.py @@ -0,0 +1 @@ +MODULE_NAME = "shotgrid" diff --git a/openpype/modules/shotgrid/lib/credentials.py b/openpype/modules/shotgrid/lib/credentials.py new file mode 100644 index 00000000000..337c4f6ecb2 --- /dev/null +++ b/openpype/modules/shotgrid/lib/credentials.py @@ -0,0 +1,125 @@ + +from urllib.parse import urlparse + +import shotgun_api3 +from shotgun_api3.shotgun import AuthenticationFault + +from openpype.lib import OpenPypeSecureRegistry, OpenPypeSettingsRegistry +from openpype.modules.shotgrid.lib.record import Credentials + + +def _get_shotgrid_secure_key(hostname, key): + """Secure item key for entered hostname.""" + return f"shotgrid/{hostname}/{key}" + + +def _get_secure_value_and_registry( + hostname, + name, +): + key = _get_shotgrid_secure_key(hostname, name) + registry = OpenPypeSecureRegistry(key) + return registry.get_item(name, None), registry + + +def get_shotgrid_hostname(shotgrid_url): + + if not shotgrid_url: + raise Exception("Shotgrid url cannot be a null") + valid_shotgrid_url = ( + f"//{shotgrid_url}" if "//" not in shotgrid_url else shotgrid_url + ) + return urlparse(valid_shotgrid_url).hostname + + +# Credentials storing function (using keyring) + + +def get_credentials(shotgrid_url): + hostname = get_shotgrid_hostname(shotgrid_url) + if not hostname: + return None + login_value, _ = _get_secure_value_and_registry( + hostname, + Credentials.login_key_prefix(), + ) + password_value, _ = _get_secure_value_and_registry( + hostname, + Credentials.password_key_prefix(), + ) + return Credentials(login_value, password_value) + + +def save_credentials(login, password, shotgrid_url): + hostname = get_shotgrid_hostname(shotgrid_url) + _, login_registry = _get_secure_value_and_registry( + hostname, + Credentials.login_key_prefix(), + ) + _, password_registry = _get_secure_value_and_registry( + hostname, + Credentials.password_key_prefix(), + ) + clear_credentials(shotgrid_url) + login_registry.set_item(Credentials.login_key_prefix(), login) + password_registry.set_item(Credentials.password_key_prefix(), password) + + +def clear_credentials(shotgrid_url): + hostname = get_shotgrid_hostname(shotgrid_url) + login_value, login_registry = _get_secure_value_and_registry( + hostname, + Credentials.login_key_prefix(), + ) + password_value, password_registry = _get_secure_value_and_registry( + hostname, + Credentials.password_key_prefix(), + ) + + if login_value is not None: + login_registry.delete_item(Credentials.login_key_prefix()) + + if password_value is not None: + password_registry.delete_item(Credentials.password_key_prefix()) + + +# Login storing function (using json) + + +def get_local_login(): + reg = OpenPypeSettingsRegistry() + try: + return str(reg.get_item("shotgrid_login")) + except 
Exception: + return None + + +def save_local_login(login): + reg = OpenPypeSettingsRegistry() + reg.set_item("shotgrid_login", login) + + +def clear_local_login(): + reg = OpenPypeSettingsRegistry() + reg.delete_item("shotgrid_login") + + +def check_credentials( + login, + password, + shotgrid_url, +): + + if not shotgrid_url or not login or not password: + return False + try: + session = shotgun_api3.Shotgun( + shotgrid_url, + login=login, + password=password, + ) + session.preferences_read() + session.close() + except AuthenticationFault: + return False + return True diff --git a/openpype/modules/shotgrid/lib/record.py b/openpype/modules/shotgrid/lib/record.py new file mode 100644 index 00000000000..f62f4855d5a --- /dev/null +++ b/openpype/modules/shotgrid/lib/record.py @@ -0,0 +1,20 @@ + +class Credentials: + login = None + password = None + + def __init__(self, login, password) -> None: + super().__init__() + self.login = login + self.password = password + + def is_empty(self): + return not (self.login and self.password) + + @staticmethod + def login_key_prefix(): + return "login" + + @staticmethod + def password_key_prefix(): + return "password" diff --git a/openpype/modules/shotgrid/lib/server.py b/openpype/modules/shotgrid/lib/server.py new file mode 100644 index 00000000000..50645d40891 --- /dev/null +++ b/openpype/modules/shotgrid/lib/server.py @@ -0,0 +1,26 @@ +import traceback + +import requests + +from openpype.api import Logger +from openpype.modules.shotgrid.lib import ( + settings as settings_lib, +) + +_LOG = Logger().get_logger("ShotgridModule.server") + + +def find_linked_projects(email): + url = "".join( + [ + settings_lib.get_leecher_backend_url(), + "/user/", + email, + "/project-user-links", + ] + ) + try: + return requests.get(url).json() + except requests.exceptions.RequestException as e: + _LOG.error(e) + traceback.print_stack() diff --git a/openpype/modules/shotgrid/lib/settings.py b/openpype/modules/shotgrid/lib/settings.py new file mode 100644 index 00000000000..4a772de5b76 --- /dev/null +++ b/openpype/modules/shotgrid/lib/settings.py @@ -0,0 +1,41 @@ +import os + +from pymongo import MongoClient +from openpype.api import get_system_settings, get_project_settings +from openpype.modules.shotgrid.lib.const import MODULE_NAME +from openpype.modules.shotgrid.lib.tools import memoize + + +def get_project_list(): + mongo_url = os.getenv("OPENPYPE_MONGO") + client = MongoClient(mongo_url) + db = client['avalon'] + return db.list_collection_names() + + +@memoize +def get_shotgrid_project_settings(project): + return get_project_settings(project).get(MODULE_NAME, {}) + + +@memoize +def get_shotgrid_settings(): + return get_system_settings().get("modules", {}).get(MODULE_NAME, {}) + + +def get_shotgrid_servers(): + return get_shotgrid_settings().get("shotgrid_settings", {}) + + +def get_leecher_backend_url(): + return get_shotgrid_settings().get("leecher_backend_url") + + +def filter_projects_by_login(): + return bool(get_shotgrid_settings().get("filter_projects_by_login", False)) + + +def get_shotgrid_event_mongo_info(): + database_name = os.environ["OPENPYPE_DATABASE_NAME"] + collection_name = "shotgrid_events" + return database_name, collection_name diff --git a/openpype/modules/shotgrid/lib/tools.py b/openpype/modules/shotgrid/lib/tools.py new file mode 100644 index 00000000000..6305e9ca50e --- /dev/null +++ b/openpype/modules/shotgrid/lib/tools.py @@ -0,0 +1,16 @@ +from functools import wraps + + +def memoize(function): + memo = {} + + @wraps(function) + def 
wrapper(*args):
+        try:
+            return memo[args]
+        except KeyError:
+            rv = function(*args)
+            memo[args] = rv
+            return rv
+
+    return wrapper
diff --git a/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_entities.py b/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_entities.py
new file mode 100644
index 00000000000..a770c1eb87d
--- /dev/null
+++ b/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_entities.py
@@ -0,0 +1,103 @@
+import os
+
+import pyblish.api
+from pymongo import MongoClient
+
+from openpype.modules.shotgrid.lib.settings import (
+    get_shotgrid_project_settings,
+)
+
+
+class CollectShotgridEntities(pyblish.api.ContextPlugin):
+    """Collect shotgrid entities according to the current context"""
+
+    order = pyblish.api.CollectorOrder + 0.499
+    label = "Shotgrid entities"
+
+    def process(self, context):
+
+        avalon_project = context.data.get("projectEntity")
+        avalon_asset = context.data.get("assetEntity")
+        avalon_task_name = os.getenv("AVALON_TASK")
+
+        self.log.info(avalon_project)
+        self.log.info(avalon_asset)
+
+        sg_project = _get_shotgrid_project(avalon_project)
+        sg_task = _get_shotgrid_task(
+            avalon_project, avalon_asset, avalon_task_name
+        )
+        sg_entity = _get_shotgrid_entity(avalon_project, avalon_asset)
+
+        if sg_project:
+            context.data["shotgridProject"] = sg_project
+            self.log.info(
+                "Collected corresponding shotgrid project: {}".format(
+                    sg_project
+                )
+            )
+
+        if sg_task:
+            context.data["shotgridTask"] = sg_task
+            self.log.info(
+                "Collected corresponding shotgrid task: {}".format(sg_task)
+            )
+
+        if sg_entity:
+            context.data["shotgridEntity"] = sg_entity
+            self.log.info(
+                "Collected corresponding shotgrid entity: {}".format(sg_entity)
+            )
+
+    def _find_existing_version(self, code, context):
+
+        filters = [
+            ["project", "is", context.data.get("shotgridProject")],
+            ["sg_task", "is", context.data.get("shotgridTask")],
+            ["entity", "is", context.data.get("shotgridEntity")],
+            ["code", "is", code],
+        ]
+
+        sg = context.data.get("shotgridSession")
+        return sg.find_one("Version", filters, [])
+
+
+def _get_shotgrid_collection(project):
+    mongo_url = os.getenv("OPENPYPE_MONGO")
+    client = MongoClient(mongo_url)
+    return client.get_database("shotgrid_openpype").get_collection(project)
+
+
+def _get_shotgrid_project(avalon_project):
+    proj_settings = get_shotgrid_project_settings(avalon_project["name"])
+    shotgrid_project_id = proj_settings.get("shotgrid_project_id")
+    if shotgrid_project_id:
+        return {"type": "Project", "id": shotgrid_project_id}
+    return {}
+
+
+def _get_shotgrid_task(avalon_project, avalon_asset, avalon_task):
+    sg_col = _get_shotgrid_collection(avalon_project["name"])
+    shotgrid_task_hierarchy_row = sg_col.find_one(
+        {
+            "type": "Task",
+            "_id": {"$regex": "^" + avalon_task + "_[0-9]*"},
+            "parent": {"$regex": ".*," + avalon_asset["name"] + ","},
+        }
+    )
+    if shotgrid_task_hierarchy_row:
+        return {"type": "Task", "id": shotgrid_task_hierarchy_row["src_id"]}
+    return {}
+
+
+def _get_shotgrid_entity(avalon_project, avalon_asset):
+    sg_col = _get_shotgrid_collection(avalon_project["name"])
+    shotgrid_entity_hierarchy_row = sg_col.find_one(
+        {"_id": avalon_asset["name"]}
+    )
+    if shotgrid_entity_hierarchy_row:
+        return {
+            "type": shotgrid_entity_hierarchy_row["type"],
+            "id": shotgrid_entity_hierarchy_row["src_id"],
+        }
+    return {}
diff --git a/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_session.py b/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_session.py
new file mode 100644
index 00000000000..9d5d2271bf8
--- /dev/null
+++ b/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_session.py
@@ -0,0 +1,123 @@
+import os
+
+import pyblish.api
+import shotgun_api3
+from shotgun_api3.shotgun import AuthenticationFault
+
+from openpype.lib import OpenPypeSettingsRegistry
+from openpype.modules.shotgrid.lib.settings import (
+    get_shotgrid_servers,
+    get_shotgrid_project_settings,
+)
+
+
+class CollectShotgridSession(pyblish.api.ContextPlugin):
+    """Collect shotgrid session using user credentials"""
+
+    order = pyblish.api.CollectorOrder
+    label = "Shotgrid user session"
+
+    def process(self, context):
+
+        certificate_path = os.getenv("SHOTGUN_API_CACERTS")
+        if certificate_path is None or not os.path.exists(certificate_path):
+            self.log.info(
+                "SHOTGUN_API_CACERTS does not contain a valid \
+                path: {}".format(
+                    certificate_path
+                )
+            )
+            certificate_path = get_shotgrid_certificate()
+            self.log.info("Get certificate from shotgun_api3")
+
+        if not os.path.exists(certificate_path):
+            self.log.error(
+                "Could not find certificate in shotgun_api3: \
+                {}".format(
+                    certificate_path
+                )
+            )
+            return
+
+        set_shotgrid_certificate(certificate_path)
+        self.log.info("Set Certificate: {}".format(certificate_path))
+
+        avalon_project = os.getenv("AVALON_PROJECT")
+
+        shotgrid_settings = get_shotgrid_project_settings(avalon_project)
+        self.log.info("shotgrid settings: {}".format(shotgrid_settings))
+        shotgrid_servers_settings = get_shotgrid_servers()
+        self.log.info(
+            "shotgrid_servers_settings: {}".format(shotgrid_servers_settings)
+        )
+
+        shotgrid_server = shotgrid_settings.get("shotgrid_server", "")
+        if not shotgrid_server:
+            self.log.error(
+                "No Shotgrid server found, please choose a server and enter "
+                "script name and script key in OpenPype settings"
+            )
+
+        shotgrid_server_setting = shotgrid_servers_settings.get(
+            shotgrid_server, {}
+        )
+        shotgrid_url = shotgrid_server_setting.get("shotgrid_url", "")
+
+        shotgrid_script_name = shotgrid_server_setting.get(
+            "shotgrid_script_name", ""
+        )
+        shotgrid_script_key = shotgrid_server_setting.get(
+            "shotgrid_script_key", ""
+        )
+        if not shotgrid_script_name and not shotgrid_script_key:
+            self.log.error(
+                "No Shotgrid API credentials found, please enter "
+                "script name and script key in OpenPype settings"
+            )
+
+        login = get_login() or os.getenv("OPENPYPE_SG_USER")
+
+        if not login:
+            self.log.error(
+                "No Shotgrid login found, please "
+                "log in to shotgrid within the OpenPype Tray"
+            )
+
+        session = shotgun_api3.Shotgun(
+            base_url=shotgrid_url,
+            script_name=shotgrid_script_name,
+            api_key=shotgrid_script_key,
+            sudo_as_login=login,
+        )
+
+        try:
+            session.preferences_read()
+        except AuthenticationFault:
+            raise ValueError(
+                "Could not connect to shotgrid {} with user {}".format(
+                    shotgrid_url, login
+                )
+            )
+
+        self.log.info(
+            "Logged in to shotgrid {} with user {}".format(shotgrid_url, login)
+        )
+        context.data["shotgridSession"] = session
+        context.data["shotgridUser"] = login
+
+
+def get_shotgrid_certificate():
+    shotgun_api_path = os.path.dirname(shotgun_api3.__file__)
+    return os.path.join(shotgun_api_path, "lib", "certifi", "cacert.pem")
+
+
+def set_shotgrid_certificate(certificate):
+    os.environ["SHOTGUN_API_CACERTS"] = certificate
+
+
+def get_login():
+    reg = OpenPypeSettingsRegistry()
+    try:
+        return str(reg.get_item("shotgrid_login"))
+    except Exception:
+        return None
diff --git a/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_publish.py
b/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_publish.py new file mode 100644 index 00000000000..cfd2d10fd9d --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_publish.py @@ -0,0 +1,77 @@ +import os +import pyblish.api + + +class IntegrateShotgridPublish(pyblish.api.InstancePlugin): + """ + Create published Files from representations and add it to version. If + representation is tagged add shotgrid review, it will add it in + path to movie for a movie file or path to frame for an image sequence. + """ + + order = pyblish.api.IntegratorOrder + 0.499 + label = "Shotgrid Published Files" + + def process(self, instance): + + context = instance.context + + self.sg = context.data.get("shotgridSession") + + shotgrid_version = instance.data.get("shotgridVersion") + + for representation in instance.data.get("representations", []): + + local_path = representation.get("published_path") + code = os.path.basename(local_path) + + if representation.get("tags", []): + continue + + published_file = self._find_existing_publish( + code, context, shotgrid_version + ) + + published_file_data = { + "project": context.data.get("shotgridProject"), + "code": code, + "entity": context.data.get("shotgridEntity"), + "task": context.data.get("shotgridTask"), + "version": shotgrid_version, + "path": {"local_path": local_path}, + } + if not published_file: + published_file = self._create_published(published_file_data) + self.log.info( + "Create Shotgrid PublishedFile: {}".format(published_file) + ) + else: + self.sg.update( + published_file["type"], + published_file["id"], + published_file_data, + ) + self.log.info( + "Update Shotgrid PublishedFile: {}".format(published_file) + ) + + if instance.data["family"] == "image": + self.sg.upload_thumbnail( + published_file["type"], published_file["id"], local_path + ) + instance.data["shotgridPublishedFile"] = published_file + + def _find_existing_publish(self, code, context, shotgrid_version): + + filters = [ + ["project", "is", context.data.get("shotgridProject")], + ["task", "is", context.data.get("shotgridTask")], + ["entity", "is", context.data.get("shotgridEntity")], + ["version", "is", shotgrid_version], + ["code", "is", code], + ] + return self.sg.find_one("PublishedFile", filters, []) + + def _create_published(self, published_file_data): + + return self.sg.create("PublishedFile", published_file_data) diff --git a/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_version.py b/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_version.py new file mode 100644 index 00000000000..a1b7140e226 --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_version.py @@ -0,0 +1,92 @@ +import os +import pyblish.api + + +class IntegrateShotgridVersion(pyblish.api.InstancePlugin): + """Integrate Shotgrid Version""" + + order = pyblish.api.IntegratorOrder + 0.497 + label = "Shotgrid Version" + + sg = None + + def process(self, instance): + + context = instance.context + self.sg = context.data.get("shotgridSession") + + # TODO: Use path template solver to build version code from settings + anatomy = instance.data.get("anatomyData", {}) + code = "_".join( + [ + anatomy["project"]["code"], + anatomy["parent"], + anatomy["asset"], + anatomy["task"]["name"], + "v{:03}".format(int(anatomy["version"])), + ] + ) + + version = self._find_existing_version(code, context) + + if not version: + version = self._create_version(code, context) + self.log.info("Create Shotgrid version: {}".format(version)) + 
else: + self.log.info("Use existing Shotgrid version: {}".format(version)) + + data_to_update = {} + status = context.data.get("intent", {}).get("value") + if status: + data_to_update["sg_status_list"] = status + + for representation in instance.data.get("representations", []): + local_path = representation.get("published_path") + code = os.path.basename(local_path) + + if "shotgridreview" in representation.get("tags", []): + + if representation["ext"] in ["mov", "avi"]: + self.log.info( + "Upload review: {} for version shotgrid {}".format( + local_path, version.get("id") + ) + ) + self.sg.upload( + "Version", + version.get("id"), + local_path, + field_name="sg_uploaded_movie", + ) + + data_to_update["sg_path_to_movie"] = local_path + + elif representation["ext"] in ["jpg", "png", "exr", "tga"]: + path_to_frame = local_path.replace("0000", "#") + data_to_update["sg_path_to_frames"] = path_to_frame + + self.log.info("Update Shotgrid version with {}".format(data_to_update)) + self.sg.update("Version", version["id"], data_to_update) + + instance.data["shotgridVersion"] = version + + def _find_existing_version(self, code, context): + + filters = [ + ["project", "is", context.data.get("shotgridProject")], + ["sg_task", "is", context.data.get("shotgridTask")], + ["entity", "is", context.data.get("shotgridEntity")], + ["code", "is", code], + ] + return self.sg.find_one("Version", filters, []) + + def _create_version(self, code, context): + + version_data = { + "project": context.data.get("shotgridProject"), + "sg_task": context.data.get("shotgridTask"), + "entity": context.data.get("shotgridEntity"), + "code": code, + } + + return self.sg.create("Version", version_data) diff --git a/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py b/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py new file mode 100644 index 00000000000..c14c980e2a0 --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py @@ -0,0 +1,38 @@ +import pyblish.api +import openpype.api + + +class ValidateShotgridUser(pyblish.api.ContextPlugin): + """ + Check if user is valid and have access to the project. 
+ """ + + label = "Validate Shotgrid User" + order = openpype.api.ValidateContentsOrder + + def process(self, context): + sg = context.data.get("shotgridSession") + + login = context.data.get("shotgridUser") + self.log.info("Login shotgrid set in OpenPype is {}".format(login)) + project = context.data.get("shotgridProject") + self.log.info("Current shotgun project is {}".format(project)) + + if not (login and sg and project): + raise KeyError() + + user = sg.find_one("HumanUser", [["login", "is", login]], ["projects"]) + + self.log.info(user) + self.log.info(login) + user_projects_id = [p["id"] for p in user.get("projects", [])] + if not project.get("id") in user_projects_id: + raise PermissionError( + "Login {} don't have access to the project {}".format( + login, project + ) + ) + + self.log.info( + "Login {} have access to the project {}".format(login, project) + ) diff --git a/openpype/modules/shotgrid/server/README.md b/openpype/modules/shotgrid/server/README.md new file mode 100644 index 00000000000..15e056ff3e4 --- /dev/null +++ b/openpype/modules/shotgrid/server/README.md @@ -0,0 +1,5 @@ + +### Shotgrid server + +Please refer to the external project that covers Openpype/Shotgrid communication: + - https://github.com/Ellipsanime/shotgrid-leecher diff --git a/openpype/modules/shotgrid/shotgrid_module.py b/openpype/modules/shotgrid/shotgrid_module.py new file mode 100644 index 00000000000..6e0cbe987ba --- /dev/null +++ b/openpype/modules/shotgrid/shotgrid_module.py @@ -0,0 +1,63 @@ +import os +import threading + +from openpype_interfaces import ( + ITrayModule, + IPluginPaths, + ILaunchHookPaths, +) + +from openpype.modules import OpenPypeModule + +SHOTGRID_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class ShotgridModule( + OpenPypeModule, ITrayModule, IPluginPaths, ILaunchHookPaths +): + leecher_manager_url = None + name = "shotgrid" + enabled = False + project_id = None + tray_wrapper = None + + def initialize(self, modules_settings): + shotgrid_settings = modules_settings.get(self.name, dict()) + self.enabled = shotgrid_settings.get("enabled", False) + self.leecher_manager_url = shotgrid_settings.get( + "leecher_manager_url", "" + ) + + def connect_with_modules(self, enabled_modules): + pass + + def get_global_environments(self): + return {"PROJECT_ID": self.project_id} + + def get_plugin_paths(self): + return { + "publish": [ + os.path.join(SHOTGRID_MODULE_DIR, "plugins", "publish") + ] + } + + def get_launch_hook_paths(self): + return os.path.join(SHOTGRID_MODULE_DIR, "hooks") + + def tray_init(self): + from .tray.shotgrid_tray import ShotgridTrayWrapper + from .aop.patch import patch_avalon_db + + patch_avalon_db() + threading.Timer(10.0, patch_avalon_db).start() + + self.tray_wrapper = ShotgridTrayWrapper(self) + + def tray_start(self): + return self.tray_wrapper.validate() + + def tray_exit(self, *args, **kwargs): + return self.tray_wrapper + + def tray_menu(self, tray_menu): + return self.tray_wrapper.tray_menu(tray_menu) diff --git a/openpype/modules/shotgrid/tests/shotgrid/lib/test_credentials.py b/openpype/modules/shotgrid/tests/shotgrid/lib/test_credentials.py new file mode 100644 index 00000000000..1f78cf77c92 --- /dev/null +++ b/openpype/modules/shotgrid/tests/shotgrid/lib/test_credentials.py @@ -0,0 +1,34 @@ +import pytest +from assertpy import assert_that + +import openpype.modules.shotgrid.lib.credentials as sut + + +def test_missing_shotgrid_url(): + with pytest.raises(Exception) as ex: + # arrange + url = "" + # act + sut.get_shotgrid_hostname(url) 
+ # assert + assert_that(ex).is_equal_to("Shotgrid url cannot be a null") + + +def test_full_shotgrid_url(): + # arrange + url = "https://shotgrid.com/myinstance" + # act + actual = sut.get_shotgrid_hostname(url) + # assert + assert_that(actual).is_not_empty() + assert_that(actual).is_equal_to("shotgrid.com") + + +def test_incomplete_shotgrid_url(): + # arrange + url = "shotgrid.com/myinstance" + # act + actual = sut.get_shotgrid_hostname(url) + # assert + assert_that(actual).is_not_empty() + assert_that(actual).is_equal_to("shotgrid.com") diff --git a/openpype/modules/shotgrid/tray/credential_dialog.py b/openpype/modules/shotgrid/tray/credential_dialog.py new file mode 100644 index 00000000000..9d841d98be3 --- /dev/null +++ b/openpype/modules/shotgrid/tray/credential_dialog.py @@ -0,0 +1,201 @@ +import os +from Qt import QtCore, QtWidgets, QtGui + +from openpype import style +from openpype import resources +from openpype.modules.shotgrid.lib import settings, credentials + + +class CredentialsDialog(QtWidgets.QDialog): + SIZE_W = 450 + SIZE_H = 200 + + _module = None + _is_logged = False + url_label = None + login_label = None + password_label = None + url_input = None + login_input = None + password_input = None + input_layout = None + login_button = None + buttons_layout = None + main_widget = None + + login_changed = QtCore.Signal() + + def __init__(self, module, parent=None): + super(CredentialsDialog, self).__init__(parent) + + self._module = module + self._is_logged = False + + self.setWindowTitle("OpenPype - Shotgrid Login") + + icon = QtGui.QIcon(resources.get_openpype_icon_filepath()) + self.setWindowIcon(icon) + + self.setWindowFlags( + QtCore.Qt.WindowCloseButtonHint + | QtCore.Qt.WindowMinimizeButtonHint + ) + self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H)) + self.setMaximumSize(QtCore.QSize(self.SIZE_W + 100, self.SIZE_H + 100)) + self.setStyleSheet(style.load_stylesheet()) + + self.ui_init() + + def ui_init(self): + self.url_label = QtWidgets.QLabel("Shotgrid server:") + self.login_label = QtWidgets.QLabel("Login:") + self.password_label = QtWidgets.QLabel("Password:") + + self.url_input = QtWidgets.QComboBox() + # self.url_input.setReadOnly(True) + + self.login_input = QtWidgets.QLineEdit() + self.login_input.setPlaceholderText("login") + + self.password_input = QtWidgets.QLineEdit() + self.password_input.setPlaceholderText("password") + self.password_input.setEchoMode(QtWidgets.QLineEdit.Password) + + self.error_label = QtWidgets.QLabel("") + self.error_label.setStyleSheet("color: red;") + self.error_label.setWordWrap(True) + self.error_label.hide() + + self.input_layout = QtWidgets.QFormLayout() + self.input_layout.setContentsMargins(10, 15, 10, 5) + + self.input_layout.addRow(self.url_label, self.url_input) + self.input_layout.addRow(self.login_label, self.login_input) + self.input_layout.addRow(self.password_label, self.password_input) + self.input_layout.addRow(self.error_label) + + self.login_button = QtWidgets.QPushButton("Login") + self.login_button.setToolTip("Log in shotgrid instance") + self.login_button.clicked.connect(self._on_shotgrid_login_clicked) + + self.logout_button = QtWidgets.QPushButton("Logout") + self.logout_button.setToolTip("Log out shotgrid instance") + self.logout_button.clicked.connect(self._on_shotgrid_logout_clicked) + + self.buttons_layout = QtWidgets.QHBoxLayout() + self.buttons_layout.addWidget(self.logout_button) + self.buttons_layout.addWidget(self.login_button) + + self.main_widget = QtWidgets.QVBoxLayout(self) + 
self.main_widget.addLayout(self.input_layout) + self.main_widget.addLayout(self.buttons_layout) + self.setLayout(self.main_widget) + + def show(self, *args, **kwargs): + super(CredentialsDialog, self).show(*args, **kwargs) + self._fill_shotgrid_url() + self._fill_shotgrid_login() + + def _fill_shotgrid_url(self): + servers = settings.get_shotgrid_servers() + + if servers: + for _, v in servers.items(): + self.url_input.addItem("{}".format(v.get('shotgrid_url'))) + self._valid_input(self.url_input) + self.login_button.show() + self.logout_button.show() + enabled = True + else: + self.set_error("Ask your admin to add shotgrid server in settings") + self._invalid_input(self.url_input) + self.login_button.hide() + self.logout_button.hide() + enabled = False + + self.login_input.setEnabled(enabled) + self.password_input.setEnabled(enabled) + + def _fill_shotgrid_login(self): + login = credentials.get_local_login() + + if login: + self.login_input.setText(login) + + def _clear_shotgrid_login(self): + self.login_input.setText("") + self.password_input.setText("") + + def _on_shotgrid_login_clicked(self): + login = self.login_input.text().strip() + password = self.password_input.text().strip() + missing = [] + + if login == "": + missing.append("login") + self._invalid_input(self.login_input) + + if password == "": + missing.append("password") + self._invalid_input(self.password_input) + + url = self.url_input.currentText() + if url == "": + missing.append("url") + self._invalid_input(self.url_input) + + if len(missing) > 0: + self.set_error("You didn't enter {}".format(" and ".join(missing))) + return + + # if credentials.check_credentials( + # login=login, + # password=password, + # shotgrid_url=url, + # ): + credentials.save_local_login( + login=login + ) + os.environ['OPENPYPE_SG_USER'] = login + self._on_login() + + self.set_error("CANT LOGIN") + + def _on_shotgrid_logout_clicked(self): + credentials.clear_local_login() + del os.environ['OPENPYPE_SG_USER'] + self._clear_shotgrid_login() + self._on_logout() + + def set_error(self, msg): + self.error_label.setText(msg) + self.error_label.show() + + def _on_login(self): + self._is_logged = True + self.login_changed.emit() + self._close_widget() + + def _on_logout(self): + self._is_logged = False + self.login_changed.emit() + + def _close_widget(self): + self.hide() + + def _valid_input(self, input_widget): + input_widget.setStyleSheet("") + + def _invalid_input(self, input_widget): + input_widget.setStyleSheet("border: 1px solid red;") + + def login_with_credentials( + self, url, login, password + ): + verification = credentials.check_credentials(url, login, password) + if verification: + credentials.save_credentials(login, password, False) + self._module.set_credentials_to_env(login, password) + self.set_credentials(login, password) + self.login_changed.emit() + return verification diff --git a/openpype/modules/shotgrid/tray/shotgrid_tray.py b/openpype/modules/shotgrid/tray/shotgrid_tray.py new file mode 100644 index 00000000000..4038d77b03a --- /dev/null +++ b/openpype/modules/shotgrid/tray/shotgrid_tray.py @@ -0,0 +1,75 @@ +import os +import webbrowser + +from Qt import QtWidgets + +from openpype.modules.shotgrid.lib import credentials +from openpype.modules.shotgrid.tray.credential_dialog import ( + CredentialsDialog, +) + + +class ShotgridTrayWrapper: + module = None + credentials_dialog = None + logged_user_label = None + + def __init__(self, module): + self.module = module + self.credentials_dialog = CredentialsDialog(module) + 
self.credentials_dialog.login_changed.connect(self.set_login_label) + self.logged_user_label = QtWidgets.QAction("") + self.logged_user_label.setDisabled(True) + self.set_login_label() + + def show_batch_dialog(self): + if self.module.leecher_manager_url: + webbrowser.open(self.module.leecher_manager_url) + + def show_connect_dialog(self): + self.show_credential_dialog() + + def show_credential_dialog(self): + self.credentials_dialog.show() + self.credentials_dialog.activateWindow() + self.credentials_dialog.raise_() + + def set_login_label(self): + login = credentials.get_local_login() + if login: + self.logged_user_label.setText("{}".format(login)) + else: + self.logged_user_label.setText( + "No User logged in {0}".format(login) + ) + + def tray_menu(self, tray_menu): + # Add login to user menu + menu = QtWidgets.QMenu("Shotgrid", tray_menu) + show_connect_action = QtWidgets.QAction("Connect to Shotgrid", menu) + show_connect_action.triggered.connect(self.show_connect_dialog) + menu.addAction(self.logged_user_label) + menu.addSeparator() + menu.addAction(show_connect_action) + tray_menu.addMenu(menu) + + # Add manager to Admin menu + for m in tray_menu.findChildren(QtWidgets.QMenu): + if m.title() == "Admin": + shotgrid_manager_action = QtWidgets.QAction( + "Shotgrid manager", menu + ) + shotgrid_manager_action.triggered.connect( + self.show_batch_dialog + ) + m.addAction(shotgrid_manager_action) + + def validate(self): + login = credentials.get_local_login() + + if not login: + self.show_credential_dialog() + else: + os.environ["OPENPYPE_SG_USER"] = login + + return True diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 45ff8bc4d14..5a1d8467ec3 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -921,12 +921,18 @@ def get_enabled_projects(self): if self.enabled: for project in self.connection.projects(projection={"name": 1}): project_name = project["name"] - project_settings = self.get_sync_project_setting(project_name) - if project_settings and project_settings.get("enabled"): + if self.is_project_enabled(project_name): enabled_projects.append(project_name) return enabled_projects + def is_project_enabled(self, project_name): + if self.enabled: + project_settings = self.get_sync_project_setting(project_name) + if project_settings and project_settings.get("enabled"): + return True + return False + def handle_alternate_site(self, collection, representation, processed_site, file_id, synced_file_id): """ diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index 06bd639776b..e849f5b0d11 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -12,7 +12,7 @@ from pyblish.lib import MessageHandler import openpype -from openpype.modules import load_modules +from openpype.modules import load_modules, ModulesManager from openpype.settings import get_project_settings from openpype.lib import ( Anatomy, @@ -107,7 +107,7 @@ def modified_emit(obj, record): install_openpype_plugins() -def install_openpype_plugins(project_name=None): +def install_openpype_plugins(project_name=None, host_name=None): # Make sure modules are loaded load_modules() @@ -116,6 +116,18 @@ def install_openpype_plugins(project_name=None): pyblish.api.register_discovery_filter(filter_pyblish_plugins) register_loader_plugin_path(LOAD_PATH) + modules_manager = ModulesManager() + publish_plugin_dirs = 
modules_manager.collect_plugin_paths()["publish"] + for path in publish_plugin_dirs: + pyblish.api.register_plugin_path(path) + + if host_name is None: + host_name = os.environ.get("AVALON_APP") + + creator_paths = modules_manager.collect_creator_plugin_paths(host_name) + for creator_path in creator_paths: + register_creator_plugin_path(creator_path) + if project_name is None: project_name = os.environ.get("AVALON_PROJECT") diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 6f862e05883..2f1922c1035 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -749,6 +749,10 @@ def host_is_valid(self): """Is host valid for creation.""" return self._host_is_valid + @property + def host_name(self): + return os.environ["AVALON_APP"] + @property def log(self): """Dynamic access to logger.""" @@ -861,6 +865,17 @@ def reset_plugins(self, discover_publish_plugins=True): "Using first and skipping following" )) continue + + # Filter by host name + if ( + creator_class.host_name + and creator_class.host_name != self.host_name + ): + self.log.info(( + "Creator's host name is not supported for current host {}" + ).format(creator_class.host_name, self.host_name)) + continue + creator = creator_class( self, system_settings, diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index cbe19da064e..8006d4f4f84 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -63,6 +63,12 @@ class BaseCreator: # `openpype.pipeline.attribute_definitions` instance_attr_defs = [] + # Filtering by host name - can be used to be filtered by host name + # - used on all hosts when set to 'None' for Backwards compatibility + # - was added afterwards + # QUESTION make this required? 
+ host_name = None + def __init__( self, create_context, system_settings, project_settings, headless=False ): diff --git a/openpype/plugins/publish/extract_jpeg_exr.py b/openpype/plugins/publish/extract_jpeg_exr.py index d6d68540926..ae29f8b95b6 100644 --- a/openpype/plugins/publish/extract_jpeg_exr.py +++ b/openpype/plugins/publish/extract_jpeg_exr.py @@ -49,6 +49,7 @@ def process(self, instance): return filtered_repres = self._get_filtered_repres(instance) + for repre in filtered_repres: repre_files = repre["files"] if not isinstance(repre_files, (list, tuple)): @@ -151,6 +152,11 @@ def process(self, instance): if convert_dir is not None and os.path.exists(convert_dir): shutil.rmtree(convert_dir) + # Create only one representation with name 'thumbnail' + # TODO maybe handle way how to decide from which representation + # will be thumbnail created + break + def _get_filtered_repres(self, instance): filtered_repres = [] src_repres = instance.data.get("representations") or [] diff --git a/openpype/resources/app_icons/shotgrid.png b/openpype/resources/app_icons/shotgrid.png new file mode 100644 index 00000000000..6d0cc047f9e Binary files /dev/null and b/openpype/resources/app_icons/shotgrid.png differ diff --git a/openpype/settings/defaults/project_anatomy/imageio.json b/openpype/settings/defaults/project_anatomy/imageio.json index 09a5d98f468..f0be8f95f4d 100644 --- a/openpype/settings/defaults/project_anatomy/imageio.json +++ b/openpype/settings/defaults/project_anatomy/imageio.json @@ -55,36 +55,49 @@ "nukeNodeClass": "Write", "knobs": [ { + "type": "text", "name": "file_type", "value": "exr" }, { + "type": "text", "name": "datatype", "value": "16 bit half" }, { + "type": "text", "name": "compression", "value": "Zip (1 scanline)" }, { + "type": "bool", "name": "autocrop", - "value": "True" + "value": true }, { + "type": "color_gui", "name": "tile_color", - "value": "0xff0000ff" + "value": [ + 186, + 35, + 35, + 255 + ] }, { + "type": "text", "name": "channels", "value": "rgb" }, { + "type": "text", "name": "colorspace", "value": "linear" }, { + "type": "bool", "name": "create_directories", - "value": "True" + "value": true } ] }, @@ -95,36 +108,49 @@ "nukeNodeClass": "Write", "knobs": [ { + "type": "text", "name": "file_type", "value": "exr" }, { + "type": "text", "name": "datatype", "value": "16 bit half" }, { + "type": "text", "name": "compression", "value": "Zip (1 scanline)" }, { + "type": "bool", "name": "autocrop", - "value": "False" + "value": true }, { + "type": "color_gui", "name": "tile_color", - "value": "0xadab1dff" + "value": [ + 171, + 171, + 10, + 255 + ] }, { + "type": "text", "name": "channels", "value": "rgb" }, { + "type": "text", "name": "colorspace", "value": "linear" }, { + "type": "bool", "name": "create_directories", - "value": "True" + "value": true } ] }, @@ -135,32 +161,44 @@ "nukeNodeClass": "Write", "knobs": [ { + "type": "text", "name": "file_type", "value": "tiff" }, { + "type": "text", "name": "datatype", "value": "16 bit" }, { + "type": "text", "name": "compression", "value": "Deflate" }, { + "type": "color_gui", "name": "tile_color", - "value": "0x23ff00ff" + "value": [ + 56, + 162, + 7, + 255 + ] }, { + "type": "text", "name": "channels", "value": "rgb" }, { + "type": "text", "name": "colorspace", "value": "sRGB" }, { + "type": "bool", "name": "create_directories", - "value": "True" + "value": true } ] } diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index 
f0b2a7e5557..5c5a14bf210 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -98,4 +98,4 @@ } } } -} +} \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index c952d72bb1b..33ddc2f2514 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -22,10 +22,28 @@ "Main", "Mask" ], - "knobs": [] + "knobs": [], + "prenodes": { + "Reformat01": { + "nodeclass": "Reformat", + "dependent": "", + "knobs": [ + { + "type": "text", + "name": "resize", + "value": "none" + }, + { + "type": "bool", + "name": "black_outside", + "value": true + } + ] + } + } }, "CreateWritePrerender": { - "fpath_template": "{work}/prerenders/nuke/{subset}/{subset}.{frame}.{ext}", + "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}", "use_range_limit": true, "defaults": [ "Key01", @@ -35,7 +53,31 @@ "Part01" ], "reviewable": false, - "knobs": [] + "knobs": [], + "prenodes": {} + }, + "CreateWriteStill": { + "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{ext}", + "defaults": [ + "ImageFrame", + "MPFrame", + "LayoutFrame" + ], + "knobs": [], + "prenodes": { + "FrameHold01": { + "nodeclass": "FrameHold", + "dependent": "", + "knobs": [ + { + "type": "formatable", + "name": "first_frame", + "template": "{frame}", + "to_type": "number" + } + ] + } + } } }, "publish": { @@ -45,7 +87,8 @@ "camera", "gizmo", "source", - "render" + "render", + "write" ] }, "ValidateInstanceInContext": { @@ -131,17 +174,17 @@ "reformat_node_add": false, "reformat_node_config": [ { - "type": "string", + "type": "text", "name": "type", "value": "to format" }, { - "type": "string", + "type": "text", "name": "format", "value": "HD_1080" }, { - "type": "string", + "type": "text", "name": "filter", "value": "Lanczos6" }, diff --git a/openpype/settings/defaults/project_settings/shotgrid.json b/openpype/settings/defaults/project_settings/shotgrid.json new file mode 100644 index 00000000000..83b6f690741 --- /dev/null +++ b/openpype/settings/defaults/project_settings/shotgrid.json @@ -0,0 +1,22 @@ +{ + "shotgrid_project_id": 0, + "shotgrid_server": "", + "event": { + "enabled": false + }, + "fields": { + "asset": { + "type": "sg_asset_type" + }, + "sequence": { + "episode_link": "episode" + }, + "shot": { + "episode_link": "sg_episode", + "sequence_link": "sg_sequence" + }, + "task": { + "step": "step" + } + } +} diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 1b0ad67abb3..0b54cfd39ed 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -8,7 +8,6 @@ "default_variants": [ "Main" ], - "enable_review": false, "description": "Publish workfile backup", "detailed_description": "", "allow_sequences": true, diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 0fb99a26081..2b0de44fa9b 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -12,6 +12,26 @@ "LC_ALL": "C" }, "variants": { + "2023": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Autodesk\\Maya2023\\bin\\maya.exe" + ], + 
"darwin": [], + "linux": [ + "/usr/autodesk/maya2023/bin/maya" + ] + }, + "arguments": { + "windows": [], + "darwin": [], + "linux": [] + }, + "environment": { + "MAYA_VERSION": "2023" + } + }, "2022": { "use_python_2": false, "executables": { @@ -91,9 +111,6 @@ "environment": { "MAYA_VERSION": "2018" } - }, - "__dynamic_keys_labels__": { - "2022": "2022" } } }, diff --git a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json index d74269922f8..a5746e930be 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -137,6 +137,13 @@ } } }, + "shotgrid": { + "enabled": true, + "filter_projects_by_login": true, + "leecher_manager_url": "http://127.0.0.1:3000", + "leecher_backend_url": "http://127.0.0.1:8090", + "shotgrid_settings": {} + }, "timers_manager": { "enabled": true, "auto_stop": true, @@ -205,4 +212,4 @@ "linux": "" } } -} \ No newline at end of file +} diff --git a/openpype/settings/entities/__init__.py b/openpype/settings/entities/__init__.py index a173e2454fd..b2cb2204f44 100644 --- a/openpype/settings/entities/__init__.py +++ b/openpype/settings/entities/__init__.py @@ -107,6 +107,7 @@ TaskTypeEnumEntity, DeadlineUrlEnumEntity, AnatomyTemplatesEnumEntity, + ShotgridUrlEnumEntity ) from .list_entity import ListEntity @@ -171,6 +172,7 @@ "ToolsEnumEntity", "TaskTypeEnumEntity", "DeadlineUrlEnumEntity", + "ShotgridUrlEnumEntity", "AnatomyTemplatesEnumEntity", "ListEntity", diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index 92a397afbad..3b3dd47e618 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -1,10 +1,7 @@ import copy from .input_entities import InputEntity from .exceptions import EntitySchemaError -from .lib import ( - NOT_SET, - STRING_TYPE -) +from .lib import NOT_SET, STRING_TYPE class BaseEnumEntity(InputEntity): @@ -26,7 +23,7 @@ def schema_validations(self): for item in self.enum_items: key = tuple(item.keys())[0] if key in enum_keys: - reason = "Key \"{}\" is more than once in enum items.".format( + reason = 'Key "{}" is more than once in enum items.'.format( key ) raise EntitySchemaError(self, reason) @@ -34,7 +31,7 @@ def schema_validations(self): enum_keys.add(key) if not isinstance(key, STRING_TYPE): - reason = "Key \"{}\" has invalid type {}, expected {}.".format( + reason = 'Key "{}" has invalid type {}, expected {}.'.format( key, type(key), STRING_TYPE ) raise EntitySchemaError(self, reason) @@ -59,7 +56,7 @@ def set(self, value): for item in check_values: if item not in self.valid_keys: raise ValueError( - "{} Invalid value \"{}\". Expected one of: {}".format( + '{} Invalid value "{}". Expected one of: {}'.format( self.path, item, self.valid_keys ) ) @@ -84,7 +81,7 @@ def _item_initialization(self): self.valid_keys = set(all_keys) if self.multiselection: - self.valid_value_types = (list, ) + self.valid_value_types = (list,) value_on_not_set = [] if enum_default: if not isinstance(enum_default, list): @@ -109,7 +106,7 @@ def _item_initialization(self): self.value_on_not_set = key break - self.valid_value_types = (STRING_TYPE, ) + self.valid_value_types = (STRING_TYPE,) # GUI attribute self.placeholder = self.schema_data.get("placeholder") @@ -152,6 +149,7 @@ class HostsEnumEntity(BaseEnumEntity): Host name is not the same as application name. Host name defines implementation instead of application name. 
""" + schema_types = ["hosts-enum"] all_host_names = [ "aftereffects", @@ -169,7 +167,7 @@ class HostsEnumEntity(BaseEnumEntity): "tvpaint", "unreal", "standalonepublisher", - "webpublisher" + "webpublisher", ] def _item_initialization(self): @@ -210,7 +208,7 @@ def _item_initialization(self): self.valid_keys = valid_keys if self.multiselection: - self.valid_value_types = (list, ) + self.valid_value_types = (list,) self.value_on_not_set = [] else: for key in valid_keys: @@ -218,7 +216,7 @@ def _item_initialization(self): self.value_on_not_set = key break - self.valid_value_types = (STRING_TYPE, ) + self.valid_value_types = (STRING_TYPE,) # GUI attribute self.placeholder = self.schema_data.get("placeholder") @@ -226,14 +224,10 @@ def _item_initialization(self): def schema_validations(self): if self.hosts_filter: enum_len = len(self.enum_items) - if ( - enum_len == 0 - or (enum_len == 1 and self.use_empty_value) - ): - joined_filters = ", ".join([ - '"{}"'.format(item) - for item in self.hosts_filter - ]) + if enum_len == 0 or (enum_len == 1 and self.use_empty_value): + joined_filters = ", ".join( + ['"{}"'.format(item) for item in self.hosts_filter] + ) reason = ( "All host names were removed after applying" " host filters. {}" @@ -246,24 +240,25 @@ def schema_validations(self): invalid_filters.add(item) if invalid_filters: - joined_filters = ", ".join([ - '"{}"'.format(item) - for item in self.hosts_filter - ]) - expected_hosts = ", ".join([ - '"{}"'.format(item) - for item in self.all_host_names - ]) - self.log.warning(( - "Host filters containt invalid host names:" - " \"{}\" Expected values are {}" - ).format(joined_filters, expected_hosts)) + joined_filters = ", ".join( + ['"{}"'.format(item) for item in self.hosts_filter] + ) + expected_hosts = ", ".join( + ['"{}"'.format(item) for item in self.all_host_names] + ) + self.log.warning( + ( + "Host filters containt invalid host names:" + ' "{}" Expected values are {}' + ).format(joined_filters, expected_hosts) + ) super(HostsEnumEntity, self).schema_validations() class AppsEnumEntity(BaseEnumEntity): """Enum of applications for project anatomy attributes.""" + schema_types = ["apps-enum"] def _item_initialization(self): @@ -271,7 +266,7 @@ def _item_initialization(self): self.value_on_not_set = [] self.enum_items = [] self.valid_keys = set() - self.valid_value_types = (list, ) + self.valid_value_types = (list,) self.placeholder = None def _get_enum_values(self): @@ -352,7 +347,7 @@ def _item_initialization(self): self.value_on_not_set = [] self.enum_items = [] self.valid_keys = set() - self.valid_value_types = (list, ) + self.valid_value_types = (list,) self.placeholder = None def _get_enum_values(self): @@ -409,10 +404,10 @@ class TaskTypeEnumEntity(BaseEnumEntity): def _item_initialization(self): self.multiselection = self.schema_data.get("multiselection", True) if self.multiselection: - self.valid_value_types = (list, ) + self.valid_value_types = (list,) self.value_on_not_set = [] else: - self.valid_value_types = (STRING_TYPE, ) + self.valid_value_types = (STRING_TYPE,) self.value_on_not_set = "" self.enum_items = [] @@ -507,7 +502,8 @@ def _get_enum_values(self): enum_items_list = [] for server_name, url_entity in deadline_urls_entity.items(): enum_items_list.append( - {server_name: "{}: {}".format(server_name, url_entity.value)}) + {server_name: "{}: {}".format(server_name, url_entity.value)} + ) valid_keys.add(server_name) return enum_items_list, valid_keys @@ -530,6 +526,50 @@ def set_override_state(self, *args, **kwargs): 
self._current_value = tuple(self.valid_keys)[0] +class ShotgridUrlEnumEntity(BaseEnumEntity): + schema_types = ["shotgrid_url-enum"] + + def _item_initialization(self): + self.multiselection = False + + self.enum_items = [] + self.valid_keys = set() + + self.valid_value_types = (STRING_TYPE,) + self.value_on_not_set = "" + + # GUI attribute + self.placeholder = self.schema_data.get("placeholder") + + def _get_enum_values(self): + shotgrid_settings = self.get_entity_from_path( + "system_settings/modules/shotgrid/shotgrid_settings" + ) + + valid_keys = set() + enum_items_list = [] + for server_name, settings in shotgrid_settings.items(): + enum_items_list.append( + { + server_name: "{}: {}".format( + server_name, settings["shotgrid_url"].value + ) + } + ) + valid_keys.add(server_name) + return enum_items_list, valid_keys + + def set_override_state(self, *args, **kwargs): + super(ShotgridUrlEnumEntity, self).set_override_state(*args, **kwargs) + + self.enum_items, self.valid_keys = self._get_enum_values() + if not self.valid_keys: + self._current_value = "" + + elif self._current_value not in self.valid_keys: + self._current_value = tuple(self.valid_keys)[0] + + class AnatomyTemplatesEnumEntity(BaseEnumEntity): schema_types = ["anatomy-templates-enum"] diff --git a/openpype/settings/entities/schemas/projects_schema/schema_main.json b/openpype/settings/entities/schemas/projects_schema/schema_main.json index dbddd18c805..cb363d17fe8 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_main.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_main.json @@ -62,6 +62,10 @@ "type": "schema", "name": "schema_project_ftrack" }, + { + "type": "schema", + "name": "schema_project_shotgrid" + }, { "type": "schema", "name": "schema_project_deadline" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json index dfd3306b2e7..bc572cbdc8d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json @@ -106,8 +106,45 @@ } }, { - "type": "schema", - "name": "schema_nuke_knob_inputs" + "type": "schema_template", + "name": "template_nuke_knob_inputs", + "template_data": [ + { + "label": "Node knobs", + "key": "knobs" + } + ] + }, + { + "key": "prenodes", + "label": "Pre write nodes", + "type": "dict-modifiable", + "highlight_content": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "nodeclass", + "label": "Node class", + "type": "text" + }, + { + "key": "dependent", + "label": "Outside node dependency", + "type": "text" + }, + { + "type": "schema_template", + "name": "template_nuke_knob_inputs", + "template_data": [ + { + "label": "Node knobs", + "key": "knobs" + } + ] + } + ] + } } ] }, @@ -142,8 +179,108 @@ "label": "Add reviewable toggle" }, { - "type": "schema", - "name": "schema_nuke_knob_inputs" + "type": "schema_template", + "name": "template_nuke_knob_inputs", + "template_data": [ + { + "label": "Node knobs", + "key": "knobs" + } + ] + }, + { + "key": "prenodes", + "label": "Pre write nodes", + "type": "dict-modifiable", + "highlight_content": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "nodeclass", + "label": "Node class", + "type": "text" + }, + { + "key": "dependent", + "label": "Outside node dependency", + "type": "text" + }, + { + "type": "schema_template", + "name": "template_nuke_knob_inputs", + 
"template_data": [ + { + "label": "Node knobs", + "key": "knobs" + } + ] + } + ] + } + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "CreateWriteStill", + "label": "CreateWriteStill", + "is_group": true, + "children": [ + { + "type": "text", + "key": "fpath_template", + "label": "Path template" + }, + { + "type": "list", + "key": "defaults", + "label": "Subset name defaults", + "object_type": { + "type": "text" + } + }, + { + "type": "schema_template", + "name": "template_nuke_knob_inputs", + "template_data": [ + { + "label": "Node knobs", + "key": "knobs" + } + ] + }, + { + "key": "prenodes", + "label": "Pre write nodes", + "type": "dict-modifiable", + "highlight_content": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "nodeclass", + "label": "Node class", + "type": "text" + }, + { + "key": "dependent", + "label": "Outside node dependency", + "type": "text" + }, + { + "type": "schema_template", + "name": "template_nuke_knob_inputs", + "template_data": [ + { + "label": "Node knobs", + "key": "knobs" + } + ] + } + ] + } } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_shotgrid.json b/openpype/settings/entities/schemas/projects_schema/schema_project_shotgrid.json new file mode 100644 index 00000000000..4faeca89f36 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_shotgrid.json @@ -0,0 +1,98 @@ +{ + "type": "dict", + "key": "shotgrid", + "label": "Shotgrid", + "collapsible": true, + "is_file": true, + "children": [ + { + "type": "number", + "key": "shotgrid_project_id", + "label": "Shotgrid project id" + }, + { + "type": "shotgrid_url-enum", + "key": "shotgrid_server", + "label": "Shotgrid Server" + }, + { + "type": "dict", + "key": "event", + "label": "Event Handler", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + } + ] + }, + { + "type": "dict", + "key": "fields", + "label": "Fields Template", + "collapsible": true, + "children": [ + { + "type": "dict", + "key": "asset", + "label": "Asset", + "collapsible": true, + "children": [ + { + "type": "text", + "key": "type", + "label": "Asset Type" + } + ] + }, + { + "type": "dict", + "key": "sequence", + "label": "Sequence", + "collapsible": true, + "children": [ + { + "type": "text", + "key": "episode_link", + "label": "Episode link" + } + ] + }, + { + "type": "dict", + "key": "shot", + "label": "Shot", + "collapsible": true, + "children": [ + { + "type": "text", + "key": "episode_link", + "label": "Episode link" + }, + { + "type": "text", + "key": "sequence_link", + "label": "Sequence link" + } + ] + }, + { + "type": "dict", + "key": "task", + "label": "Task", + "collapsible": true, + "children": [ + { + "type": "text", + "key": "step", + "label": "Step link" + } + ] + } + ] + } + ] +} diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 59c675d4119..55c1b7b7d7c 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -45,12 +45,6 @@ "type": "text" } }, - { - "type": "boolean", - "key": "enable_review", - "label": "Enable review", - "tooltip": "Allow to create review from source file/s.\nFiles must be supported to be able create review." 
- }, { "type": "separator" }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json index 819f7121c43..ef8c907ddab 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -272,29 +272,12 @@ "label": "Nuke Node Class" }, { - "type": "collapsible-wrap", - "label": "Knobs", - "collapsible": true, - "collapsed": true, - "children": [ + "type": "schema_template", + "name": "template_nuke_knob_inputs", + "template_data": [ { - "key": "knobs", - "type": "list", - "object_type": { - "type": "dict", - "children": [ - { - "type": "text", - "key": "name", - "label": "Name" - }, - { - "type": "text", - "key": "value", - "label": "Value" - } - ] - } + "label": "Knobs", + "key": "knobs" } ] } @@ -333,29 +316,12 @@ "object_type": "text" }, { - "type": "collapsible-wrap", - "label": "Knobs overrides", - "collapsible": true, - "collapsed": true, - "children": [ + "type": "schema_template", + "name": "template_nuke_knob_inputs", + "template_data": [ { - "key": "knobs", - "type": "list", - "object_type": { - "type": "dict", - "children": [ - { - "type": "text", - "key": "name", - "label": "Name" - }, - { - "type": "text", - "key": "value", - "label": "Value" - } - ] - } + "label": "Knobs overrides", + "key": "knobs" } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_knob_inputs.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_knob_inputs.json deleted file mode 100644 index 0d03b892884..00000000000 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_knob_inputs.json +++ /dev/null @@ -1,151 +0,0 @@ -{ - "type": "collapsible-wrap", - "label": "Knob defaults", - "collapsible": true, - "collapsed": true, - "children": [{ - "type": "list", - "key": "knobs", - "object_type": { - "type": "dict-conditional", - "enum_key": "type", - "enum_label": "Type", - "enum_children": [ - { - "key": "string", - "label": "String", - "children": [ - { - "type": "text", - "key": "name", - "label": "Name" - }, - { - "type": "text", - "key": "value", - "label": "Value" - } - ] - }, - { - "key": "bool", - "label": "Boolean", - "children": [ - { - "type": "text", - "key": "name", - "label": "Name" - }, - { - "type": "boolean", - "key": "value", - "label": "Value" - } - ] - }, - { - "key": "number", - "label": "Number", - "children": [ - { - "type": "text", - "key": "name", - "label": "Name" - }, - { - "type": "number", - "key": "value", - "default": 1, - "decimal": 0 - } - - ] - }, - { - "key": "decimal_number", - "label": "Decimal number", - "children": [ - { - "type": "text", - "key": "name", - "label": "Name" - }, - { - "type": "number", - "key": "value", - "default": 1, - "decimal": 4 - } - - ] - }, - { - "key": "2d_vector", - "label": "2D vector", - "children": [ - { - "type": "text", - "key": "name", - "label": "Name" - }, - { - "type": "list-strict", - "key": "value", - "label": "Value", - "object_types": [ - { - "type": "number", - "key": "x", - "default": 1, - "decimal": 4 - }, - { - "type": "number", - "key": "y", - "default": 1, - "decimal": 4 - } - ] - } - ] - }, - { - "key": "3d_vector", - "label": "3D vector", - "children": [ - { - "type": "text", - "key": "name", - "label": "Name" - }, - { - "type": "list-strict", - "key": "value", - "label": "Value", - 
"object_types": [ - { - "type": "number", - "key": "x", - "default": 1, - "decimal": 4 - }, - { - "type": "number", - "key": "y", - "default": 1, - "decimal": 4 - }, - { - "type": "number", - "key": "y", - "default": 1, - "decimal": 4 - } - ] - } - ] - } - ] - } - }] -} diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json index d67fb309bd7..04df957d678 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json @@ -41,6 +41,9 @@ }, { "render": "render" + }, + { + "write": "write" } ] } @@ -253,108 +256,12 @@ "default": false }, { - "type": "collapsible-wrap", - "label": "Reformat Node Knobs", - "collapsible": true, - "collapsed": true, - "children": [ + "type": "schema_template", + "name": "template_nuke_knob_inputs", + "template_data": [ { - "type": "list", - "key": "reformat_node_config", - "object_type": { - "type": "dict-conditional", - "enum_key": "type", - "enum_label": "Type", - "enum_children": [ - { - "key": "string", - "label": "String", - "children": [ - { - "type": "text", - "key": "name", - "label": "Name" - }, - { - "type": "text", - "key": "value", - "label": "Value" - } - ] - }, - { - "key": "bool", - "label": "Boolean", - "children": [ - { - "type": "text", - "key": "name", - "label": "Name" - }, - { - "type": "boolean", - "key": "value", - "label": "Value" - } - ] - }, - { - "key": "number", - "label": "Number", - "children": [ - { - "type": "text", - "key": "name", - "label": "Name" - }, - { - "type": "list-strict", - "key": "value", - "label": "Value", - "object_types": [ - { - "type": "number", - "key": "number", - "default": 1, - "decimal": 4 - } - ] - } - - ] - }, - { - "key": "list_numbers", - "label": "2 Numbers", - "children": [ - { - "type": "text", - "key": "name", - "label": "Name" - }, - { - "type": "list-strict", - "key": "value", - "label": "Value", - "object_types": [ - { - "type": "number", - "key": "x", - "default": 1, - "decimal": 4 - }, - { - "type": "number", - "key": "y", - "default": 1, - "decimal": 4 - } - ] - } - ] - } - ] - } + "label": "Reformat Node Knobs", + "key": "reformat_node_config" } ] }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json index 484fbf9d070..a4b28f47bcf 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json @@ -13,6 +13,9 @@ { "ftrackreview": "Add review to Ftrack" }, + { + "shotgridreview": "Add review to Shotgrid" + }, { "delete": "Delete output" }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_knob_inputs.json b/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_knob_inputs.json new file mode 100644 index 00000000000..52a14e06363 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_knob_inputs.json @@ -0,0 +1,275 @@ +[ + { + "type": "collapsible-wrap", + "label": "{label}", + "collapsible": true, + "collapsed": true, + "children": [{ + "type": "list", + "key": "{key}", + "object_type": { + "type": "dict-conditional", + "enum_key": "type", + "enum_label": "Type", + "enum_children": 
[ + { + "key": "text", + "label": "Text", + "children": [ + { + "type": "text", + "key": "name", + "label": "Name" + }, + { + "type": "text", + "key": "value", + "label": "Value" + } + ] + }, + { + "key": "formatable", + "label": "Formate from template", + "children": [ + { + "type": "text", + "key": "name", + "label": "Name" + }, + { + "type": "text", + "key": "template", + "label": "Template", + "placeholder": "{{key}} or {{key}};{{key}}" + }, + { + "type": "enum", + "key": "to_type", + "label": "Knob type", + "enum_items": [ + { + "text": "Text" + }, + { + "number": "Number" + }, + { + "decimal_number": "Decimal number" + }, + { + "2d_vector": "2D vector" + } + ] + } + ] + }, + { + "key": "color_gui", + "label": "Color GUI", + "children": [ + { + "type": "text", + "key": "name", + "label": "Name" + }, + { + "type": "color", + "key": "value", + "label": "Value", + "use_alpha": false + } + ] + }, + { + "key": "bool", + "label": "Boolean", + "children": [ + { + "type": "text", + "key": "name", + "label": "Name" + }, + { + "type": "boolean", + "key": "value", + "label": "Value" + } + ] + }, + { + "key": "number", + "label": "Number", + "children": [ + { + "type": "text", + "key": "name", + "label": "Name" + }, + { + "type": "number", + "key": "value", + "default": 1, + "decimal": 0, + "maximum": 99999999 + } + + ] + }, + { + "key": "decimal_number", + "label": "Decimal number", + "children": [ + { + "type": "text", + "key": "name", + "label": "Name" + }, + { + "type": "number", + "key": "value", + "default": 1, + "decimal": 4, + "maximum": 99999999 + } + + ] + }, + { + "key": "2d_vector", + "label": "2D vector", + "children": [ + { + "type": "text", + "key": "name", + "label": "Name" + }, + { + "type": "list-strict", + "key": "value", + "label": "Value", + "object_types": [ + { + "type": "number", + "key": "x", + "default": 1, + "decimal": 4, + "maximum": 99999999 + }, + { + "type": "number", + "key": "y", + "default": 1, + "decimal": 4, + "maximum": 99999999 + } + ] + } + ] + }, + { + "key": "3d_vector", + "label": "3D vector", + "children": [ + { + "type": "text", + "key": "name", + "label": "Name" + }, + { + "type": "list-strict", + "key": "value", + "label": "Value", + "object_types": [ + { + "type": "number", + "key": "x", + "default": 1, + "decimal": 4, + "maximum": 99999999 + }, + { + "type": "number", + "key": "y", + "default": 1, + "decimal": 4, + "maximum": 99999999 + }, + { + "type": "number", + "key": "y", + "default": 1, + "decimal": 4, + "maximum": 99999999 + } + ] + } + ] + }, + { + "key": "color", + "label": "Color", + "children": [ + { + "type": "text", + "key": "name", + "label": "Name" + }, + { + "type": "list-strict", + "key": "value", + "label": "Value", + "object_types": [ + { + "type": "number", + "key": "x", + "default": 1, + "decimal": 4, + "maximum": 99999999 + }, + { + "type": "number", + "key": "x", + "default": 1, + "decimal": 4, + "maximum": 99999999 + }, + { + "type": "number", + "key": "y", + "default": 1, + "decimal": 4, + "maximum": 99999999 + }, + { + "type": "number", + "key": "y", + "default": 1, + "decimal": 4, + "maximum": 99999999 + } + ] + } + ] + }, + { + "key": "__legacy__", + "label": "_ Legacy type _", + "children": [ + { + "type": "text", + "key": "name", + "label": "Name" + }, + { + "type": "text", + "key": "value", + "label": "Value" + } + ] + } + ] + } + }] + } +] diff --git a/openpype/settings/entities/schemas/system_schema/schema_modules.json b/openpype/settings/entities/schemas/system_schema/schema_modules.json index 
52595914edc..dacbc8c3a1a 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_modules.json +++ b/openpype/settings/entities/schemas/system_schema/schema_modules.json @@ -44,6 +44,60 @@ "type": "schema", "name": "schema_ftrack" }, + { + "type": "dict", + "key": "shotgrid", + "label": "Shotgrid", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "text", + "key": "leecher_manager_url", + "label": "Shotgrid Leecher Manager URL" + }, + { + "type": "text", + "key": "leecher_backend_url", + "label": "Shotgrid Leecher Backend URL" + }, + { + "type": "boolean", + "key": "filter_projects_by_login", + "label": "Filter projects by SG login" + }, + { + "type": "dict-modifiable", + "key": "shotgrid_settings", + "label": "Shotgrid Servers", + "object_type": { + "type": "dict", + "children": [ + { + "key": "shotgrid_url", + "label": "Server URL", + "type": "text" + }, + { + "key": "shotgrid_script_name", + "label": "Script Name", + "type": "text" + }, + { + "key": "shotgrid_script_key", + "label": "Script api key", + "type": "text" + } + ] + } + } + ] + }, { "type": "dict", "key": "timers_manager", diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index f921b9c318d..6df41112c83 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -291,6 +291,22 @@ def _system_settings_backwards_compatible_conversion(studio_overrides): } +def _project_anatomy_backwards_compatible_conversion(project_anatomy): + # Backwards compatibility of node settings in Nuke 3.9.x - 3.10.0 + # - source PR - https://github.com/pypeclub/OpenPype/pull/3143 + value = project_anatomy + for key in ("imageio", "nuke", "nodes", "requiredNodes"): + if key not in value: + return + value = value[key] + + for item in value: + for node in item.get("knobs") or []: + if "type" in node: + break + node["type"] = "__legacy__" + + @require_handler def get_studio_system_settings_overrides(return_version=False): output = _SETTINGS_HANDLER.get_studio_system_settings_overrides( @@ -326,7 +342,9 @@ def get_project_settings_overrides(project_name, return_version=False): @require_handler def get_project_anatomy_overrides(project_name): - return _SETTINGS_HANDLER.get_project_anatomy_overrides(project_name) + output = _SETTINGS_HANDLER.get_project_anatomy_overrides(project_name) + _project_anatomy_backwards_compatible_conversion(output) + return output @require_handler diff --git a/openpype/style/style.css b/openpype/style/style.css index ae04a433fbc..d76d833be14 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -856,18 +856,31 @@ QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical { } /* New Create/Publish UI */ +#CreatorDetailedDescription { + padding-left: 5px; + padding-right: 5px; + padding-top: 5px; + background: transparent; + border: 1px solid {color:border}; +} + #CreateDialogHelpButton { background: rgba(255, 255, 255, 31); + border-top-left-radius: 0.2em; + border-bottom-left-radius: 0.2em; border-top-right-radius: 0; border-bottom-right-radius: 0; font-size: 10pt; font-weight: bold; - padding: 3px 3px 3px 3px; + padding: 0px; } #CreateDialogHelpButton:hover { background: rgba(255, 255, 255, 63); } +#CreateDialogHelpButton QWidget { + background: transparent; +} #PublishLogConsole { font-family: "Noto Sans Mono"; @@ -1014,7 +1027,44 @@ VariantInputsWidget QToolButton { border-left: 1px solid {color:border}; } -#TasksCombobox[state="invalid"], 
#AssetNameInput[state="invalid"] { +#AssetNameInputWidget { + background: {color:bg-inputs}; + border: 1px solid {color:border}; + border-radius: 0.3em; +} + +#AssetNameInputWidget QWidget { + background: transparent; +} + +#AssetNameInputButton { + border-bottom-left-radius: 0px; + border-top-left-radius: 0px; + padding: 0px; + qproperty-iconSize: 11px 11px; + border-left: 1px solid {color:border}; + border-right: none; + border-top: none; + border-bottom: none; +} + +#AssetNameInput { + border-bottom-right-radius: 0px; + border-top-right-radius: 0px; + border: none; +} + +#AssetNameInputWidget:hover { + border-color: {color:border-hover}; +} +#AssetNameInputWidget:focus{ + border-color: {color:border-focus}; +} +#AssetNameInputWidget:disabled { + background: {color:bg-inputs-disabled}; +} + +#TasksCombobox[state="invalid"], #AssetNameInputWidget[state="invalid"], #AssetNameInputButton[state="invalid"] { border-color: {color:publisher:error}; } diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 8cb8f300131..6f39c428bed 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -1,6 +1,7 @@ import copy import re import math +import time from uuid import uuid4 from Qt import QtCore, QtGui @@ -38,6 +39,14 @@ def is_filtering_recursible(): class BaseRepresentationModel(object): """Methods for SyncServer useful in multiple models""" + # Cheap & hackish way how to avoid refreshing of whole sync server module + # on each selection change + _last_project = None + _modules_manager = None + _last_project_cache = 0 + _last_manager_cache = 0 + _max_project_cache_time = 30 + _max_manager_cache_time = 60 def reset_sync_server(self, project_name=None): """Sets/Resets sync server vars after every change (refresh.)""" @@ -47,28 +56,53 @@ def reset_sync_server(self, project_name=None): remote_site = remote_provider = None if not project_name: - project_name = self.dbcon.Session["AVALON_PROJECT"] + project_name = self.dbcon.Session.get("AVALON_PROJECT") else: self.dbcon.Session["AVALON_PROJECT"] = project_name - if project_name: - manager = ModulesManager() - sync_server = manager.modules_by_name["sync_server"] + if not project_name: + self.repre_icons = repre_icons + self.sync_server = sync_server + self.active_site = active_site + self.active_provider = active_provider + self.remote_site = remote_site + self.remote_provider = remote_provider + return + + now_time = time.time() + project_cache_diff = now_time - self._last_project_cache + if project_cache_diff > self._max_project_cache_time: + self._last_project = None + + if project_name == self._last_project: + return - if project_name in sync_server.get_enabled_projects(): - active_site = sync_server.get_active_site(project_name) - active_provider = sync_server.get_provider_for_site( - project_name, active_site) - if active_site == 'studio': # for studio use explicit icon - active_provider = 'studio' + self._last_project = project_name + self._last_project_cache = now_time - remote_site = sync_server.get_remote_site(project_name) - remote_provider = sync_server.get_provider_for_site( - project_name, remote_site) - if remote_site == 'studio': # for studio use explicit icon - remote_provider = 'studio' + manager_cache_diff = now_time - self._last_manager_cache + if manager_cache_diff > self._max_manager_cache_time: + self._modules_manager = None + + if self._modules_manager is None: + self._modules_manager = ModulesManager() + self._last_manager_cache = now_time + + sync_server = 
self._modules_manager.modules_by_name["sync_server"] + if sync_server.is_project_enabled(project_name): + active_site = sync_server.get_active_site(project_name) + active_provider = sync_server.get_provider_for_site( + project_name, active_site) + if active_site == 'studio': # for studio use explicit icon + active_provider = 'studio' + + remote_site = sync_server.get_remote_site(project_name) + remote_provider = sync_server.get_provider_for_site( + project_name, remote_site) + if remote_site == 'studio': # for studio use explicit icon + remote_provider = 'studio' - repre_icons = lib.get_repre_icons() + repre_icons = lib.get_repre_icons() self.repre_icons = repre_icons self.sync_server = sync_server diff --git a/openpype/tools/publisher/widgets/assets_widget.py b/openpype/tools/publisher/widgets/assets_widget.py index 984da59c77b..46fdcc6526c 100644 --- a/openpype/tools/publisher/widgets/assets_widget.py +++ b/openpype/tools/publisher/widgets/assets_widget.py @@ -15,6 +15,7 @@ class CreateDialogAssetsWidget(SingleSelectAssetsWidget): current_context_required = QtCore.Signal() + header_height_changed = QtCore.Signal(int) def __init__(self, controller, parent): self._controller = controller @@ -27,6 +28,27 @@ def __init__(self, controller, parent): self._last_selection = None self._enabled = None + self._last_filter_height = None + + def _check_header_height(self): + """Catch header height changes. + + Label on top of creaters should have same height so Creators view has + same offset. + """ + height = self.header_widget.height() + if height != self._last_filter_height: + self._last_filter_height = height + self.header_height_changed.emit(height) + + def resizeEvent(self, event): + super(CreateDialogAssetsWidget, self).resizeEvent(event) + self._check_header_height() + + def showEvent(self, event): + super(CreateDialogAssetsWidget, self).showEvent(event) + self._check_header_height() + def _on_current_asset_click(self): self.current_context_required.emit() @@ -71,6 +93,7 @@ class AssetsHierarchyModel(QtGui.QStandardItemModel): Uses controller to load asset hierarchy. All asset documents are stored by their parents. 
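The `BaseRepresentationModel.reset_sync_server()` change earlier in this hunk stops rebuilding `ModulesManager` and re-querying sync-site info on every selection change by reusing them for 60 and 30 seconds respectively. A standalone sketch of that time-based reuse; `TTLValue` and the factory argument are illustrative names, not OpenPype API:

```python
import time


class TTLValue:
    """Re-create a value only when the cached one is older than max_age."""

    def __init__(self, max_age):
        self._max_age = max_age
        self._value = None
        self._stamp = 0.0

    def get(self, factory):
        now = time.time()
        if self._value is None or (now - self._stamp) > self._max_age:
            self._value = factory()
            self._stamp = now
        return self._value


manager_cache = TTLValue(max_age=60)   # mirrors _max_manager_cache_time
project_cache = TTLValue(max_age=30)   # mirrors _max_project_cache_time

first = manager_cache.get(object)      # stand-in for ModulesManager()
second = manager_cache.get(object)
assert first is second                 # reused within the 60 s window
```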
""" + def __init__(self, controller): super(AssetsHierarchyModel, self).__init__() self._controller = controller @@ -143,6 +166,7 @@ def name_is_valid(self, item_name): class AssetsDialog(QtWidgets.QDialog): """Dialog to select asset for a context of instance.""" + def __init__(self, controller, parent): super(AssetsDialog, self).__init__(parent) self.setWindowTitle("Select asset") @@ -196,9 +220,26 @@ def __init__(self, controller, parent): # - adds ability to call reset on multiple places without repeating self._soft_reset_enabled = True + self._first_show = True + self._default_height = 500 + + def _on_first_show(self): + center = self.rect().center() + size = self.size() + size.setHeight(self._default_height) + + self.resize(size) + new_pos = self.mapToGlobal(center) + new_pos.setX(new_pos.x() - int(self.width() / 2)) + new_pos.setY(new_pos.y() - int(self.height() / 2)) + self.move(new_pos) + def showEvent(self, event): """Refresh asset model on show.""" super(AssetsDialog, self).showEvent(event) + if self._first_show: + self._first_show = False + self._on_first_show() # Refresh on show self.reset(False) diff --git a/openpype/tools/publisher/widgets/create_dialog.py b/openpype/tools/publisher/widgets/create_dialog.py index 243540f243c..9e357f3a567 100644 --- a/openpype/tools/publisher/widgets/create_dialog.py +++ b/openpype/tools/publisher/widgets/create_dialog.py @@ -3,6 +3,7 @@ import traceback import copy +import qtawesome try: import commonmark except Exception: @@ -15,7 +16,8 @@ ) from openpype.tools.utils import ( ErrorMessageBox, - MessageOverlayObject + MessageOverlayObject, + ClickableFrame, ) from .widgets import IconValuePixmapLabel @@ -114,6 +116,8 @@ def _create_content(self, content_layout): # TODO add creator identifier/label to details class CreatorShortDescWidget(QtWidgets.QWidget): + height_changed = QtCore.Signal(int) + def __init__(self, parent=None): super(CreatorShortDescWidget, self).__init__(parent=parent) @@ -152,6 +156,22 @@ def __init__(self, parent=None): self._family_label = family_label self._description_label = description_label + self._last_height = None + + def _check_height_change(self): + height = self.height() + if height != self._last_height: + self._last_height = height + self.height_changed.emit(height) + + def showEvent(self, event): + super(CreatorShortDescWidget, self).showEvent(event) + self._check_height_change() + + def resizeEvent(self, event): + super(CreatorShortDescWidget, self).resizeEvent(event) + self._check_height_change() + def set_plugin(self, plugin=None): if not plugin: self._icon_widget.set_icon_def(None) @@ -168,13 +188,43 @@ def set_plugin(self, plugin=None): self._description_label.setText(description) -class HelpButton(QtWidgets.QPushButton): - resized = QtCore.Signal() +class HelpButton(ClickableFrame): + resized = QtCore.Signal(int) + question_mark_icon_name = "fa.question" + help_icon_name = "fa.question-circle" + hide_icon_name = "fa.angle-left" def __init__(self, *args, **kwargs): super(HelpButton, self).__init__(*args, **kwargs) self.setObjectName("CreateDialogHelpButton") + question_mark_label = QtWidgets.QLabel(self) + help_widget = QtWidgets.QWidget(self) + + help_question = QtWidgets.QLabel(help_widget) + help_label = QtWidgets.QLabel("Help", help_widget) + hide_icon = QtWidgets.QLabel(help_widget) + + help_layout = QtWidgets.QHBoxLayout(help_widget) + help_layout.setContentsMargins(0, 0, 5, 0) + help_layout.addWidget(help_question, 0) + help_layout.addWidget(help_label, 0) + help_layout.addStretch(1) + 
help_layout.addWidget(hide_icon, 0) + + layout = QtWidgets.QHBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.setSpacing(0) + layout.addWidget(question_mark_label, 0) + layout.addWidget(help_widget, 1) + + help_widget.setVisible(False) + + self._question_mark_label = question_mark_label + self._help_widget = help_widget + self._help_question = help_question + self._hide_icon = hide_icon + self._expanded = None self.set_expanded() @@ -184,31 +234,56 @@ def set_expanded(self, expanded=None): return expanded = False self._expanded = expanded - if expanded: - text = "<" + self._help_widget.setVisible(expanded) + self._update_content() + + def _update_content(self): + width = self.get_icon_width() + if self._expanded: + question_mark_pix = QtGui.QPixmap(width, width) + question_mark_pix.fill(QtCore.Qt.transparent) + else: - text = "?" - self.setText(text) + question_mark_icon = qtawesome.icon( + self.question_mark_icon_name, color=QtCore.Qt.white + ) + question_mark_pix = question_mark_icon.pixmap(width, width) + + hide_icon = qtawesome.icon( + self.hide_icon_name, color=QtCore.Qt.white + ) + help_question_icon = qtawesome.icon( + self.help_icon_name, color=QtCore.Qt.white + ) + self._question_mark_label.setPixmap(question_mark_pix) + self._question_mark_label.setMaximumWidth(width) + self._hide_icon.setPixmap(hide_icon.pixmap(width, width)) + self._help_question.setPixmap(help_question_icon.pixmap(width, width)) - self._update_size() + def get_icon_width(self): + metrics = self.fontMetrics() + return metrics.height() - def _update_size(self): - new_size = self.minimumSizeHint() - if self.size() != new_size: - self.resize(new_size) - self.resized.emit() + def set_pos_and_size(self, pos_x, pos_y, width, height): + update_icon = self.height() != height + self.move(pos_x, pos_y) + self.resize(width, height) + + if update_icon: + self._update_content() + self.updateGeometry() def showEvent(self, event): super(HelpButton, self).showEvent(event) - self._update_size() + self.resized.emit(self.height()) def resizeEvent(self, event): super(HelpButton, self).resizeEvent(event) - self._update_size() + self.resized.emit(self.height()) class CreateDialog(QtWidgets.QDialog): - default_size = (900, 500) + default_size = (1000, 560) def __init__( self, controller, asset_name=None, task_name=None, parent=None @@ -255,6 +330,14 @@ def __init__( context_layout.addWidget(tasks_widget, 1) # --- Creators view --- + creators_header_widget = QtWidgets.QWidget(self) + header_label_widget = QtWidgets.QLabel( + "Choose family:", creators_header_widget + ) + creators_header_layout = QtWidgets.QHBoxLayout(creators_header_widget) + creators_header_layout.setContentsMargins(0, 0, 0, 0) + creators_header_layout.addWidget(header_label_widget, 1) + creators_view = QtWidgets.QListView(self) creators_model = QtGui.QStandardItemModel() creators_view.setModel(creators_model) @@ -271,7 +354,6 @@ def __init__( variant_hints_menu = QtWidgets.QMenu(variant_widget) variant_hints_group = QtWidgets.QActionGroup(variant_hints_menu) - # variant_hints_btn.setMenu(variant_hints_menu) variant_layout = QtWidgets.QHBoxLayout(variant_widget) variant_layout.setContentsMargins(0, 0, 0, 0) @@ -282,9 +364,6 @@ def __init__( subset_name_input = QtWidgets.QLineEdit(self) subset_name_input.setEnabled(False) - create_btn = QtWidgets.QPushButton("Create", self) - create_btn.setEnabled(False) - form_layout = QtWidgets.QFormLayout() form_layout.addRow("Variant:", variant_widget) form_layout.addRow("Subset:", subset_name_input) @@ -292,10 
+371,9 @@ def __init__( mid_widget = QtWidgets.QWidget(self) mid_layout = QtWidgets.QVBoxLayout(mid_widget) mid_layout.setContentsMargins(0, 0, 0, 0) - mid_layout.addWidget(QtWidgets.QLabel("Choose family:", self)) + mid_layout.addWidget(creators_header_widget, 0) mid_layout.addWidget(creators_view, 1) mid_layout.addLayout(form_layout, 0) - mid_layout.addWidget(create_btn, 0) # ------------ # --- Creator short info and attr defs --- @@ -305,31 +383,62 @@ def __init__( creator_attrs_widget ) - separator_widget = QtWidgets.QWidget(self) - separator_widget.setObjectName("Separator") - separator_widget.setMinimumHeight(2) - separator_widget.setMaximumHeight(2) + attr_separator_widget = QtWidgets.QWidget(self) + attr_separator_widget.setObjectName("Separator") + attr_separator_widget.setMinimumHeight(1) + attr_separator_widget.setMaximumHeight(1) # Precreate attributes widget pre_create_widget = PreCreateWidget(creator_attrs_widget) + # Create button + create_btn_wrapper = QtWidgets.QWidget(creator_attrs_widget) + create_btn = QtWidgets.QPushButton("Create", create_btn_wrapper) + create_btn.setEnabled(False) + + create_btn_wrap_layout = QtWidgets.QHBoxLayout(create_btn_wrapper) + create_btn_wrap_layout.setContentsMargins(0, 0, 0, 0) + create_btn_wrap_layout.addStretch(1) + create_btn_wrap_layout.addWidget(create_btn, 0) + creator_attrs_layout = QtWidgets.QVBoxLayout(creator_attrs_widget) creator_attrs_layout.setContentsMargins(0, 0, 0, 0) creator_attrs_layout.addWidget(creator_short_desc_widget, 0) - creator_attrs_layout.addWidget(separator_widget, 0) + creator_attrs_layout.addWidget(attr_separator_widget, 0) creator_attrs_layout.addWidget(pre_create_widget, 1) + creator_attrs_layout.addWidget(create_btn_wrapper, 0) # ------------------------------------- # --- Detailed information about creator --- # Detailed description of creator - detail_description_widget = QtWidgets.QTextEdit(self) - detail_description_widget.setObjectName("InfoText") - detail_description_widget.setTextInteractionFlags( + detail_description_widget = QtWidgets.QWidget(self) + + detail_placoholder_widget = QtWidgets.QWidget( + detail_description_widget + ) + detail_placoholder_widget.setAttribute( + QtCore.Qt.WA_TranslucentBackground + ) + + detail_description_input = QtWidgets.QTextEdit( + detail_description_widget + ) + detail_description_input.setObjectName("CreatorDetailedDescription") + detail_description_input.setTextInteractionFlags( QtCore.Qt.TextBrowserInteraction ) + + detail_description_layout = QtWidgets.QVBoxLayout( + detail_description_widget + ) + detail_description_layout.setContentsMargins(0, 0, 0, 0) + detail_description_layout.setSpacing(0) + detail_description_layout.addWidget(detail_placoholder_widget, 0) + detail_description_layout.addWidget(detail_description_input, 1) + detail_description_widget.setVisible(False) - # ------------------------------------------- + # ------------------------------------------- splitter_widget = QtWidgets.QSplitter(self) splitter_widget.addWidget(context_widget) splitter_widget.addWidget(mid_widget) @@ -344,17 +453,27 @@ def __init__( layout.addWidget(splitter_widget, 1) # Floating help button + # - Create this button as last to be fully visible help_btn = HelpButton(self) prereq_timer = QtCore.QTimer() prereq_timer.setInterval(50) prereq_timer.setSingleShot(True) + desc_width_anim_timer = QtCore.QTimer() + desc_width_anim_timer.setInterval(10) + prereq_timer.timeout.connect(self._on_prereq_timer) + desc_width_anim_timer.timeout.connect(self._on_desc_animation) + 
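The description-panel animation wired up here (and implemented in `_on_help_btn` / `_on_desc_animation` in the following hunks) grows or shrinks the panel width by one fifth of the animated span per 10 ms tick and clamps the final step exactly to the target. A condensed, Qt-free sketch of that stepping logic; the generator form and function name are illustrative:

```python
def width_steps(start, target):
    """Yield successive widths from start to target in fifths.

    The step is one fifth of the animated span (at least 1 px) and the
    last step clamps to the target, matching the timer-driven handlers.
    """
    span = abs(target - start)
    step = max(1, span // 5)
    if target < start:
        step = -step
    width = start
    while width != target:
        width += step
        if (step > 0 and width >= target) or (step < 0 and width <= target):
            width = target
        yield width


print(list(width_steps(0, 260)))   # expanding: [52, 104, 156, 208, 260]
print(list(width_steps(260, 0)))   # collapsing: [208, 156, 104, 52, 0]
```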
help_btn.clicked.connect(self._on_help_btn) help_btn.resized.connect(self._on_help_btn_resize) + assets_widget.header_height_changed.connect( + self._on_asset_filter_height_change + ) + create_btn.clicked.connect(self._on_create) variant_widget.resized.connect(self._on_variant_widget_resize) variant_input.returnPressed.connect(self._on_create) @@ -369,6 +488,10 @@ def __init__( self._on_current_session_context_request ) tasks_widget.task_changed.connect(self._on_task_change) + creator_short_desc_widget.height_changed.connect( + self._on_description_height_change + ) + splitter_widget.splitterMoved.connect(self._on_splitter_move) controller.add_plugins_refresh_callback(self._on_plugins_refresh) @@ -387,18 +510,33 @@ def __init__( self.variant_hints_menu = variant_hints_menu self.variant_hints_group = variant_hints_group + self._creators_header_widget = creators_header_widget self.creators_model = creators_model self.creators_view = creators_view self.create_btn = create_btn self._creator_short_desc_widget = creator_short_desc_widget self._pre_create_widget = pre_create_widget + self._attr_separator_widget = attr_separator_widget + + self._detail_placoholder_widget = detail_placoholder_widget self._detail_description_widget = detail_description_widget + self._detail_description_input = detail_description_input self._help_btn = help_btn self._prereq_timer = prereq_timer self._first_show = True + # Description animation + self._description_size_policy = detail_description_widget.sizePolicy() + self._desc_width_anim_timer = desc_width_anim_timer + self._desc_widget_step = 0 + self._last_description_width = None + self._last_full_width = 0 + self._expected_description_width = 0 + self._last_desc_max_width = None + self._other_widgets_widths = [] + def _emit_message(self, message): self._overlay_object.add_message(message) @@ -465,6 +603,10 @@ def refresh(self): def _invalidate_prereq(self): self._prereq_timer.start() + def _on_asset_filter_height_change(self, height): + self._creators_header_widget.setMinimumHeight(height) + self._creators_header_widget.setMaximumHeight(height) + def _on_prereq_timer(self): prereq_available = True creator_btn_tooltips = [] @@ -595,6 +737,12 @@ def _on_current_session_context_request(self): if self._task_name: self._tasks_widget.select_task_name(self._task_name) + def _on_description_height_change(self): + # Use separator's 'y' position as height + height = self._attr_separator_widget.y() + self._detail_placoholder_widget.setMinimumHeight(height) + self._detail_placoholder_widget.setMaximumHeight(height) + def _on_creator_item_change(self, new_index, _old_index): identifier = None if new_index.isValid(): @@ -602,54 +750,192 @@ def _on_creator_item_change(self, new_index, _old_index): self._set_creator_by_identifier(identifier) def _update_help_btn(self): - pos_x = self.width() - self._help_btn.width() - point = self._creator_short_desc_widget.rect().topRight() + short_desc_rect = self._creator_short_desc_widget.rect() + + # point = short_desc_rect.topRight() + point = short_desc_rect.center() mapped_point = self._creator_short_desc_widget.mapTo(self, point) - pos_y = mapped_point.y() - self._help_btn.move(max(0, pos_x), max(0, pos_y)) + # pos_y = mapped_point.y() + center_pos_y = mapped_point.y() + icon_width = self._help_btn.get_icon_width() + + _height = int(icon_width * 2.5) + height = min(_height, short_desc_rect.height()) + pos_y = center_pos_y - int(height / 2) + + pos_x = self.width() - icon_width + if self._detail_placoholder_widget.isVisible(): + pos_x 
-= ( + self._detail_placoholder_widget.width() + + self._splitter_widget.handle(3).width() + ) + + width = self.width() - pos_x + + self._help_btn.set_pos_and_size( + max(0, pos_x), max(0, pos_y), + width, height + ) + + def _on_help_btn_resize(self, height): + if self._creator_short_desc_widget.height() != height: + self._update_help_btn() - def _on_help_btn_resize(self): + def _on_splitter_move(self, *args): self._update_help_btn() def _on_help_btn(self): + if self._desc_width_anim_timer.isActive(): + return + final_size = self.size() cur_sizes = self._splitter_widget.sizes() - spacing = self._splitter_widget.handleWidth() + + if self._desc_widget_step == 0: + now_visible = self._detail_description_widget.isVisible() + else: + now_visible = self._desc_widget_step > 0 sizes = [] for idx, value in enumerate(cur_sizes): if idx < 3: sizes.append(value) - now_visible = self._detail_description_widget.isVisible() + self._last_full_width = final_size.width() + self._other_widgets_widths = list(sizes) + if now_visible: - width = final_size.width() - ( - spacing + self._detail_description_widget.width() - ) + cur_desc_width = self._detail_description_widget.width() + if cur_desc_width < 1: + cur_desc_width = 2 + step_size = int(cur_desc_width / 5) + if step_size < 1: + step_size = 1 + + step_size *= -1 + expected_width = 0 + desc_width = cur_desc_width - 1 + width = final_size.width() - 1 + min_max = desc_width + self._last_description_width = cur_desc_width else: - last_size = self._detail_description_widget.sizeHint().width() - width = final_size.width() + spacing + last_size - sizes.append(last_size) + self._detail_description_widget.setVisible(True) + handle = self._splitter_widget.handle(3) + desc_width = handle.sizeHint().width() + if self._last_description_width: + expected_width = self._last_description_width + else: + hint = self._detail_description_widget.sizeHint() + expected_width = hint.width() + + width = final_size.width() + desc_width + step_size = int(expected_width / 5) + if step_size < 1: + step_size = 1 + min_max = 0 + + if self._last_desc_max_width is None: + self._last_desc_max_width = ( + self._detail_description_widget.maximumWidth() + ) + self._detail_description_widget.setMinimumWidth(min_max) + self._detail_description_widget.setMaximumWidth(min_max) + self._expected_description_width = expected_width + self._desc_widget_step = step_size + + self._desc_width_anim_timer.start() + + sizes.append(desc_width) final_size.setWidth(width) - self._detail_description_widget.setVisible(not now_visible) self._splitter_widget.setSizes(sizes) self.resize(final_size) self._help_btn.set_expanded(not now_visible) + def _on_desc_animation(self): + current_width = self._detail_description_widget.width() + + desc_width = None + last_step = False + growing = self._desc_widget_step > 0 + + # Growing + if growing: + if current_width < self._expected_description_width: + desc_width = current_width + self._desc_widget_step + if desc_width >= self._expected_description_width: + desc_width = self._expected_description_width + last_step = True + + # Decreasing + elif self._desc_widget_step < 0: + if current_width > self._expected_description_width: + desc_width = current_width + self._desc_widget_step + if desc_width <= self._expected_description_width: + desc_width = self._expected_description_width + last_step = True + + if desc_width is None: + self._desc_widget_step = 0 + self._desc_width_anim_timer.stop() + return + + if last_step and not growing: + 
self._detail_description_widget.setVisible(False) + QtWidgets.QApplication.processEvents() + + width = self._last_full_width + handle_width = self._splitter_widget.handle(3).width() + if growing: + width += (handle_width + desc_width) + else: + width -= self._last_description_width + if last_step: + width -= handle_width + else: + width += desc_width + + if not last_step or growing: + self._detail_description_widget.setMaximumWidth(desc_width) + self._detail_description_widget.setMinimumWidth(desc_width) + + window_size = self.size() + window_size.setWidth(width) + self.resize(window_size) + if not last_step: + return + + self._desc_widget_step = 0 + self._desc_width_anim_timer.stop() + + if not growing: + return + + self._detail_description_widget.setMinimumWidth(0) + self._detail_description_widget.setMaximumWidth( + self._last_desc_max_width + ) + self._detail_description_widget.setSizePolicy( + self._description_size_policy + ) + + sizes = list(self._other_widgets_widths) + sizes.append(desc_width) + self._splitter_widget.setSizes(sizes) + def _set_creator_detailed_text(self, creator): if not creator: - self._detail_description_widget.setPlainText("") + self._detail_description_input.setPlainText("") return detailed_description = creator.get_detail_description() or "" if commonmark: html = commonmark.commonmark(detailed_description) - self._detail_description_widget.setHtml(html) + self._detail_description_input.setHtml(html) else: - self._detail_description_widget.setMarkdown(detailed_description) + self._detail_description_input.setMarkdown(detailed_description) def _set_creator_by_identifier(self, identifier): creator = self.controller.manual_creators.get(identifier) @@ -806,6 +1092,21 @@ def _set_variant_state_property(self, state): self.variant_input.setProperty("state", state) self.variant_input.style().polish(self.variant_input) + def _on_first_show(self): + center = self.rect().center() + + width, height = self.default_size + self.resize(width, height) + part = int(width / 7) + self._splitter_widget.setSizes( + [part * 2, part * 2, width - (part * 4)] + ) + + new_pos = self.mapToGlobal(center) + new_pos.setX(new_pos.x() - int(self.width() / 2)) + new_pos.setY(new_pos.y() - int(self.height() / 2)) + self.move(new_pos) + def moveEvent(self, event): super(CreateDialog, self).moveEvent(event) self._last_pos = self.pos() @@ -814,13 +1115,7 @@ def showEvent(self, event): super(CreateDialog, self).showEvent(event) if self._first_show: self._first_show = False - width, height = self.default_size - self.resize(width, height) - - third_size = int(width / 3) - self._splitter_widget.setSizes( - [third_size, third_size, width - (2 * third_size)] - ) + self._on_first_show() if self._last_pos is not None: self.move(self._last_pos) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 5ced469b59c..7096b9fb507 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -14,7 +14,8 @@ PlaceholderLineEdit, IconButton, PixmapLabel, - BaseClickableFrame + BaseClickableFrame, + set_style_property, ) from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS from .assets_widget import AssetsDialog @@ -344,21 +345,42 @@ class AssetsField(BaseClickableFrame): def __init__(self, controller, parent): super(AssetsField, self).__init__(parent) + self.setObjectName("AssetNameInputWidget") - dialog = AssetsDialog(controller, self) + # Don't use 'self' for parent! 
+ # - this widget has specific styles + dialog = AssetsDialog(controller, parent) name_input = ClickableLineEdit(self) name_input.setObjectName("AssetNameInput") + icon_name = "fa.window-maximize" + icon = qtawesome.icon(icon_name, color="white") + icon_btn = QtWidgets.QPushButton(self) + icon_btn.setIcon(icon) + icon_btn.setObjectName("AssetNameInputButton") + layout = QtWidgets.QHBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) + layout.setSpacing(0) layout.addWidget(name_input, 1) + layout.addWidget(icon_btn, 0) + # Make sure all widgets are vertically extended to highest widget + for widget in ( + name_input, + icon_btn + ): + size_policy = widget.sizePolicy() + size_policy.setVerticalPolicy(size_policy.MinimumExpanding) + widget.setSizePolicy(size_policy) name_input.clicked.connect(self._mouse_release_callback) + icon_btn.clicked.connect(self._mouse_release_callback) dialog.finished.connect(self._on_dialog_finish) self._dialog = dialog self._name_input = name_input + self._icon_btn = icon_btn self._origin_value = [] self._origin_selection = [] @@ -406,10 +428,9 @@ def _set_is_valid(self, valid): self._set_state_property(state) def _set_state_property(self, state): - current_value = self._name_input.property("state") - if current_value != state: - self._name_input.setProperty("state", state) - self._name_input.style().polish(self._name_input) + set_style_property(self, "state", state) + set_style_property(self._name_input, "state", state) + set_style_property(self._icon_btn, "state", state) def is_valid(self): """Is asset valid.""" @@ -842,6 +863,8 @@ def set_value(self, variants=None): self._ignore_value_change = True + self._has_value_changed = False + self._origin_value = list(variants) self._current_value = list(variants) @@ -892,11 +915,23 @@ def __init__(self, parent): layout.setContentsMargins(0, 0, 0, 0) layout.addWidget(view) + model.rowsInserted.connect(self._on_insert) + self._view = view self._model = model self._value = [] + def _on_insert(self): + self._update_size() + + def _update_size(self): + model = self._view.model() + if model.rowCount() == 0: + return + height = self._view.sizeHintForRow(0) + self.setMaximumHeight(height + (2 * self._view.spacing())) + def showEvent(self, event): super(MultipleItemWidget, self).showEvent(event) tmp_item = None @@ -904,13 +939,15 @@ def showEvent(self, event): # Add temp item to be able calculate maximum height of widget tmp_item = QtGui.QStandardItem("tmp") self._model.appendRow(tmp_item) - - height = self._view.sizeHintForRow(0) - self.setMaximumHeight(height + (2 * self._view.spacing())) + self._update_size() if tmp_item is not None: self._model.clear() + def resizeEvent(self, event): + super(MultipleItemWidget, self).resizeEvent(event) + self._update_size() + def set_value(self, value=None): """Set value/s of currently selected instance.""" if value is None: @@ -1235,7 +1272,11 @@ def set_current_instances(self, instances): ) content_widget = QtWidgets.QWidget(self._scroll_area) - content_layout = QtWidgets.QFormLayout(content_widget) + content_layout = QtWidgets.QGridLayout(content_widget) + content_layout.setColumnStretch(0, 0) + content_layout.setColumnStretch(1, 1) + + row = 0 for attr_def, attr_instances, values in result: widget = create_widget_for_attr_def(attr_def, content_widget) if attr_def.is_value_def: @@ -1246,10 +1287,28 @@ def set_current_instances(self, instances): else: widget.set_value(values, True) + expand_cols = 2 + if attr_def.is_value_def and attr_def.is_label_horizontal: + expand_cols = 1 + + 
col_num = 2 - expand_cols + label = attr_def.label or attr_def.key - content_layout.addRow(label, widget) - widget.value_changed.connect(self._input_value_changed) + if label: + label_widget = QtWidgets.QLabel(label, self) + content_layout.addWidget( + label_widget, row, 0, 1, expand_cols + ) + if not attr_def.is_label_horizontal: + row += 1 + + content_layout.addWidget( + widget, row, col_num, 1, expand_cols + ) + + row += 1 + widget.value_changed.connect(self._input_value_changed) self._attr_def_id_to_instances[attr_def.id] = attr_instances self._attr_def_id_to_attr_def[attr_def.id] = attr_def diff --git a/openpype/tools/standalonepublish/widgets/widget_components.py b/openpype/tools/standalonepublish/widgets/widget_components.py index fbafc7142a6..b3280089c31 100644 --- a/openpype/tools/standalonepublish/widgets/widget_components.py +++ b/openpype/tools/standalonepublish/widgets/widget_components.py @@ -202,6 +202,7 @@ def cli_publish(data, publish_paths, gui=True): if os.path.exists(json_data_path): with open(json_data_path, "r") as f: result = json.load(f) + os.remove(json_data_path) log.info(f"Publish result: {result}") diff --git a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py index e6c7328e886..f8a8273b267 100644 --- a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py +++ b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py @@ -38,7 +38,7 @@ class DropDataFrame(QtWidgets.QFrame): } sequence_types = [ - ".bgeo", ".vdb" + ".bgeo", ".vdb", ".bgeosc", ".bgeogz" ] def __init__(self, parent): diff --git a/openpype/tools/utils/assets_widget.py b/openpype/tools/utils/assets_widget.py index 3d4efcdd4d2..d1df1193d2b 100644 --- a/openpype/tools/utils/assets_widget.py +++ b/openpype/tools/utils/assets_widget.py @@ -589,10 +589,12 @@ def __init__(self, dbcon, parent=None): view = AssetsView(self) view.setModel(proxy) + header_widget = QtWidgets.QWidget(self) + current_asset_icon = qtawesome.icon( "fa.arrow-down", color=get_default_tools_icon_color() ) - current_asset_btn = QtWidgets.QPushButton(self) + current_asset_btn = QtWidgets.QPushButton(header_widget) current_asset_btn.setIcon(current_asset_icon) current_asset_btn.setToolTip("Go to Asset from current Session") # Hide by default @@ -601,25 +603,35 @@ def __init__(self, dbcon, parent=None): refresh_icon = qtawesome.icon( "fa.refresh", color=get_default_tools_icon_color() ) - refresh_btn = QtWidgets.QPushButton(self) + refresh_btn = QtWidgets.QPushButton(header_widget) refresh_btn.setIcon(refresh_icon) refresh_btn.setToolTip("Refresh items") - filter_input = PlaceholderLineEdit(self) + filter_input = PlaceholderLineEdit(header_widget) filter_input.setPlaceholderText("Filter assets..") # Header - header_layout = QtWidgets.QHBoxLayout() + header_layout = QtWidgets.QHBoxLayout(header_widget) + header_layout.setContentsMargins(0, 0, 0, 0) header_layout.addWidget(filter_input) header_layout.addWidget(current_asset_btn) header_layout.addWidget(refresh_btn) + # Make header widgets expand vertically if there is a place + for widget in ( + current_asset_btn, + refresh_btn, + filter_input, + ): + size_policy = widget.sizePolicy() + size_policy.setVerticalPolicy(size_policy.MinimumExpanding) + widget.setSizePolicy(size_policy) + # Layout layout = QtWidgets.QVBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) - layout.setSpacing(4) - layout.addLayout(header_layout) - layout.addWidget(view) + layout.addWidget(header_widget, 0) + 
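
Illustrative sketch (not part of the patch): the switch from QFormLayout to QGridLayout above is what allows per-attribute control over label placement. A condensed sketch of the placement rule, assuming PySide2-style bindings; `add_attr_rows` and its input triples are hypothetical.

    from PySide2 import QtWidgets  # assumption: any Qt binding with this API works


    def add_attr_rows(content_widget, rows):
        """Hypothetical helper: place (label, input, horizontal) triples on a grid."""
        layout = QtWidgets.QGridLayout(content_widget)
        layout.setColumnStretch(0, 0)  # label column keeps its size hint
        layout.setColumnStretch(1, 1)  # input column absorbs the extra width

        row = 0
        for label_text, widget, label_horizontal in rows:
            # Horizontal labels share the row with their input; vertical labels
            # get their own row and the input then spans both columns.
            expand_cols = 1 if label_horizontal else 2
            col = 2 - expand_cols
            if label_text:
                label = QtWidgets.QLabel(label_text, content_widget)
                layout.addWidget(label, row, 0, 1, expand_cols)
                if not label_horizontal:
                    row += 1
            layout.addWidget(widget, row, col, 1, expand_cols)
            row += 1
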
layout.addWidget(view, 1) # Signals/Slots filter_input.textChanged.connect(self._on_filter_text_change) @@ -630,6 +642,8 @@ def __init__(self, dbcon, parent=None): current_asset_btn.clicked.connect(self._on_current_asset_click) view.doubleClicked.connect(self.double_clicked) + self._header_widget = header_widget + self._filter_input = filter_input self._refresh_btn = refresh_btn self._current_asset_btn = current_asset_btn self._model = model @@ -637,8 +651,14 @@ def __init__(self, dbcon, parent=None): self._view = view self._last_project_name = None + self._last_btns_height = None + self.model_selection = {} + @property + def header_widget(self): + return self._header_widget + def _create_source_model(self): model = AssetModel(dbcon=self.dbcon, parent=self) model.refreshed.connect(self._on_model_refresh) @@ -669,6 +689,7 @@ def _on_current_asset_click(self): This separation gives ability to override this method and use it in differnt way. """ + self.set_current_session_asset() def set_current_session_asset(self): @@ -681,6 +702,7 @@ def set_refresh_btn_visibility(self, visible=None): Some tools may have their global refresh button or do not support refresh at all. """ + if visible is None: visible = not self._refresh_btn.isVisible() self._refresh_btn.setVisible(visible) @@ -690,6 +712,7 @@ def set_current_asset_btn_visibility(self, visible=None): Not all tools support using of current context asset. """ + if visible is None: visible = not self._current_asset_btn.isVisible() self._current_asset_btn.setVisible(visible) @@ -723,6 +746,7 @@ def _on_model_refresh(self, has_item): so if you're modifying model keep in mind that this method should be called when refresh is done. """ + self._proxy.sort(0) self._set_loading_state(loading=False, empty=not has_item) self.refreshed.emit() @@ -767,6 +791,7 @@ class SingleSelectAssetsWidget(AssetsWidget): Contain single selection specific api methods. 
""" + def get_selected_asset_id(self): """Currently selected asset id.""" selection_model = self._view.selectionModel() diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index a3ee370bd3b..23cf8342b16 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -151,7 +151,7 @@ def _create_item(self, file_item): item = QtGui.QStandardItem() item_id = str(uuid.uuid4()) item.setData(item_id, ITEM_ID_ROLE) - item.setData(file_item.label, ITEM_LABEL_ROLE) + item.setData(file_item.label or "< empty >", ITEM_LABEL_ROLE) item.setData(file_item.filenames, FILENAMES_ROLE) item.setData(file_item.directory, DIRPATH_ROLE) item.setData(icon_pixmap, ITEM_ICON_ROLE) @@ -251,7 +251,7 @@ def lessThan(self, left, right): class ItemWidget(QtWidgets.QWidget): - split_requested = QtCore.Signal(str) + context_menu_requested = QtCore.Signal(QtCore.QPoint) def __init__( self, item_id, label, pixmap_icon, is_sequence, multivalue, parent=None @@ -316,19 +316,9 @@ def resizeEvent(self, event): self._update_btn_size() def _on_actions_clicked(self): - menu = QtWidgets.QMenu(self._split_btn) - - action = QtWidgets.QAction("Split sequence", menu) - action.triggered.connect(self._on_split_sequence) - - menu.addAction(action) - pos = self._split_btn.rect().bottomLeft() point = self._split_btn.mapToGlobal(pos) - menu.popup(point) - - def _on_split_sequence(self): - self.split_requested.emit(self._item_id) + self.context_menu_requested.emit(point) class InViewButton(IconButton): @@ -339,6 +329,7 @@ class FilesView(QtWidgets.QListView): """View showing instances and their groups.""" remove_requested = QtCore.Signal() + context_menu_requested = QtCore.Signal(QtCore.QPoint) def __init__(self, *args, **kwargs): super(FilesView, self).__init__(*args, **kwargs) @@ -347,6 +338,7 @@ def __init__(self, *args, **kwargs): self.setSelectionMode( QtWidgets.QAbstractItemView.ExtendedSelection ) + self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) remove_btn = InViewButton(self) pix_enabled = paint_image_with_color( @@ -361,6 +353,7 @@ def __init__(self, *args, **kwargs): remove_btn.setEnabled(False) remove_btn.clicked.connect(self._on_remove_clicked) + self.customContextMenuRequested.connect(self._on_context_menu_request) self._remove_btn = remove_btn @@ -397,6 +390,12 @@ def get_selected_item_ids(self): selected_item_ids.add(instance_id) return selected_item_ids + def has_selected_sequence(self): + for index in self.selectionModel().selectedIndexes(): + if index.data(IS_SEQUENCE_ROLE): + return True + return False + def event(self, event): if event.type() == QtCore.QEvent.KeyPress: if ( @@ -408,6 +407,12 @@ def event(self, event): return super(FilesView, self).event(event) + def _on_context_menu_request(self, pos): + index = self.indexAt(pos) + if index.isValid(): + point = self.viewport().mapToGlobal(pos) + self.context_menu_requested.emit(point) + def _on_selection_change(self): self._remove_btn.setEnabled(self.has_selected_item_ids()) @@ -456,6 +461,9 @@ def __init__(self, single_item, allow_sequences, parent): files_proxy_model.rowsInserted.connect(self._on_rows_inserted) files_proxy_model.rowsRemoved.connect(self._on_rows_removed) files_view.remove_requested.connect(self._on_remove_requested) + files_view.context_menu_requested.connect( + self._on_context_menu_requested + ) self._in_set_value = False self._single_item = single_item self._multivalue = False @@ -504,7 +512,9 @@ def current_value(self): return 
file_items if file_items: return file_items[0] - return FileDefItem.create_empty_item() + + empty_item = FileDefItem.create_empty_item() + return empty_item.to_dict() def set_filters(self, folders_allowed, exts_filter): self._files_proxy_model.set_allow_folders(folders_allowed) @@ -527,7 +537,9 @@ def _on_rows_inserted(self, parent_index, start_row, end_row): is_sequence, self._multivalue ) - widget.split_requested.connect(self._on_split_request) + widget.context_menu_requested.connect( + self._on_context_menu_requested + ) self._files_view.setIndexWidget(index, widget) self._files_proxy_model.setData( index, widget.sizeHint(), QtCore.Qt.SizeHintRole @@ -559,17 +571,22 @@ def _on_rows_removed(self, parent_index, start_row, end_row): if not self._in_set_value: self.value_changed.emit() - def _on_split_request(self, item_id): + def _on_split_request(self): if self._multivalue: return - file_item = self._files_model.get_file_item_by_id(item_id) - if not file_item: + item_ids = self._files_view.get_selected_item_ids() + if not item_ids: return - new_items = file_item.split_sequence() - self._remove_item_by_ids([item_id]) - self._add_filepaths(new_items) + for item_id in item_ids: + file_item = self._files_model.get_file_item_by_id(item_id) + if not file_item: + return + + new_items = file_item.split_sequence() + self._add_filepaths(new_items) + self._remove_item_by_ids(item_ids) def _on_remove_requested(self): if self._multivalue: @@ -579,6 +596,23 @@ def _on_remove_requested(self): if items_to_delete: self._remove_item_by_ids(items_to_delete) + def _on_context_menu_requested(self, pos): + if self._multivalue: + return + + menu = QtWidgets.QMenu(self._files_view) + + if self._files_view.has_selected_sequence(): + split_action = QtWidgets.QAction("Split sequence", menu) + split_action.triggered.connect(self._on_split_request) + menu.addAction(split_action) + + remove_action = QtWidgets.QAction("Remove", menu) + remove_action.triggered.connect(self._on_remove_requested) + menu.addAction(remove_action) + + menu.popup(pos) + def sizeHint(self): # Get size hints of widget and visible widgets result = super(FilesWidget, self).sizeHint() diff --git a/openpype/widgets/attribute_defs/widgets.py b/openpype/widgets/attribute_defs/widgets.py index 875b69acb48..b6493b80a86 100644 --- a/openpype/widgets/attribute_defs/widgets.py +++ b/openpype/widgets/attribute_defs/widgets.py @@ -91,6 +91,8 @@ def clear_attr_defs(self): layout.deleteLater() new_layout = QtWidgets.QGridLayout() + new_layout.setColumnStretch(0, 0) + new_layout.setColumnStretch(1, 1) self.setLayout(new_layout) def set_attr_defs(self, attr_defs): diff --git a/poetry.lock b/poetry.lock index a79c865a9e6..4372f34ff75 100644 --- a/poetry.lock +++ b/poetry.lock @@ -94,7 +94,7 @@ python-dateutil = ">=2.7.0" [[package]] name = "astroid" -version = "2.9.3" +version = "2.11.2" description = "An abstract syntax tree for Python with inference support." 
category = "dev" optional = false @@ -104,7 +104,7 @@ python-versions = ">=3.6.2" lazy-object-proxy = ">=1.4.0" typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} -wrapt = ">=1.11,<1.14" +wrapt = ">=1.11,<2" [[package]] name = "async-timeout" @@ -236,7 +236,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "charset-normalizer" -version = "2.0.11" +version = "2.0.12" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false @@ -295,7 +295,7 @@ python-versions = "*" [[package]] name = "coverage" -version = "6.3.1" +version = "6.3.2" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -309,7 +309,7 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "36.0.1" +version = "36.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false @@ -346,9 +346,20 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "dill" +version = "0.3.4" +description = "serialize all of python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*" + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + [[package]] name = "dnspython" -version = "2.2.0" +version = "2.2.1" description = "DNS toolkit" category = "main" optional = false @@ -364,7 +375,7 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] [[package]] name = "docutils" -version = "0.18.1" +version = "0.17.1" description = "Docutils -- Python Documentation Utilities" category = "dev" optional = false @@ -372,7 +383,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "dropbox" -version = "11.26.0" +version = "11.29.0" description = "Official Dropbox API Client" category = "main" optional = false @@ -397,7 +408,7 @@ prefixed = ">=0.3.2" [[package]] name = "evdev" -version = "1.4.0" +version = "1.5.0" description = "Bindings to the Linux input handling subsystem" category = "main" optional = false @@ -464,7 +475,7 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.26" +version = "3.1.27" description = "GitPython is a python library used to interact with Git repositories" category = "dev" optional = false @@ -476,7 +487,7 @@ typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\"" [[package]] name = "google-api-core" -version = "2.4.0" +version = "2.7.1" description = "Google API client core library" category = "main" optional = false @@ -495,7 +506,7 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2)"] [[package]] name = "google-api-python-client" -version = "1.12.10" +version = "1.12.11" description = "Google API Client Library for Python" category = "main" optional = false @@ -511,7 +522,7 @@ uritemplate = ">=3.0.0,<4dev" [[package]] name = "google-auth" -version = "2.6.0" +version = "2.6.3" description = "Google Authentication Library" category = "main" optional = false @@ -543,7 +554,7 @@ six = "*" [[package]] name = "googleapis-common-protos" -version = "1.54.0" +version = "1.56.0" description = "Common protobufs used in Google APIs" category = "main" optional = false @@ -557,7 +568,7 @@ grpc = ["grpcio (>=1.0.0)"] [[package]] name = "httplib2" -version = "0.20.2" +version = "0.20.4" description = "A comprehensive HTTP client library." 
category = "main" optional = false @@ -584,7 +595,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "4.10.1" +version = "4.11.3" description = "Read metadata from Python packages" category = "main" optional = false @@ -595,9 +606,9 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] [[package]] name = "iniconfig" @@ -637,14 +648,14 @@ testing = ["colorama", "docopt", "pytest (>=3.1.0)"] [[package]] name = "jeepney" -version = "0.7.1" +version = "0.8.0" description = "Low-level, pure Python DBus protocol wrapper." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] -test = ["pytest", "pytest-trio", "pytest-asyncio", "testpath", "trio", "async-timeout"] +test = ["pytest", "pytest-trio", "pytest-asyncio (>=0.17)", "testpath", "trio", "async-timeout"] trio = ["trio", "async-generator"] [[package]] @@ -722,11 +733,11 @@ pymongo = "*" [[package]] name = "markupsafe" -version = "2.0.1" +version = "2.1.1" description = "Safely add untrusted strings to HTML/XML markup." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "mccabe" @@ -777,7 +788,7 @@ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "paramiko" -version = "2.10.1" +version = "2.10.3" description = "SSH2 protocol library" category = "main" optional = false @@ -809,7 +820,7 @@ testing = ["docopt", "pytest (<6.0.0)"] [[package]] name = "pathlib2" -version = "2.3.6" +version = "2.3.7.post1" description = "Object-oriented filesystem paths" category = "main" optional = false @@ -820,15 +831,19 @@ six = "*" [[package]] name = "pillow" -version = "9.0.1" +version = "9.1.0" description = "Python Imaging Library (Fork)" category = "main" optional = false python-versions = ">=3.7" +[package.extras] +docs = ["olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinx-rtd-theme (>=1.0)", "sphinxext-opengraph"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] + [[package]] name = "platformdirs" -version = "2.4.1" +version = "2.5.1" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false @@ -871,11 +886,11 @@ python-versions = "*" [[package]] name = "protobuf" -version = "3.19.4" +version = "3.20.0" description = "Protocol Buffers" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [[package]] name = "py" @@ -966,21 +981,25 @@ python-versions = ">=3.5" [[package]] name = "pylint" -version = "2.12.2" +version = "2.13.5" description = "python code static checker" category = "dev" optional = false python-versions = ">=3.6.2" [package.dependencies] -astroid = ">=2.9.0,<2.10" +astroid = ">=2.11.2,<=2.12.0-dev0" colorama = {version = "*", markers = "sys_platform == \"win32\""} +dill = ">=0.2" isort = ">=4.2.5,<6" -mccabe = ">=0.6,<0.7" +mccabe = ">=0.6,<0.8" platformdirs = ">=2.2.0" -toml = ">=0.9.2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} +[package.extras] +testutil = ["gitpython (>3)"] + [[package]] name = "pymongo" version = "3.12.3" @@ -1031,7 +1050,7 @@ six = "*" [[package]] name = "pyobjc-core" -version = "8.2" +version = "8.4.1" description = "Python<->ObjC Interoperability Module" category = "main" optional = false @@ -1039,39 +1058,39 @@ python-versions = ">=3.6" [[package]] name = "pyobjc-framework-applicationservices" -version = "8.2" +version = "8.4.1" description = "Wrappers for the framework ApplicationServices on macOS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -pyobjc-core = ">=8.2" -pyobjc-framework-Cocoa = ">=8.2" -pyobjc-framework-Quartz = ">=8.2" +pyobjc-core = ">=8.4.1" +pyobjc-framework-Cocoa = ">=8.4.1" +pyobjc-framework-Quartz = ">=8.4.1" [[package]] name = "pyobjc-framework-cocoa" -version = "8.2" +version = "8.4.1" description = "Wrappers for the Cocoa frameworks on macOS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -pyobjc-core = ">=8.2" +pyobjc-core = ">=8.4.1" [[package]] name = "pyobjc-framework-quartz" -version = "8.2" +version = "8.4.1" description = "Wrappers for the Quartz frameworks on macOS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -pyobjc-core = ">=8.2" -pyobjc-framework-Cocoa = ">=8.2" +pyobjc-core = ">=8.4.1" +pyobjc-framework-Cocoa = ">=8.4.1" [[package]] name = "pyparsing" @@ -1175,7 +1194,7 @@ python-versions = "*" [[package]] name = "pytz" -version = "2021.3" +version = "2022.1" description = "World timezone definitions, modern and historical" category = "dev" optional = false @@ -1287,6 +1306,21 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[[package]] +name = "shotgun-api3" +version = "3.3.3" +description = "Shotgun Python API" +category = "main" +optional = false +python-versions = "*" +develop = false + +[package.source] +type = "git" +url = "https://github.com/shotgunsoftware/python-api.git" +reference = "v3.3.3" +resolved_reference = "b9f066c0edbea6e0733242e18f32f75489064840" + [[package]] name = "six" version = "1.16.0" @@ -1297,7 +1331,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "slack-sdk" -version = "3.13.0" +version = "3.15.2" description = "The Slack API Platform SDK for Python" category = "main" optional = false @@ -1305,7 +1339,7 @@ python-versions = ">=3.6.0" [package.extras] optional = ["aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "SQLAlchemy (>=1,<2)", "websockets (>=10,<11)", "websocket-client (>=1,<2)"] -testing = 
["pytest (>=6.2.5,<7)", "pytest-asyncio (<1)", "Flask-Sockets (>=0.2,<1)", "Flask (>=1,<2)", "Werkzeug (<2)", "pytest-cov (>=2,<3)", "codecov (>=2,<3)", "flake8 (>=4,<5)", "black (==21.12b0)", "psutil (>=5,<6)", "databases (>=0.3)", "boto3 (<=2)", "moto (<2)"] +testing = ["pytest (>=6.2.5,<7)", "pytest-asyncio (<1)", "Flask-Sockets (>=0.2,<1)", "Flask (>=1,<2)", "Werkzeug (<2)", "itsdangerous (==2.0.1)", "pytest-cov (>=2,<3)", "codecov (>=2,<3)", "flake8 (>=4,<5)", "black (==22.1.0)", "psutil (>=5,<6)", "databases (>=0.5)", "boto3 (<=2)", "moto (>=3,<4)"] [[package]] name = "smmap" @@ -1325,7 +1359,7 @@ python-versions = "*" [[package]] name = "speedcopy" -version = "2.1.2" +version = "2.1.3" description = "Replacement or alternative for python copyfile() utilizing server side copy on network shares for faster copying." category = "main" optional = false @@ -1333,18 +1367,19 @@ python-versions = "*" [[package]] name = "sphinx" -version = "3.5.3" +version = "4.5.0" description = "Python documentation generator" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] alabaster = ">=0.7,<0.8" babel = ">=1.3" colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.12" +docutils = ">=0.14,<0.18" imagesize = "*" +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} Jinja2 = ">=2.3" packaging = "*" Pygments = ">=2.0" @@ -1352,19 +1387,19 @@ requests = ">=2.5.0" snowballstemmer = ">=1.1" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.800)", "docutils-stubs"] +lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.931)", "docutils-stubs", "types-typed-ast", "types-requests"] test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] [[package]] name = "sphinx-qt-documentation" -version = "0.3" +version = "0.4" description = "Plugin for proper resolve intersphinx references for Qt elements" category = "dev" optional = false @@ -1374,16 +1409,22 @@ python-versions = ">=3.6" docutils = "*" sphinx = "*" +[package.extras] +dev = ["pre-commit"] +lint = ["black", "flake8", "pylint"] +test = ["pytest (>=3.0.0)", "pytest-cov"] + [[package]] name = "sphinx-rtd-theme" -version = "0.5.1" +version = "1.0.0" description = "Read the Docs theme for Sphinx" category = "dev" optional = false -python-versions = "*" +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" [package.dependencies] -sphinx = "*" +docutils = "<0.18" +sphinx = ">=1.6" [package.extras] dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] @@ -1504,7 +1545,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "tomli" -version = "2.0.0" +version = "2.0.1" description = "A lil' TOML parser" category = "dev" optional = false @@ -1520,7 +1561,7 @@ python-versions = ">=3.6" [[package]] name = "typing-extensions" -version = "4.0.1" +version = "4.1.1" description = "Backported and Experimental Type Hints for Python 3.6+" category = "main" optional = false @@ -1536,14 +1577,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "urllib3" -version = "1.26.8" +version = "1.26.9" description = "HTTP library with thread-safe connection pooling, file post, and 
more." category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] -brotli = ["brotlipy (>=0.6.0)"] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] @@ -1568,7 +1609,7 @@ six = "*" [[package]] name = "wrapt" -version = "1.13.3" +version = "1.14.0" description = "Module for decorators, wrappers and monkey patching." category = "dev" optional = false @@ -1606,20 +1647,20 @@ typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} [[package]] name = "zipp" -version = "3.7.0" +version = "3.8.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" python-versions = "3.7.*" -content-hash = "b02313c8255a1897b0f0617ad4884a5943696c363512921aab1cb2dd8f4fdbe0" +content-hash = "5df45b7a0d0505ca0db8e62f5a4e945601c53908c54ab752d5362552596bd69c" [metadata.files] acre = [] @@ -1722,8 +1763,8 @@ arrow = [ {file = "arrow-0.17.0.tar.gz", hash = "sha256:ff08d10cda1d36c68657d6ad20d74fbea493d980f8b2d45344e00d6ed2bf6ed4"}, ] astroid = [ - {file = "astroid-2.9.3-py3-none-any.whl", hash = "sha256:506daabe5edffb7e696ad82483ad0228245a9742ed7d2d8c9cdb31537decf9f6"}, - {file = "astroid-2.9.3.tar.gz", hash = "sha256:1efdf4e867d4d8ba4a9f6cf9ce07cd182c4c41de77f23814feb27ca93ca9d877"}, + {file = "astroid-2.11.2-py3-none-any.whl", hash = "sha256:cc8cc0d2d916c42d0a7c476c57550a4557a083081976bf42a73414322a6411d9"}, + {file = "astroid-2.11.2.tar.gz", hash = "sha256:8d0a30fe6481ce919f56690076eafbb2fb649142a89dc874f1ec0e7a011492d0"}, ] async-timeout = [ {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, @@ -1830,8 +1871,8 @@ chardet = [ {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.0.11.tar.gz", hash = "sha256:98398a9d69ee80548c762ba991a4728bfc3836768ed226b3945908d1a688371c"}, - {file = "charset_normalizer-2.0.11-py3-none-any.whl", hash = "sha256:2842d8f5e82a1f6aa437380934d5e1cd4fcf2003b06fed6940769c164a480a45"}, + {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, + {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, ] click = [ {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, @@ -1854,69 +1895,69 @@ coolname = [ {file = "coolname-1.1.0.tar.gz", hash = "sha256:410fe6ea9999bf96f2856ef0c726d5f38782bbefb7bb1aca0e91e0dc98ed09e3"}, ] coverage = [ - {file = 
"coverage-6.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeffd96882d8c06d31b65dddcf51db7c612547babc1c4c5db6a011abe9798525"}, - {file = "coverage-6.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:621f6ea7260ea2ffdaec64fe5cb521669984f567b66f62f81445221d4754df4c"}, - {file = "coverage-6.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84f2436d6742c01136dd940ee158bfc7cf5ced3da7e4c949662b8703b5cd8145"}, - {file = "coverage-6.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de73fca6fb403dd72d4da517cfc49fcf791f74eee697d3219f6be29adf5af6ce"}, - {file = "coverage-6.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78fbb2be068a13a5d99dce9e1e7d168db880870f7bc73f876152130575bd6167"}, - {file = "coverage-6.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f5a4551dfd09c3bd12fca8144d47fe7745275adf3229b7223c2f9e29a975ebda"}, - {file = "coverage-6.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7bff3a98f63b47464480de1b5bdd80c8fade0ba2832c9381253c9b74c4153c27"}, - {file = "coverage-6.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a06c358f4aed05fa1099c39decc8022261bb07dfadc127c08cfbd1391b09689e"}, - {file = "coverage-6.3.1-cp310-cp310-win32.whl", hash = "sha256:9fff3ff052922cb99f9e52f63f985d4f7a54f6b94287463bc66b7cdf3eb41217"}, - {file = "coverage-6.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:276b13cc085474e482566c477c25ed66a097b44c6e77132f3304ac0b039f83eb"}, - {file = "coverage-6.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:56c4a409381ddd7bbff134e9756077860d4e8a583d310a6f38a2315b9ce301d0"}, - {file = "coverage-6.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eb494070aa060ceba6e4bbf44c1bc5fa97bfb883a0d9b0c9049415f9e944793"}, - {file = "coverage-6.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e15d424b8153756b7c903bde6d4610be0c3daca3986173c18dd5c1a1625e4cd"}, - {file = "coverage-6.3.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d47a897c1e91f33f177c21de897267b38fbb45f2cd8e22a710bcef1df09ac1"}, - {file = "coverage-6.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:25e73d4c81efa8ea3785274a2f7f3bfbbeccb6fcba2a0bdd3be9223371c37554"}, - {file = "coverage-6.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fac0bcc5b7e8169bffa87f0dcc24435446d329cbc2b5486d155c2e0f3b493ae1"}, - {file = "coverage-6.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:72128176fea72012063200b7b395ed8a57849282b207321124d7ff14e26988e8"}, - {file = "coverage-6.3.1-cp37-cp37m-win32.whl", hash = "sha256:1bc6d709939ff262fd1432f03f080c5042dc6508b6e0d3d20e61dd045456a1a0"}, - {file = "coverage-6.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:618eeba986cea7f621d8607ee378ecc8c2504b98b3fdc4952b30fe3578304687"}, - {file = "coverage-6.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ed164af5c9078596cfc40b078c3b337911190d3faeac830c3f1274f26b8320"}, - {file = "coverage-6.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:352c68e233409c31048a3725c446a9e48bbff36e39db92774d4f2380d630d8f8"}, - {file = "coverage-6.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:448d7bde7ceb6c69e08474c2ddbc5b4cd13c9e4aa4a717467f716b5fc938a734"}, - {file = 
"coverage-6.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9fde6b90889522c220dd56a670102ceef24955d994ff7af2cb786b4ba8fe11e4"}, - {file = "coverage-6.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e647a0be741edbb529a72644e999acb09f2ad60465f80757da183528941ff975"}, - {file = "coverage-6.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a5cdc3adb4f8bb8d8f5e64c2e9e282bc12980ef055ec6da59db562ee9bdfefa"}, - {file = "coverage-6.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2dd70a167843b4b4b2630c0c56f1b586fe965b4f8ac5da05b6690344fd065c6b"}, - {file = "coverage-6.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9ad0a117b8dc2061ce9461ea4c1b4799e55edceb236522c5b8f958ce9ed8fa9a"}, - {file = "coverage-6.3.1-cp38-cp38-win32.whl", hash = "sha256:e92c7a5f7d62edff50f60a045dc9542bf939758c95b2fcd686175dd10ce0ed10"}, - {file = "coverage-6.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:482fb42eea6164894ff82abbcf33d526362de5d1a7ed25af7ecbdddd28fc124f"}, - {file = "coverage-6.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c5b81fb37db76ebea79aa963b76d96ff854e7662921ce742293463635a87a78d"}, - {file = "coverage-6.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a4f923b9ab265136e57cc14794a15b9dcea07a9c578609cd5dbbfff28a0d15e6"}, - {file = "coverage-6.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56d296cbc8254a7dffdd7bcc2eb70be5a233aae7c01856d2d936f5ac4e8ac1f1"}, - {file = "coverage-6.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245ab82e8554fa88c4b2ab1e098ae051faac5af829efdcf2ce6b34dccd5567c"}, - {file = "coverage-6.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f2b05757c92ad96b33dbf8e8ec8d4ccb9af6ae3c9e9bd141c7cc44d20c6bcba"}, - {file = "coverage-6.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9e3dd806f34de38d4c01416344e98eab2437ac450b3ae39c62a0ede2f8b5e4ed"}, - {file = "coverage-6.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d651fde74a4d3122e5562705824507e2f5b2d3d57557f1916c4b27635f8fbe3f"}, - {file = "coverage-6.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:704f89b87c4f4737da2860695a18c852b78ec7279b24eedacab10b29067d3a38"}, - {file = "coverage-6.3.1-cp39-cp39-win32.whl", hash = "sha256:2aed4761809640f02e44e16b8b32c1a5dee5e80ea30a0ff0912158bde9c501f2"}, - {file = "coverage-6.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:9976fb0a5709988778ac9bc44f3d50fccd989987876dfd7716dee28beed0a9fa"}, - {file = "coverage-6.3.1-pp36.pp37.pp38-none-any.whl", hash = "sha256:463e52616ea687fd323888e86bf25e864a3cc6335a043fad6bbb037dbf49bbe2"}, - {file = "coverage-6.3.1.tar.gz", hash = "sha256:6c3f6158b02ac403868eea390930ae64e9a9a2a5bbfafefbb920d29258d9f2f8"}, + {file = "coverage-6.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b27d894748475fa858f9597c0ee1d4829f44683f3813633aaf94b19cb5453cf"}, + {file = "coverage-6.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37d1141ad6b2466a7b53a22e08fe76994c2d35a5b6b469590424a9953155afac"}, + {file = "coverage-6.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9987b0354b06d4df0f4d3e0ec1ae76d7ce7cbca9a2f98c25041eb79eec766f1"}, + {file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:26e2deacd414fc2f97dd9f7676ee3eaecd299ca751412d89f40bc01557a6b1b4"}, + {file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd8bafa458b5c7d061540f1ee9f18025a68e2d8471b3e858a9dad47c8d41903"}, + {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:46191097ebc381fbf89bdce207a6c107ac4ec0890d8d20f3360345ff5976155c"}, + {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6f89d05e028d274ce4fa1a86887b071ae1755082ef94a6740238cd7a8178804f"}, + {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:58303469e9a272b4abdb9e302a780072c0633cdcc0165db7eec0f9e32f901e05"}, + {file = "coverage-6.3.2-cp310-cp310-win32.whl", hash = "sha256:2fea046bfb455510e05be95e879f0e768d45c10c11509e20e06d8fcaa31d9e39"}, + {file = "coverage-6.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:a2a8b8bcc399edb4347a5ca8b9b87e7524c0967b335fbb08a83c8421489ddee1"}, + {file = "coverage-6.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f1555ea6d6da108e1999b2463ea1003fe03f29213e459145e70edbaf3e004aaa"}, + {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5f4e1edcf57ce94e5475fe09e5afa3e3145081318e5fd1a43a6b4539a97e518"}, + {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a15dc0a14008f1da3d1ebd44bdda3e357dbabdf5a0b5034d38fcde0b5c234b7"}, + {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b7745788866028adeb1e0eca3bf1101109e2dc58456cb49d2d9b99a8c516e6"}, + {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8ce257cac556cb03be4a248d92ed36904a59a4a5ff55a994e92214cde15c5bad"}, + {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b0be84e5a6209858a1d3e8d1806c46214e867ce1b0fd32e4ea03f4bd8b2e3359"}, + {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:acf53bc2cf7282ab9b8ba346746afe703474004d9e566ad164c91a7a59f188a4"}, + {file = "coverage-6.3.2-cp37-cp37m-win32.whl", hash = "sha256:8bdde1177f2311ee552f47ae6e5aa7750c0e3291ca6b75f71f7ffe1f1dab3dca"}, + {file = "coverage-6.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b31651d018b23ec463e95cf10070d0b2c548aa950a03d0b559eaa11c7e5a6fa3"}, + {file = "coverage-6.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07e6db90cd9686c767dcc593dff16c8c09f9814f5e9c51034066cad3373b914d"}, + {file = "coverage-6.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c6dbb42f3ad25760010c45191e9757e7dce981cbfb90e42feef301d71540059"}, + {file = "coverage-6.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c76aeef1b95aff3905fb2ae2d96e319caca5b76fa41d3470b19d4e4a3a313512"}, + {file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cf5cfcb1521dc3255d845d9dca3ff204b3229401994ef8d1984b32746bb45ca"}, + {file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fbbdc8d55990eac1b0919ca69eb5a988a802b854488c34b8f37f3e2025fa90d"}, + {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ec6bc7fe73a938933d4178c9b23c4e0568e43e220aef9472c4f6044bfc6dd0f0"}, + {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:9baff2a45ae1f17c8078452e9e5962e518eab705e50a0aa8083733ea7d45f3a6"}, + {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd9e830e9d8d89b20ab1e5af09b32d33e1a08ef4c4e14411e559556fd788e6b2"}, + {file = "coverage-6.3.2-cp38-cp38-win32.whl", hash = "sha256:f7331dbf301b7289013175087636bbaf5b2405e57259dd2c42fdcc9fcc47325e"}, + {file = "coverage-6.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:68353fe7cdf91f109fc7d474461b46e7f1f14e533e911a2a2cbb8b0fc8613cf1"}, + {file = "coverage-6.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b78e5afb39941572209f71866aa0b206c12f0109835aa0d601e41552f9b3e620"}, + {file = "coverage-6.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e21876082ed887baed0146fe222f861b5815455ada3b33b890f4105d806128d"}, + {file = "coverage-6.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34626a7eee2a3da12af0507780bb51eb52dca0e1751fd1471d0810539cefb536"}, + {file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ebf730d2381158ecf3dfd4453fbca0613e16eaa547b4170e2450c9707665ce7"}, + {file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd6fe30bd519694b356cbfcaca9bd5c1737cddd20778c6a581ae20dc8c04def2"}, + {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96f8a1cb43ca1422f36492bebe63312d396491a9165ed3b9231e778d43a7fca4"}, + {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dd035edafefee4d573140a76fdc785dc38829fe5a455c4bb12bac8c20cfc3d69"}, + {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ca5aeb4344b30d0bec47481536b8ba1181d50dbe783b0e4ad03c95dc1296684"}, + {file = "coverage-6.3.2-cp39-cp39-win32.whl", hash = "sha256:f5fa5803f47e095d7ad8443d28b01d48c0359484fec1b9d8606d0e3282084bc4"}, + {file = "coverage-6.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9548f10d8be799551eb3a9c74bbf2b4934ddb330e08a73320123c07f95cc2d92"}, + {file = "coverage-6.3.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:18d520c6860515a771708937d2f78f63cc47ab3b80cb78e86573b0a760161faf"}, + {file = "coverage-6.3.2.tar.gz", hash = "sha256:03e2a7826086b91ef345ff18742ee9fc47a6839ccd517061ef8fa1976e652ce9"}, ] cryptography = [ - {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:73bc2d3f2444bcfeac67dd130ff2ea598ea5f20b40e36d19821b4df8c9c5037b"}, - {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:2d87cdcb378d3cfed944dac30596da1968f88fb96d7fc34fdae30a99054b2e31"}, - {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74d6c7e80609c0f4c2434b97b80c7f8fdfaa072ca4baab7e239a15d6d70ed73a"}, - {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:6c0c021f35b421ebf5976abf2daacc47e235f8b6082d3396a2fe3ccd537ab173"}, - {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59a9d55027a8b88fd9fd2826c4392bd487d74bf628bb9d39beecc62a644c12"}, - {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a817b961b46894c5ca8a66b599c745b9a3d9f822725221f0e0fe49dc043a3a3"}, - {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:94ae132f0e40fe48f310bba63f477f14a43116f05ddb69d6fa31e93f05848ae2"}, - {file = 
"cryptography-36.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7be0eec337359c155df191d6ae00a5e8bbb63933883f4f5dffc439dac5348c3f"}, - {file = "cryptography-36.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e0344c14c9cb89e76eb6a060e67980c9e35b3f36691e15e1b7a9e58a0a6c6dc3"}, - {file = "cryptography-36.0.1-cp36-abi3-win32.whl", hash = "sha256:4caa4b893d8fad33cf1964d3e51842cd78ba87401ab1d2e44556826df849a8ca"}, - {file = "cryptography-36.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:391432971a66cfaf94b21c24ab465a4cc3e8bf4a939c1ca5c3e3a6e0abebdbcf"}, - {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bb5829d027ff82aa872d76158919045a7c1e91fbf241aec32cb07956e9ebd3c9"}, - {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc15b1c22e55c4d5566e3ca4db8689470a0ca2babef8e3a9ee057a8b82ce4b1"}, - {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:596f3cd67e1b950bc372c33f1a28a0692080625592ea6392987dba7f09f17a94"}, - {file = "cryptography-36.0.1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:30ee1eb3ebe1644d1c3f183d115a8c04e4e603ed6ce8e394ed39eea4a98469ac"}, - {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec63da4e7e4a5f924b90af42eddf20b698a70e58d86a72d943857c4c6045b3ee"}, - {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca238ceb7ba0bdf6ce88c1b74a87bffcee5afbfa1e41e173b1ceb095b39add46"}, - {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:ca28641954f767f9822c24e927ad894d45d5a1e501767599647259cbf030b903"}, - {file = "cryptography-36.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:39bdf8e70eee6b1c7b289ec6e5d84d49a6bfa11f8b8646b5b3dfe41219153316"}, - {file = "cryptography-36.0.1.tar.gz", hash = "sha256:53e5c1dc3d7a953de055d77bef2ff607ceef7a2aac0353b5d630ab67f7423638"}, + {file = "cryptography-36.0.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:4e2dddd38a5ba733be6a025a1475a9f45e4e41139d1321f412c6b360b19070b6"}, + {file = "cryptography-36.0.2-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:4881d09298cd0b669bb15b9cfe6166f16fc1277b4ed0d04a22f3d6430cb30f1d"}, + {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea634401ca02367c1567f012317502ef3437522e2fc44a3ea1844de028fa4b84"}, + {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7be666cc4599b415f320839e36367b273db8501127b38316f3b9f22f17a0b815"}, + {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8241cac0aae90b82d6b5c443b853723bcc66963970c67e56e71a2609dc4b5eaf"}, + {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2d54e787a884ffc6e187262823b6feb06c338084bbe80d45166a1cb1c6c5bf"}, + {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:c2c5250ff0d36fd58550252f54915776940e4e866f38f3a7866d92b32a654b86"}, + {file = "cryptography-36.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ec6597aa85ce03f3e507566b8bcdf9da2227ec86c4266bd5e6ab4d9e0cc8dab2"}, + {file = "cryptography-36.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ca9f686517ec2c4a4ce930207f75c00bf03d94e5063cbc00a1dc42531511b7eb"}, + {file = 
"cryptography-36.0.2-cp36-abi3-win32.whl", hash = "sha256:f64b232348ee82f13aac22856515ce0195837f6968aeaa94a3d0353ea2ec06a6"}, + {file = "cryptography-36.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:53e0285b49fd0ab6e604f4c5d9c5ddd98de77018542e88366923f152dbeb3c29"}, + {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:32db5cc49c73f39aac27574522cecd0a4bb7384e71198bc65a0d23f901e89bb7"}, + {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b3d199647468d410994dbeb8cec5816fb74feb9368aedf300af709ef507e3e"}, + {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:da73d095f8590ad437cd5e9faf6628a218aa7c387e1fdf67b888b47ba56a17f0"}, + {file = "cryptography-36.0.2-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:0a3bf09bb0b7a2c93ce7b98cb107e9170a90c51a0162a20af1c61c765b90e60b"}, + {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8897b7b7ec077c819187a123174b645eb680c13df68354ed99f9b40a50898f77"}, + {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82740818f2f240a5da8dfb8943b360e4f24022b093207160c77cadade47d7c85"}, + {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:1f64a62b3b75e4005df19d3b5235abd43fa6358d5516cfc43d87aeba8d08dd51"}, + {file = "cryptography-36.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e167b6b710c7f7bc54e67ef593f8731e1f45aa35f8a8a7b72d6e42ec76afd4b3"}, + {file = "cryptography-36.0.2.tar.gz", hash = "sha256:70f8f4f7bb2ac9f340655cbac89d68c527af5bb4387522a8413e841e3e6628c9"}, ] cx-freeze = [ {file = "cx_Freeze-6.9-cp310-cp310-win32.whl", hash = "sha256:776d4fb68a4831691acbd3c374362b9b48ce2e568514a73c3d4cb14d5dcf1470"}, @@ -1946,25 +1987,29 @@ cx-logging = [ {file = "cx_Logging-3.0-cp39-cp39-win_amd64.whl", hash = "sha256:302e9c4f65a936c288a4fa59a90e7e142d9ef994aa29676731acafdcccdbb3f5"}, {file = "cx_Logging-3.0.tar.gz", hash = "sha256:ba8a7465facf7b98d8f494030fb481a2e8aeee29dc191e10383bb54ed42bdb34"}, ] +dill = [ + {file = "dill-0.3.4-py2.py3-none-any.whl", hash = "sha256:7e40e4a70304fd9ceab3535d36e58791d9c4a776b38ec7f7ec9afc8d3dca4d4f"}, + {file = "dill-0.3.4.zip", hash = "sha256:9f9734205146b2b353ab3fec9af0070237b6ddae78452af83d2fca84d739e675"}, +] dnspython = [ - {file = "dnspython-2.2.0-py3-none-any.whl", hash = "sha256:081649da27ced5e75709a1ee542136eaba9842a0fe4c03da4fb0a3d3ed1f3c44"}, - {file = "dnspython-2.2.0.tar.gz", hash = "sha256:e79351e032d0b606b98d38a4b0e6e2275b31a5b85c873e587cc11b73aca026d6"}, + {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, + {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, ] docutils = [ - {file = "docutils-0.18.1-py2.py3-none-any.whl", hash = "sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c"}, - {file = "docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"}, + {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, + {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] dropbox = [ - {file = "dropbox-11.26.0-py2-none-any.whl", hash = 
"sha256:abd29587fa692bde0c3a48ce8efb56c24d8df92c5f402bbcdd78f3089d5ced5c"}, - {file = "dropbox-11.26.0-py3-none-any.whl", hash = "sha256:84becf043a63007ae67620946acb0fa164669ce691c7f1dbfdabb6712a258b29"}, - {file = "dropbox-11.26.0.tar.gz", hash = "sha256:dd79e3dfc216688020959462aefac54ffce7c07a45e89f4fb258348885195a73"}, + {file = "dropbox-11.29.0-py2-none-any.whl", hash = "sha256:2200ad5f42e00ae00d45db4a050fa199fe701ddc979fd1396d2c3e8912476c60"}, + {file = "dropbox-11.29.0-py3-none-any.whl", hash = "sha256:bf81a822e662bd337f4cd33fe39580c0b6ee4781d018ef1b31dcef2f402986f2"}, + {file = "dropbox-11.29.0.tar.gz", hash = "sha256:09b59f962ac28ce5b80d5f870c00c5fe7a637c4ac8d095c7c72fdab1e07376fc"}, ] enlighten = [ {file = "enlighten-1.10.2-py2.py3-none-any.whl", hash = "sha256:b237fe562b320bf9f1d4bb76d0c98e0daf914372a76ab87c35cd02f57aa9d8c1"}, {file = "enlighten-1.10.2.tar.gz", hash = "sha256:7a5b83cd0f4d095e59d80c648ebb5f7ffca0cd8bcf7ae6639828ee1ad000632a"}, ] evdev = [ - {file = "evdev-1.4.0.tar.gz", hash = "sha256:8782740eb1a86b187334c07feb5127d3faa0b236e113206dfe3ae8f77fb1aaf1"}, + {file = "evdev-1.5.0.tar.gz", hash = "sha256:5b33b174f7c84576e7dd6071e438bf5ad227da95efd4356a39fe4c8355412fe6"}, ] flake8 = [ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, @@ -2043,32 +2088,32 @@ gitdb = [ {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, ] gitpython = [ - {file = "GitPython-3.1.26-py3-none-any.whl", hash = "sha256:26ac35c212d1f7b16036361ca5cff3ec66e11753a0d677fb6c48fa4e1a9dd8d6"}, - {file = "GitPython-3.1.26.tar.gz", hash = "sha256:fc8868f63a2e6d268fb25f481995ba185a85a66fcad126f039323ff6635669ee"}, + {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, + {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, ] google-api-core = [ - {file = "google-api-core-2.4.0.tar.gz", hash = "sha256:ba8787b7c61632cd0340f095e1c036bef9426b2594f10afb290ba311ae8cb2cb"}, - {file = "google_api_core-2.4.0-py2.py3-none-any.whl", hash = "sha256:58e2c1171a3d51778bf4e428fbb4bf79cbd05007b4b44deaa80cf73c80eebc0f"}, + {file = "google-api-core-2.7.1.tar.gz", hash = "sha256:b0fa577e512f0c8e063386b974718b8614586a798c5894ed34bedf256d9dae24"}, + {file = "google_api_core-2.7.1-py3-none-any.whl", hash = "sha256:6be1fc59e2a7ba9f66808bbc22f976f81e4c3e7ab20fa0620ce42686288787d0"}, ] google-api-python-client = [ - {file = "google-api-python-client-1.12.10.tar.gz", hash = "sha256:1cb773647e7d97048d9d1c7fa746247fbad39fd1a3b5040f2cb2645dd7156b11"}, - {file = "google_api_python_client-1.12.10-py2.py3-none-any.whl", hash = "sha256:5a8742b9b604b34e13462cc3d6aedbbf11d3af1ef558eb95defe74a29ebc5c28"}, + {file = "google-api-python-client-1.12.11.tar.gz", hash = "sha256:1b4bd42a46321e13c0542a9e4d96fa05d73626f07b39f83a73a947d70ca706a9"}, + {file = "google_api_python_client-1.12.11-py2.py3-none-any.whl", hash = "sha256:7e0a1a265c8d3088ee1987778c72683fcb376e32bada8d7767162bd9c503fd9b"}, ] google-auth = [ - {file = "google-auth-2.6.0.tar.gz", hash = "sha256:ad160fc1ea8f19e331a16a14a79f3d643d813a69534ba9611d2c80dc10439dad"}, - {file = "google_auth-2.6.0-py2.py3-none-any.whl", hash = "sha256:218ca03d7744ca0c8b6697b6083334be7df49b7bf76a69d555962fd1a7657b5f"}, + {file = "google-auth-2.6.3.tar.gz", hash = 
"sha256:d65bb0e3701eaaa64fd2aa85e1325580524b0bddc6dc5db3ab89c481b6a20141"}, + {file = "google_auth-2.6.3-py2.py3-none-any.whl", hash = "sha256:5e079eb4d21df1853d55cf2b6766b77ef36f7f7bdaf7d4a70434aa97c7578d60"}, ] google-auth-httplib2 = [ {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, {file = "google_auth_httplib2-0.1.0-py2.py3-none-any.whl", hash = "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10"}, ] googleapis-common-protos = [ - {file = "googleapis-common-protos-1.54.0.tar.gz", hash = "sha256:a4031d6ec6c2b1b6dc3e0be7e10a1bd72fb0b18b07ef9be7b51f2c1004ce2437"}, - {file = "googleapis_common_protos-1.54.0-py2.py3-none-any.whl", hash = "sha256:e54345a2add15dc5e1a7891c27731ff347b4c33765d79b5ed7026a6c0c7cbcae"}, + {file = "googleapis-common-protos-1.56.0.tar.gz", hash = "sha256:4007500795bcfc269d279f0f7d253ae18d6dc1ff5d5a73613ffe452038b1ec5f"}, + {file = "googleapis_common_protos-1.56.0-py2.py3-none-any.whl", hash = "sha256:60220c89b8bd5272159bed4929ecdc1243ae1f73437883a499a44a1cbc084086"}, ] httplib2 = [ - {file = "httplib2-0.20.2-py3-none-any.whl", hash = "sha256:6b937120e7d786482881b44b8eec230c1ee1c5c1d06bce8cc865f25abbbf713b"}, - {file = "httplib2-0.20.2.tar.gz", hash = "sha256:e404681d2fbcec7506bcb52c503f2b021e95bee0ef7d01e5c221468a2406d8dc"}, + {file = "httplib2-0.20.4-py3-none-any.whl", hash = "sha256:8b6a905cb1c79eefd03f8669fd993c36dc341f7c558f056cb5a33b5c2f458543"}, + {file = "httplib2-0.20.4.tar.gz", hash = "sha256:58a98e45b4b1a48273073f905d2961666ecf0fbac4250ea5b47aef259eb5c585"}, ] idna = [ {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, @@ -2079,8 +2124,8 @@ imagesize = [ {file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.10.1-py3-none-any.whl", hash = "sha256:899e2a40a8c4a1aec681feef45733de8a6c58f3f6a0dbed2eb6574b4387a77b6"}, - {file = "importlib_metadata-4.10.1.tar.gz", hash = "sha256:951f0d8a5b7260e9db5e41d429285b5f451e928479f19d80818878527d36e95e"}, + {file = "importlib_metadata-4.11.3-py3-none-any.whl", hash = "sha256:1208431ca90a8cca1a6b8af391bb53c1a2db74e5d1cef6ddced95d4b2062edc6"}, + {file = "importlib_metadata-4.11.3.tar.gz", hash = "sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, @@ -2095,8 +2140,8 @@ jedi = [ {file = "jedi-0.13.3.tar.gz", hash = "sha256:2bb0603e3506f708e792c7f4ad8fc2a7a9d9c2d292a358fbbd58da531695595b"}, ] jeepney = [ - {file = "jeepney-0.7.1-py3-none-any.whl", hash = "sha256:1b5a0ea5c0e7b166b2f5895b91a08c14de8915afda4407fb5022a195224958ac"}, - {file = "jeepney-0.7.1.tar.gz", hash = "sha256:fa9e232dfa0c498bd0b8a3a73b8d8a31978304dcef0515adc859d4e096f96f4f"}, + {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, + {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, ] jinja2 = [ {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, @@ -2157,75 +2202,46 @@ log4mongo = [ {file = "log4mongo-1.7.0.tar.gz", hash = 
"sha256:dc374617206162a0b14167fbb5feac01dbef587539a235dadba6200362984a68"}, ] markupsafe = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = 
"sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = 
"sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, - {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, ] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, @@ -2298,57 +2314,60 @@ packaging = [ {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] paramiko = [ - {file = "paramiko-2.10.1-py2.py3-none-any.whl", hash = "sha256:f6cbd3e1204abfdbcd40b3ecbc9d32f04027cd3080fe666245e21e7540ccfc1b"}, - {file = "paramiko-2.10.1.tar.gz", hash = 
"sha256:443f4da23ec24e9a9c0ea54017829c282abdda1d57110bf229360775ccd27a31"}, + {file = "paramiko-2.10.3-py2.py3-none-any.whl", hash = "sha256:ac6593479f2b47a9422eca076b22cff9f795495e6733a64723efc75dd8c92101"}, + {file = "paramiko-2.10.3.tar.gz", hash = "sha256:ddb1977853aef82804b35d72a0e597b244fa326c404c350bd00c5b01dbfee71a"}, ] parso = [ {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, ] pathlib2 = [ - {file = "pathlib2-2.3.6-py2.py3-none-any.whl", hash = "sha256:3a130b266b3a36134dcc79c17b3c7ac9634f083825ca6ea9d8f557ee6195c9c8"}, - {file = "pathlib2-2.3.6.tar.gz", hash = "sha256:7d8bcb5555003cdf4a8d2872c538faa3a0f5d20630cb360e518ca3b981795e5f"}, + {file = "pathlib2-2.3.7.post1-py2.py3-none-any.whl", hash = "sha256:5266a0fd000452f1b3467d782f079a4343c63aaa119221fbdc4e39577489ca5b"}, + {file = "pathlib2-2.3.7.post1.tar.gz", hash = "sha256:9fe0edad898b83c0c3e199c842b27ed216645d2e177757b2dd67384d4113c641"}, ] pillow = [ - {file = "Pillow-9.0.1-1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a5d24e1d674dd9d72c66ad3ea9131322819ff86250b30dc5821cbafcfa0b96b4"}, - {file = "Pillow-9.0.1-1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2632d0f846b7c7600edf53c48f8f9f1e13e62f66a6dbc15191029d950bfed976"}, - {file = "Pillow-9.0.1-1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9618823bd237c0d2575283f2939655f54d51b4527ec3972907a927acbcc5bfc"}, - {file = "Pillow-9.0.1-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:9bfdb82cdfeccec50aad441afc332faf8606dfa5e8efd18a6692b5d6e79f00fd"}, - {file = "Pillow-9.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5100b45a4638e3c00e4d2320d3193bdabb2d75e79793af7c3eb139e4f569f16f"}, - {file = "Pillow-9.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:528a2a692c65dd5cafc130de286030af251d2ee0483a5bf50c9348aefe834e8a"}, - {file = "Pillow-9.0.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f29d831e2151e0b7b39981756d201f7108d3d215896212ffe2e992d06bfe049"}, - {file = "Pillow-9.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:855c583f268edde09474b081e3ddcd5cf3b20c12f26e0d434e1386cc5d318e7a"}, - {file = "Pillow-9.0.1-cp310-cp310-win32.whl", hash = "sha256:d9d7942b624b04b895cb95af03a23407f17646815495ce4547f0e60e0b06f58e"}, - {file = "Pillow-9.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:81c4b81611e3a3cb30e59b0cf05b888c675f97e3adb2c8672c3154047980726b"}, - {file = "Pillow-9.0.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:413ce0bbf9fc6278b2d63309dfeefe452835e1c78398efb431bab0672fe9274e"}, - {file = "Pillow-9.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80fe64a6deb6fcfdf7b8386f2cf216d329be6f2781f7d90304351811fb591360"}, - {file = "Pillow-9.0.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cef9c85ccbe9bee00909758936ea841ef12035296c748aaceee535969e27d31b"}, - {file = "Pillow-9.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d19397351f73a88904ad1aee421e800fe4bbcd1aeee6435fb62d0a05ccd1030"}, - {file = "Pillow-9.0.1-cp37-cp37m-win32.whl", hash = "sha256:d21237d0cd37acded35154e29aec853e945950321dd2ffd1a7d86fe686814669"}, - {file = "Pillow-9.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ede5af4a2702444a832a800b8eb7f0a7a1c0eed55b644642e049c98d589e5092"}, - {file = 
"Pillow-9.0.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:b5b3f092fe345c03bca1e0b687dfbb39364b21ebb8ba90e3fa707374b7915204"}, - {file = "Pillow-9.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:335ace1a22325395c4ea88e00ba3dc89ca029bd66bd5a3c382d53e44f0ccd77e"}, - {file = "Pillow-9.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db6d9fac65bd08cea7f3540b899977c6dee9edad959fa4eaf305940d9cbd861c"}, - {file = "Pillow-9.0.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f154d173286a5d1863637a7dcd8c3437bb557520b01bddb0be0258dcb72696b5"}, - {file = "Pillow-9.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d4b1341ac07ae07eb2cc682f459bec932a380c3b122f5540432d8977e64eae"}, - {file = "Pillow-9.0.1-cp38-cp38-win32.whl", hash = "sha256:effb7749713d5317478bb3acb3f81d9d7c7f86726d41c1facca068a04cf5bb4c"}, - {file = "Pillow-9.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:7f7609a718b177bf171ac93cea9fd2ddc0e03e84d8fa4e887bdfc39671d46b00"}, - {file = "Pillow-9.0.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:80ca33961ced9c63358056bd08403ff866512038883e74f3a4bf88ad3eb66838"}, - {file = "Pillow-9.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c3c33ac69cf059bbb9d1a71eeaba76781b450bc307e2291f8a4764d779a6b28"}, - {file = "Pillow-9.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12875d118f21cf35604176872447cdb57b07126750a33748bac15e77f90f1f9c"}, - {file = "Pillow-9.0.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:514ceac913076feefbeaf89771fd6febde78b0c4c1b23aaeab082c41c694e81b"}, - {file = "Pillow-9.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3c5c79ab7dfce6d88f1ba639b77e77a17ea33a01b07b99840d6ed08031cb2a7"}, - {file = "Pillow-9.0.1-cp39-cp39-win32.whl", hash = "sha256:718856856ba31f14f13ba885ff13874be7fefc53984d2832458f12c38205f7f7"}, - {file = "Pillow-9.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:f25ed6e28ddf50de7e7ea99d7a976d6a9c415f03adcaac9c41ff6ff41b6d86ac"}, - {file = "Pillow-9.0.1-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:011233e0c42a4a7836498e98c1acf5e744c96a67dd5032a6f666cc1fb97eab97"}, - {file = "Pillow-9.0.1-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:253e8a302a96df6927310a9d44e6103055e8fb96a6822f8b7f514bb7ef77de56"}, - {file = "Pillow-9.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6295f6763749b89c994fcb6d8a7f7ce03c3992e695f89f00b741b4580b199b7e"}, - {file = "Pillow-9.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a9f44cd7e162ac6191491d7249cceb02b8116b0f7e847ee33f739d7cb1ea1f70"}, - {file = "Pillow-9.0.1.tar.gz", hash = "sha256:6c8bc8238a7dfdaf7a75f5ec5a663f4173f8c367e5a39f87e720495e1eed75fa"}, + {file = "Pillow-9.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:af79d3fde1fc2e33561166d62e3b63f0cc3e47b5a3a2e5fea40d4917754734ea"}, + {file = "Pillow-9.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:55dd1cf09a1fd7c7b78425967aacae9b0d70125f7d3ab973fadc7b5abc3de652"}, + {file = "Pillow-9.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66822d01e82506a19407d1afc104c3fcea3b81d5eb11485e593ad6b8492f995a"}, + {file = "Pillow-9.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5eaf3b42df2bcda61c53a742ee2c6e63f777d0e085bbc6b2ab7ed57deb13db7"}, + {file = "Pillow-9.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:01ce45deec9df310cbbee11104bae1a2a43308dd9c317f99235b6d3080ddd66e"}, + {file = "Pillow-9.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aea7ce61328e15943d7b9eaca87e81f7c62ff90f669116f857262e9da4057ba3"}, + {file = "Pillow-9.1.0-cp310-cp310-win32.whl", hash = "sha256:7a053bd4d65a3294b153bdd7724dce864a1d548416a5ef61f6d03bf149205160"}, + {file = "Pillow-9.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:97bda660702a856c2c9e12ec26fc6d187631ddfd896ff685814ab21ef0597033"}, + {file = "Pillow-9.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:21dee8466b42912335151d24c1665fcf44dc2ee47e021d233a40c3ca5adae59c"}, + {file = "Pillow-9.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b6d4050b208c8ff886fd3db6690bf04f9a48749d78b41b7a5bf24c236ab0165"}, + {file = "Pillow-9.1.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5cfca31ab4c13552a0f354c87fbd7f162a4fafd25e6b521bba93a57fe6a3700a"}, + {file = "Pillow-9.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed742214068efa95e9844c2d9129e209ed63f61baa4d54dbf4cf8b5e2d30ccf2"}, + {file = "Pillow-9.1.0-cp37-cp37m-win32.whl", hash = "sha256:c9efef876c21788366ea1f50ecb39d5d6f65febe25ad1d4c0b8dff98843ac244"}, + {file = "Pillow-9.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:de344bcf6e2463bb25179d74d6e7989e375f906bcec8cb86edb8b12acbc7dfef"}, + {file = "Pillow-9.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:17869489de2fce6c36690a0c721bd3db176194af5f39249c1ac56d0bb0fcc512"}, + {file = "Pillow-9.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:25023a6209a4d7c42154073144608c9a71d3512b648a2f5d4465182cb93d3477"}, + {file = "Pillow-9.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8782189c796eff29dbb37dd87afa4ad4d40fc90b2742704f94812851b725964b"}, + {file = "Pillow-9.1.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:463acf531f5d0925ca55904fa668bb3461c3ef6bc779e1d6d8a488092bdee378"}, + {file = "Pillow-9.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f42364485bfdab19c1373b5cd62f7c5ab7cc052e19644862ec8f15bb8af289e"}, + {file = "Pillow-9.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3fddcdb619ba04491e8f771636583a7cc5a5051cd193ff1aa1ee8616d2a692c5"}, + {file = "Pillow-9.1.0-cp38-cp38-win32.whl", hash = "sha256:4fe29a070de394e449fd88ebe1624d1e2d7ddeed4c12e0b31624561b58948d9a"}, + {file = "Pillow-9.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:c24f718f9dd73bb2b31a6201e6db5ea4a61fdd1d1c200f43ee585fc6dcd21b34"}, + {file = "Pillow-9.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fb89397013cf302f282f0fc998bb7abf11d49dcff72c8ecb320f76ea6e2c5717"}, + {file = "Pillow-9.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c870193cce4b76713a2b29be5d8327c8ccbe0d4a49bc22968aa1e680930f5581"}, + {file = "Pillow-9.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69e5ddc609230d4408277af135c5b5c8fe7a54b2bdb8ad7c5100b86b3aab04c6"}, + {file = "Pillow-9.1.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35be4a9f65441d9982240e6966c1eaa1c654c4e5e931eaf580130409e31804d4"}, + {file = "Pillow-9.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82283af99c1c3a5ba1da44c67296d5aad19f11c535b551a5ae55328a317ce331"}, + {file = "Pillow-9.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a325ac71914c5c043fa50441b36606e64a10cd262de12f7a179620f579752ff8"}, + {file = "Pillow-9.1.0-cp39-cp39-win32.whl", hash = 
"sha256:a598d8830f6ef5501002ae85c7dbfcd9c27cc4efc02a1989369303ba85573e58"}, + {file = "Pillow-9.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0c51cb9edac8a5abd069fd0758ac0a8bfe52c261ee0e330f363548aca6893595"}, + {file = "Pillow-9.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a336a4f74baf67e26f3acc4d61c913e378e931817cd1e2ef4dfb79d3e051b481"}, + {file = "Pillow-9.1.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb1b89b11256b5b6cad5e7593f9061ac4624f7651f7a8eb4dfa37caa1dfaa4d0"}, + {file = "Pillow-9.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:255c9d69754a4c90b0ee484967fc8818c7ff8311c6dddcc43a4340e10cd1636a"}, + {file = "Pillow-9.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5a3ecc026ea0e14d0ad7cd990ea7f48bfcb3eb4271034657dc9d06933c6629a7"}, + {file = "Pillow-9.1.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5b0ff59785d93b3437c3703e3c64c178aabada51dea2a7f2c5eccf1bcf565a3"}, + {file = "Pillow-9.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7110ec1701b0bf8df569a7592a196c9d07c764a0a74f65471ea56816f10e2c8"}, + {file = "Pillow-9.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8d79c6f468215d1a8415aa53d9868a6b40c4682165b8cb62a221b1baa47db458"}, + {file = "Pillow-9.1.0.tar.gz", hash = "sha256:f401ed2bbb155e1ade150ccc63db1a4f6c1909d3d378f7d1235a44e90d75fb97"}, ] platformdirs = [ - {file = "platformdirs-2.4.1-py3-none-any.whl", hash = "sha256:1d7385c7db91728b83efd0ca99a5afb296cab9d0ed8313a45ed8ba17967ecfca"}, - {file = "platformdirs-2.4.1.tar.gz", hash = "sha256:440633ddfebcc36264232365d7840a970e75e1018d15b4327d11f91909045fda"}, + {file = "platformdirs-2.5.1-py3-none-any.whl", hash = "sha256:bcae7cab893c2d310a711b70b24efb93334febe65f8de776ee320b517471e227"}, + {file = "platformdirs-2.5.1.tar.gz", hash = "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, @@ -2363,32 +2382,30 @@ prefixed = [ {file = "prefixed-0.3.2.tar.gz", hash = "sha256:ca48277ba5fa8346dd4b760847da930c7b84416387c39e93affef086add2c029"}, ] protobuf = [ - {file = "protobuf-3.19.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f51d5a9f137f7a2cec2d326a74b6e3fc79d635d69ffe1b036d39fc7d75430d37"}, - {file = "protobuf-3.19.4-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:09297b7972da685ce269ec52af761743714996b4381c085205914c41fcab59fb"}, - {file = "protobuf-3.19.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072fbc78d705d3edc7ccac58a62c4c8e0cec856987da7df8aca86e647be4e35c"}, - {file = "protobuf-3.19.4-cp310-cp310-win32.whl", hash = "sha256:7bb03bc2873a2842e5ebb4801f5c7ff1bfbdf426f85d0172f7644fcda0671ae0"}, - {file = "protobuf-3.19.4-cp310-cp310-win_amd64.whl", hash = "sha256:f358aa33e03b7a84e0d91270a4d4d8f5df6921abe99a377828839e8ed0c04e07"}, - {file = "protobuf-3.19.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1c91ef4110fdd2c590effb5dca8fdbdcb3bf563eece99287019c4204f53d81a4"}, - {file = "protobuf-3.19.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c438268eebb8cf039552897d78f402d734a404f1360592fef55297285f7f953f"}, - {file = "protobuf-3.19.4-cp36-cp36m-win32.whl", hash = "sha256:835a9c949dc193953c319603b2961c5c8f4327957fe23d914ca80d982665e8ee"}, - {file = "protobuf-3.19.4-cp36-cp36m-win_amd64.whl", hash = 
"sha256:4276cdec4447bd5015453e41bdc0c0c1234eda08420b7c9a18b8d647add51e4b"}, - {file = "protobuf-3.19.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6cbc312be5e71869d9d5ea25147cdf652a6781cf4d906497ca7690b7b9b5df13"}, - {file = "protobuf-3.19.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:54a1473077f3b616779ce31f477351a45b4fef8c9fd7892d6d87e287a38df368"}, - {file = "protobuf-3.19.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:435bb78b37fc386f9275a7035fe4fb1364484e38980d0dd91bc834a02c5ec909"}, - {file = "protobuf-3.19.4-cp37-cp37m-win32.whl", hash = "sha256:16f519de1313f1b7139ad70772e7db515b1420d208cb16c6d7858ea989fc64a9"}, - {file = "protobuf-3.19.4-cp37-cp37m-win_amd64.whl", hash = "sha256:cdc076c03381f5c1d9bb1abdcc5503d9ca8b53cf0a9d31a9f6754ec9e6c8af0f"}, - {file = "protobuf-3.19.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:69da7d39e39942bd52848438462674c463e23963a1fdaa84d88df7fbd7e749b2"}, - {file = "protobuf-3.19.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:48ed3877fa43e22bcacc852ca76d4775741f9709dd9575881a373bd3e85e54b2"}, - {file = "protobuf-3.19.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd95d1dfb9c4f4563e6093a9aa19d9c186bf98fa54da5252531cc0d3a07977e7"}, - {file = "protobuf-3.19.4-cp38-cp38-win32.whl", hash = "sha256:b38057450a0c566cbd04890a40edf916db890f2818e8682221611d78dc32ae26"}, - {file = "protobuf-3.19.4-cp38-cp38-win_amd64.whl", hash = "sha256:7ca7da9c339ca8890d66958f5462beabd611eca6c958691a8fe6eccbd1eb0c6e"}, - {file = "protobuf-3.19.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:36cecbabbda242915529b8ff364f2263cd4de7c46bbe361418b5ed859677ba58"}, - {file = "protobuf-3.19.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c1068287025f8ea025103e37d62ffd63fec8e9e636246b89c341aeda8a67c934"}, - {file = "protobuf-3.19.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96bd766831596d6014ca88d86dc8fe0fb2e428c0b02432fd9db3943202bf8c5e"}, - {file = "protobuf-3.19.4-cp39-cp39-win32.whl", hash = "sha256:84123274d982b9e248a143dadd1b9815049f4477dc783bf84efe6250eb4b836a"}, - {file = "protobuf-3.19.4-cp39-cp39-win_amd64.whl", hash = "sha256:3112b58aac3bac9c8be2b60a9daf6b558ca3f7681c130dcdd788ade7c9ffbdca"}, - {file = "protobuf-3.19.4-py2.py3-none-any.whl", hash = "sha256:8961c3a78ebfcd000920c9060a262f082f29838682b1f7201889300c1fbe0616"}, - {file = "protobuf-3.19.4.tar.gz", hash = "sha256:9df0c10adf3e83015ced42a9a7bd64e13d06c4cf45c340d2c63020ea04499d0a"}, + {file = "protobuf-3.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9d0f3aca8ca51c8b5e204ab92bd8afdb2a8e3df46bd0ce0bd39065d79aabcaa4"}, + {file = "protobuf-3.20.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:001c2160c03b6349c04de39cf1a58e342750da3632f6978a1634a3dcca1ec10e"}, + {file = "protobuf-3.20.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b5860b790498f233cdc8d635a17fc08de62e59d4dcd8cdb6c6c0d38a31edf2b"}, + {file = "protobuf-3.20.0-cp310-cp310-win32.whl", hash = "sha256:0b250c60256c8824219352dc2a228a6b49987e5bf94d3ffcf4c46585efcbd499"}, + {file = "protobuf-3.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:a1eebb6eb0653e594cb86cd8e536b9b083373fca9aba761ade6cd412d46fb2ab"}, + {file = "protobuf-3.20.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:bc14037281db66aa60856cd4ce4541a942040686d290e3f3224dd3978f88f554"}, + {file = "protobuf-3.20.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:47257d932de14a7b6c4ae1b7dbf592388153ee35ec7cae216b87ae6490ed39a3"}, + {file = "protobuf-3.20.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fbcbb068ebe67c4ff6483d2e2aa87079c325f8470b24b098d6bf7d4d21d57a69"}, + {file = "protobuf-3.20.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:542f25a4adf3691a306dcc00bf9a73176554938ec9b98f20f929a044f80acf1b"}, + {file = "protobuf-3.20.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fd7133b885e356fa4920ead8289bb45dc6f185a164e99e10279f33732ed5ce15"}, + {file = "protobuf-3.20.0-cp37-cp37m-win32.whl", hash = "sha256:8d84453422312f8275455d1cb52d850d6a4d7d714b784e41b573c6f5bfc2a029"}, + {file = "protobuf-3.20.0-cp37-cp37m-win_amd64.whl", hash = "sha256:52bae32a147c375522ce09bd6af4d2949aca32a0415bc62df1456b3ad17c6001"}, + {file = "protobuf-3.20.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25d2fcd6eef340082718ec9ad2c58d734429f2b1f7335d989523852f2bba220b"}, + {file = "protobuf-3.20.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:88c8be0558bdfc35e68c42ae5bf785eb9390d25915d4863bbc7583d23da77074"}, + {file = "protobuf-3.20.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:38fd9eb74b852e4ee14b16e9670cd401d147ee3f3ec0d4f7652e0c921d6227f8"}, + {file = "protobuf-3.20.0-cp38-cp38-win32.whl", hash = "sha256:7dcd84dc31ebb35ade755e06d1561d1bd3b85e85dbdbf6278011fc97b22810db"}, + {file = "protobuf-3.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:1eb13f5a5a59ca4973bcfa2fc8fff644bd39f2109c3f7a60bd5860cb6a49b679"}, + {file = "protobuf-3.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d24c81c2310f0063b8fc1c20c8ed01f3331be9374b4b5c2de846f69e11e21fb"}, + {file = "protobuf-3.20.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:8be43a91ab66fe995e85ccdbdd1046d9f0443d59e060c0840319290de25b7d33"}, + {file = "protobuf-3.20.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7a53d4035427b9dbfbb397f46642754d294f131e93c661d056366f2a31438263"}, + {file = "protobuf-3.20.0-cp39-cp39-win32.whl", hash = "sha256:32bf4a90c207a0b4e70ca6dd09d43de3cb9898f7d5b69c2e9e3b966a7f342820"}, + {file = "protobuf-3.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:6efe066a7135233f97ce51a1aa007d4fb0be28ef093b4f88dac4ad1b3a2b7b6f"}, + {file = "protobuf-3.20.0-py2.py3-none-any.whl", hash = "sha256:4eda68bd9e2a4879385e6b1ea528c976f59cd9728382005cc54c28bcce8db983"}, + {file = "protobuf-3.20.0.tar.gz", hash = "sha256:71b2c3d1cd26ed1ec7c8196834143258b2ad7f444efff26fdc366c6f5e752702"}, ] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, @@ -2453,8 +2470,8 @@ pygments = [ {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, ] pylint = [ - {file = "pylint-2.12.2-py3-none-any.whl", hash = "sha256:daabda3f7ed9d1c60f52d563b1b854632fd90035bcf01443e234d3dc794e3b74"}, - {file = "pylint-2.12.2.tar.gz", hash = "sha256:9d945a73640e1fec07ee34b42f5669b770c759acd536ec7b16d7e4b87a9c9ff9"}, + {file = "pylint-2.13.5-py3-none-any.whl", hash = "sha256:c149694cfdeaee1aa2465e6eaab84c87a881a7d55e6e93e09466be7164764d1e"}, + {file = "pylint-2.13.5.tar.gz", hash = "sha256:dab221658368c7a05242e673c275c488670144123f4bd262b2777249c1c0de9b"}, ] pymongo = [ {file = "pymongo-3.12.3-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:c164eda0be9048f83c24b9b2656900041e069ddf72de81c17d874d0c32f6079f"}, @@ -2583,40 +2600,40 @@ pynput = [ {file = "pynput-1.7.6.tar.gz", hash = 
"sha256:3a5726546da54116b687785d38b1db56997ce1d28e53e8d22fc656d8b92e533c"}, ] pyobjc-core = [ - {file = "pyobjc-core-8.2.tar.gz", hash = "sha256:6afb8ee1dd0647cbfaaf99906eca3b43ce045b27e3d4510462d04e7e5361c89b"}, - {file = "pyobjc_core-8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:311e45556c3afa8831713b89017e0204562f51f35661d76c07ffe04985f44e1d"}, - {file = "pyobjc_core-8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:711f361e83382e405e4273ff085178b0cf730901ccf6801f834e7037e50278f9"}, - {file = "pyobjc_core-8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bdd2e2960ec73214bcfe2d86bb4ca94f5f5119db86d129fa32d3c003b6532c50"}, - {file = "pyobjc_core-8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b879f91fc614c399aafa1d08cf5e279c267510e904bad5c336c3a6064c0eb3aa"}, - {file = "pyobjc_core-8.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:b4ef4bdb99a330f5e15cc6273098964276fccbc432453cdba3c2963292bc066c"}, - {file = "pyobjc_core-8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fd8e5be0955790ff8f9d17a0f356b6eb7eb1ce4995e0c94355c462dd52d22d6d"}, + {file = "pyobjc-core-8.4.1.tar.gz", hash = "sha256:df98669e957adb33566d9ef46773a5ac876a81afe8849c282d6a80448e35dd74"}, + {file = "pyobjc_core-8.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9a89cac910fbd64728fe7ec0c7a3a7cf20959bc1d7e2f41db4d7800556e47745"}, + {file = "pyobjc_core-8.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2cf1d4348cb99fcba04fa38199a46e35263b2fe7bb66e6dfbd4f19bc2602998d"}, + {file = "pyobjc_core-8.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a130324b25c0f5f4cfe30b6a28b8e70865d6e1eee158caababb603906ef431d2"}, + {file = "pyobjc_core-8.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c31195d1a8f00da99abf79643f902d09c709dbbe9c9b6feb6b585303c57d720c"}, + {file = "pyobjc_core-8.4.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:e7fd2aefb53a96a8f688c8bb36c6ebd5446250a7251bfa6b688a045e05afc60b"}, + {file = "pyobjc_core-8.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b3191173ce268e23c84d84f036fc94c3a8749a6726fc7fe73baf27dbac14f7d0"}, ] pyobjc-framework-applicationservices = [ - {file = "pyobjc-framework-ApplicationServices-8.2.tar.gz", hash = "sha256:f901b2ebb278b7d00033b45cf9ee9d43f651e096ff4c4defa261509238138da8"}, - {file = "pyobjc_framework_ApplicationServices-8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b5be1757a8df944a58449c628ed68fc76dd746fcd1e96ebb6db0f734e94cdfc8"}, - {file = "pyobjc_framework_ApplicationServices-8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:32657c4b89983a98fbe831a14884954710719e763197968a20ac07e1daa21f1e"}, - {file = "pyobjc_framework_ApplicationServices-8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6ce2c9701590f752a75151b2fe1e462e7a7ad67dec3119a796711ea93ea01031"}, - {file = "pyobjc_framework_ApplicationServices-8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28124bc892045222305cf8953b163495bf662c401e54f48905dafb1104d9c1d1"}, - {file = "pyobjc_framework_ApplicationServices-8.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d5c60d66c6ed2569cb042a6c14dd5b9f4d01e182a464b893cd6002ce445b4ddf"}, - {file = "pyobjc_framework_ApplicationServices-8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e1727fccc1d48922fa28a4cebf4a6d287d633f451cb03779968df60de8cb1bd0"}, + {file = "pyobjc-framework-ApplicationServices-8.4.1.tar.gz", hash = "sha256:b058466266412d2bf24b0303785c91538961367f26db616bf2f9f45c498a83a3"}, + {file = 
"pyobjc_framework_ApplicationServices-8.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:362d178c624a617fc60c3a35397550193d82da9a59272b215cf1dc6fb68c011c"}, + {file = "pyobjc_framework_ApplicationServices-8.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:41f0f6be6d343f91de92cab053be4a983e3936b364ca267a94c6e06bc34ff7fe"}, + {file = "pyobjc_framework_ApplicationServices-8.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:73287b758a70a037b6e74a60036f4adb1677407dfeaddee94102074d92539e6e"}, + {file = "pyobjc_framework_ApplicationServices-8.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:287b541b66c5c931a11dde90d7be69ddd2e5c3624c2e980e679f5242ec3ee0da"}, + {file = "pyobjc_framework_ApplicationServices-8.4.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:780fbe2e3b02237f79e2f5780094dd95b7308ecdb265c062b78a507b9564eb4d"}, + {file = "pyobjc_framework_ApplicationServices-8.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:092c4835e73e5dab1f3c498511b28b2e96503671fc7c1bc25f732c5e687b7214"}, ] pyobjc-framework-cocoa = [ - {file = "pyobjc-framework-Cocoa-8.2.tar.gz", hash = "sha256:f0901998e2f18415ef6d1f8a12b083f69fc93bd56b3e88040002e3c09bd8c304"}, - {file = "pyobjc_framework_Cocoa-8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5af73f150e242542735e0663bb5504f88aabaec2a54c60e856cfca3ff6dd9712"}, - {file = "pyobjc_framework_Cocoa-8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d1de3763ee01850c311da74de5c82c85ec199120e85ab45acaf203accc37a470"}, - {file = "pyobjc_framework_Cocoa-8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:86d69bf667f99f3c43184d8830567195fff94c675fe7f60f899dd90553d9b265"}, - {file = "pyobjc_framework_Cocoa-8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9dbadb22826392c48b00087359f66579f8404a4f4f77498f31f9869c54bb0fa9"}, - {file = "pyobjc_framework_Cocoa-8.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:7adf8b57da1c1292c24375b8e74b6dd45f99a4d3c10ba925a9b38f63a97ba782"}, - {file = "pyobjc_framework_Cocoa-8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bc8279c8c1544087d46a7e99324f093029df89b8c527c5da2a682bad4cb3197e"}, + {file = "pyobjc-framework-Cocoa-8.4.1.tar.gz", hash = "sha256:dc596bac0f5d424f67944e95b2d0d7c94a07c4166359d7b4a4d4ae4f8e112822"}, + {file = "pyobjc_framework_Cocoa-8.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cfbe038a0108ae1b45f8f7067af70af5811b8352d2dc3d86a7bcb4484ff5d56e"}, + {file = "pyobjc_framework_Cocoa-8.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:118225562064d991bafb41d0913899d6b3d723984d1888cb7181e4dba63b22c2"}, + {file = "pyobjc_framework_Cocoa-8.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d117a1eb24fd317e9f63792ac6a8703ed899de5d42e8a861c7bf885625668c31"}, + {file = "pyobjc_framework_Cocoa-8.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3af8577acbd6b980d3b9270ec99bc0164f66ef8397351a72fcdee527f23c1a34"}, + {file = "pyobjc_framework_Cocoa-8.4.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:91fdc964acb4dee4d37ae81fb603d48397739dbbfcc1eadbe0cdafaa8144b6e6"}, + {file = "pyobjc_framework_Cocoa-8.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:197dd28668e786b55d7d4139afd85c285f780564ebbccc166e84a24be31de34f"}, ] pyobjc-framework-quartz = [ - {file = "pyobjc-framework-Quartz-8.2.tar.gz", hash = "sha256:219d8797235bf071723f8b0f30a681de0b12875e2d04ae902a3a269f72de0b66"}, - {file = "pyobjc_framework_Quartz-8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:e5ab3117201a494b11bb1b80febf5dd288111a196b35731815b656ae30ee6ce3"}, - {file = "pyobjc_framework_Quartz-8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4941b3039ab7bcf89cb4255c381358de2383c1ab45c9e00c3b64655f271a3b32"}, - {file = "pyobjc_framework_Quartz-8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ab75a50dea84ec794641522d9f035fc6c19ec4eb37a56c9186b9943575fbc7ab"}, - {file = "pyobjc_framework_Quartz-8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8cb687b20ebfc467034868b38945b573d1c50f237e379cf86c4f557c9766b759"}, - {file = "pyobjc_framework_Quartz-8.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:8b5c3ca1fef900fa66aafe82fff07bc352c60ea7dceed2bb9b5b1db0957b4fea"}, - {file = "pyobjc_framework_Quartz-8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7bca7e649da77056348d71032def44e345043319d71b0c592197888d92e3eec1"}, + {file = "pyobjc-framework-Quartz-8.4.1.tar.gz", hash = "sha256:f8acebf0b526f0687e79ea6946e8f2528ced4ef02d0ed3fbf7116124b2749e52"}, + {file = "pyobjc_framework_Quartz-8.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a82d9eab3b2a6fca46602465f731815026d06e4fd0ba65b5b5211f1a0472b861"}, + {file = "pyobjc_framework_Quartz-8.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1bd0a506385141b81330464b6e2537a7056cdca56deb98222b6926a04f72a3e6"}, + {file = "pyobjc_framework_Quartz-8.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:64cc616386b7a387dce53d3adb8c12a8461c2720861ab36aeeb53768cd0ba7e4"}, + {file = "pyobjc_framework_Quartz-8.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a2dff998c4d1286998390f58a3131b99bdc9dbf5c3d881562b33f168098bbe2e"}, + {file = "pyobjc_framework_Quartz-8.4.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:a98c58e1b265d5b413f5ecc6ec186b9e305fb6e37909d8b4b97fd681176f5f1c"}, + {file = "pyobjc_framework_Quartz-8.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:39f55426ef883cbe12a53969f90886f71e2a94513c98cf3730efdf404cdc5c83"}, ] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, @@ -2649,8 +2666,8 @@ python3-xlib = [ {file = "python3-xlib-0.15.tar.gz", hash = "sha256:dc4245f3ae4aa5949c1d112ee4723901ade37a96721ba9645f2bfa56e5b383f8"}, ] pytz = [ - {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, - {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, + {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"}, + {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"}, ] pywin32 = [ {file = "pywin32-301-cp35-cp35m-win32.whl", hash = "sha256:93367c96e3a76dfe5003d8291ae16454ca7d84bb24d721e0b74a07610b7be4a7"}, @@ -2700,13 +2717,14 @@ semver = [ {file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"}, {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, ] +shotgun-api3 = [] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] slack-sdk = [ - {file = "slack_sdk-3.13.0-py2.py3-none-any.whl", hash = 
"sha256:54f2a5f7419f1ab932af9e3200f7f2f93db96e0f0eb8ad7d3b4214aa9f124641"}, - {file = "slack_sdk-3.13.0.tar.gz", hash = "sha256:aae6ce057e286a5e7fe7a9f256e85b886eee556def8e04b82b08f699e64d7f67"}, + {file = "slack_sdk-3.15.2-py2.py3-none-any.whl", hash = "sha256:e1fa26786169176e707676decc287fd9d3d547bbc43c0a1a4f99eb373b07da94"}, + {file = "slack_sdk-3.15.2.tar.gz", hash = "sha256:128f3bb0b5b91454a3d5f140a61f3db370a0e1b50ffe0a8d9e9ebe0e894faed7"}, ] smmap = [ {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, @@ -2717,20 +2735,20 @@ snowballstemmer = [ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] speedcopy = [ - {file = "speedcopy-2.1.2-py3-none-any.whl", hash = "sha256:91e271b84c00952812dbf669d360b2610fd8fa198670373e02acf2a04db89a4c"}, - {file = "speedcopy-2.1.2.tar.gz", hash = "sha256:1b2d779fadebd53a59384f7d286c40b2ef382e0d000fa53011699fcd3190d33f"}, + {file = "speedcopy-2.1.3-py3-none-any.whl", hash = "sha256:ecbd71b7fcd2a01ce444268fb50db6bf843845aa494018571b6726f5b37c7869"}, + {file = "speedcopy-2.1.3.tar.gz", hash = "sha256:8e14a2b94352b91229c2c0712ea39ed265ce9ec780f899e43e9f2cf5435cf959"}, ] sphinx = [ - {file = "Sphinx-3.5.3-py3-none-any.whl", hash = "sha256:3f01732296465648da43dec8fb40dc451ba79eb3e2cc5c6d79005fd98197107d"}, - {file = "Sphinx-3.5.3.tar.gz", hash = "sha256:ce9c228456131bab09a3d7d10ae58474de562a6f79abb3dc811ae401cf8c1abc"}, + {file = "Sphinx-4.5.0-py3-none-any.whl", hash = "sha256:ebf612653238bcc8f4359627a9b7ce44ede6fdd75d9d30f68255c7383d3a6226"}, + {file = "Sphinx-4.5.0.tar.gz", hash = "sha256:7bf8ca9637a4ee15af412d1a1d9689fec70523a68ca9bb9127c2f3eeb344e2e6"}, ] sphinx-qt-documentation = [ - {file = "sphinx_qt_documentation-0.3-py3-none-any.whl", hash = "sha256:bee247cb9e4fc03fc496d07adfdb943100e1103320c3e5e820e0cfa7c790d9b6"}, - {file = "sphinx_qt_documentation-0.3.tar.gz", hash = "sha256:f09a0c9d9e989172ba3e282b92bf55613bb23ad47315ec5b0d38536b343ac6c8"}, + {file = "sphinx_qt_documentation-0.4-py3-none-any.whl", hash = "sha256:fa131093f75cd1bd48699cd132e18e4d46ba9eaadc070e6026867cea75ecdb7b"}, + {file = "sphinx_qt_documentation-0.4.tar.gz", hash = "sha256:f43ba17baa93e353fb94045027fb67f9d935ed158ce8662de93f08b88eec6774"}, ] sphinx-rtd-theme = [ - {file = "sphinx_rtd_theme-0.5.1-py2.py3-none-any.whl", hash = "sha256:fa6bebd5ab9a73da8e102509a86f3fcc36dec04a0b52ea80e5a033b2aba00113"}, - {file = "sphinx_rtd_theme-0.5.1.tar.gz", hash = "sha256:eda689eda0c7301a80cf122dad28b1861e5605cbf455558f3775e1e8200e83a5"}, + {file = "sphinx_rtd_theme-1.0.0-py2.py3-none-any.whl", hash = "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8"}, + {file = "sphinx_rtd_theme-1.0.0.tar.gz", hash = "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"}, ] sphinxcontrib-applehelp = [ {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, @@ -2773,8 +2791,8 @@ toml = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] tomli = [ - {file = "tomli-2.0.0-py3-none-any.whl", hash = "sha256:b5bde28da1fed24b9bd1d4d2b8cba62300bfb4ec9a6187a957e8ddb9434c5224"}, - {file = "tomli-2.0.0.tar.gz", hash = "sha256:c292c34f58502a1eb2bbb9f5bbc9a5ebc37bee10ffb8c2d6bbdfa8eb13cc14e1"}, + {file = "tomli-2.0.1-py3-none-any.whl", hash = 
"sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] typed-ast = [ {file = "typed_ast-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:183b183b7771a508395d2cbffd6db67d6ad52958a5fdc99f450d954003900266"}, @@ -2803,16 +2821,16 @@ typed-ast = [ {file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"}, ] typing-extensions = [ - {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, - {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, + {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, + {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, ] uritemplate = [ {file = "uritemplate-3.0.1-py2.py3-none-any.whl", hash = "sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f"}, {file = "uritemplate-3.0.1.tar.gz", hash = "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae"}, ] urllib3 = [ - {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"}, - {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"}, + {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, + {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, @@ -2823,57 +2841,70 @@ websocket-client = [ {file = "websocket_client-0.59.0-py2.py3-none-any.whl", hash = "sha256:2e50d26ca593f70aba7b13a489435ef88b8fc3b5c5643c1ce8808ff9b40f0b32"}, ] wrapt = [ - {file = "wrapt-1.13.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e05e60ff3b2b0342153be4d1b597bbcfd8330890056b9619f4ad6b8d5c96a81a"}, - {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:85148f4225287b6a0665eef08a178c15097366d46b210574a658c1ff5b377489"}, - {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:2dded5496e8f1592ec27079b28b6ad2a1ef0b9296d270f77b8e4a3a796cf6909"}, - {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e94b7d9deaa4cc7bac9198a58a7240aaf87fe56c6277ee25fa5b3aa1edebd229"}, - {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:498e6217523111d07cd67e87a791f5e9ee769f9241fcf8a379696e25806965af"}, - {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ec7e20258ecc5174029a0f391e1b948bf2906cd64c198a9b8b281b811cbc04de"}, - {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:87883690cae293541e08ba2da22cacaae0a092e0ed56bbba8d018cc486fbafbb"}, - {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:f99c0489258086308aad4ae57da9e8ecf9e1f3f30fa35d5e170b4d4896554d80"}, - {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6a03d9917aee887690aa3f1747ce634e610f6db6f6b332b35c2dd89412912bca"}, - {file = "wrapt-1.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:936503cb0a6ed28dbfa87e8fcd0a56458822144e9d11a49ccee6d9a8adb2ac44"}, - {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f9c51d9af9abb899bd34ace878fbec8bf357b3194a10c4e8e0a25512826ef056"}, - {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:220a869982ea9023e163ba915077816ca439489de6d2c09089b219f4e11b6785"}, - {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0877fe981fd76b183711d767500e6b3111378ed2043c145e21816ee589d91096"}, - {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:43e69ffe47e3609a6aec0fe723001c60c65305784d964f5007d5b4fb1bc6bf33"}, - {file = "wrapt-1.13.3-cp310-cp310-win32.whl", hash = "sha256:78dea98c81915bbf510eb6a3c9c24915e4660302937b9ae05a0947164248020f"}, - {file = "wrapt-1.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:ea3e746e29d4000cd98d572f3ee2a6050a4f784bb536f4ac1f035987fc1ed83e"}, - {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:8c73c1a2ec7c98d7eaded149f6d225a692caa1bd7b2401a14125446e9e90410d"}, - {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:086218a72ec7d986a3eddb7707c8c4526d677c7b35e355875a0fe2918b059179"}, - {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:e92d0d4fa68ea0c02d39f1e2f9cb5bc4b4a71e8c442207433d8db47ee79d7aa3"}, - {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:d4a5f6146cfa5c7ba0134249665acd322a70d1ea61732723c7d3e8cc0fa80755"}, - {file = "wrapt-1.13.3-cp35-cp35m-win32.whl", hash = "sha256:8aab36778fa9bba1a8f06a4919556f9f8c7b33102bd71b3ab307bb3fecb21851"}, - {file = "wrapt-1.13.3-cp35-cp35m-win_amd64.whl", hash = "sha256:944b180f61f5e36c0634d3202ba8509b986b5fbaf57db3e94df11abee244ba13"}, - {file = "wrapt-1.13.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2ebdde19cd3c8cdf8df3fc165bc7827334bc4e353465048b36f7deeae8ee0918"}, - {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:610f5f83dd1e0ad40254c306f4764fcdc846641f120c3cf424ff57a19d5f7ade"}, - {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5601f44a0f38fed36cc07db004f0eedeaadbdcec90e4e90509480e7e6060a5bc"}, - {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e6906d6f48437dfd80464f7d7af1740eadc572b9f7a4301e7dd3d65db285cacf"}, - {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:766b32c762e07e26f50d8a3468e3b4228b3736c805018e4b0ec8cc01ecd88125"}, - {file = "wrapt-1.13.3-cp36-cp36m-win32.whl", hash = "sha256:5f223101f21cfd41deec8ce3889dc59f88a59b409db028c469c9b20cfeefbe36"}, - {file = "wrapt-1.13.3-cp36-cp36m-win_amd64.whl", hash = "sha256:f122ccd12fdc69628786d0c947bdd9cb2733be8f800d88b5a37c57f1f1d73c10"}, - {file = "wrapt-1.13.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:46f7f3af321a573fc0c3586612db4decb7eb37172af1bc6173d81f5b66c2e068"}, - {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:778fd096ee96890c10ce96187c76b3e99b2da44e08c9e24d5652f356873f6709"}, - {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0cb23d36ed03bf46b894cfec777eec754146d68429c30431c99ef28482b5c1df"}, - {file = 
"wrapt-1.13.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:96b81ae75591a795d8c90edc0bfaab44d3d41ffc1aae4d994c5aa21d9b8e19a2"}, - {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7dd215e4e8514004c8d810a73e342c536547038fb130205ec4bba9f5de35d45b"}, - {file = "wrapt-1.13.3-cp37-cp37m-win32.whl", hash = "sha256:47f0a183743e7f71f29e4e21574ad3fa95676136f45b91afcf83f6a050914829"}, - {file = "wrapt-1.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fd76c47f20984b43d93de9a82011bb6e5f8325df6c9ed4d8310029a55fa361ea"}, - {file = "wrapt-1.13.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b73d4b78807bd299b38e4598b8e7bd34ed55d480160d2e7fdaabd9931afa65f9"}, - {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ec9465dd69d5657b5d2fa6133b3e1e989ae27d29471a672416fd729b429eb554"}, - {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd91006848eb55af2159375134d724032a2d1d13bcc6f81cd8d3ed9f2b8e846c"}, - {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ae9de71eb60940e58207f8e71fe113c639da42adb02fb2bcbcaccc1ccecd092b"}, - {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:51799ca950cfee9396a87f4a1240622ac38973b6df5ef7a41e7f0b98797099ce"}, - {file = "wrapt-1.13.3-cp38-cp38-win32.whl", hash = "sha256:4b9c458732450ec42578b5642ac53e312092acf8c0bfce140ada5ca1ac556f79"}, - {file = "wrapt-1.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:7dde79d007cd6dfa65afe404766057c2409316135cb892be4b1c768e3f3a11cb"}, - {file = "wrapt-1.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:981da26722bebb9247a0601e2922cedf8bb7a600e89c852d063313102de6f2cb"}, - {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:705e2af1f7be4707e49ced9153f8d72131090e52be9278b5dbb1498c749a1e32"}, - {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25b1b1d5df495d82be1c9d2fad408f7ce5ca8a38085e2da41bb63c914baadff7"}, - {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:77416e6b17926d953b5c666a3cb718d5945df63ecf922af0ee576206d7033b5e"}, - {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:865c0b50003616f05858b22174c40ffc27a38e67359fa1495605f96125f76640"}, - {file = "wrapt-1.13.3-cp39-cp39-win32.whl", hash = "sha256:0a017a667d1f7411816e4bf214646d0ad5b1da2c1ea13dec6c162736ff25a374"}, - {file = "wrapt-1.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:81bd7c90d28a4b2e1df135bfbd7c23aee3050078ca6441bead44c42483f9ebfb"}, - {file = "wrapt-1.13.3.tar.gz", hash = "sha256:1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185"}, + {file = "wrapt-1.14.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:5a9a1889cc01ed2ed5f34574c90745fab1dd06ec2eee663e8ebeefe363e8efd7"}, + {file = "wrapt-1.14.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:9a3ff5fb015f6feb78340143584d9f8a0b91b6293d6b5cf4295b3e95d179b88c"}, + {file = "wrapt-1.14.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:4b847029e2d5e11fd536c9ac3136ddc3f54bc9488a75ef7d040a3900406a91eb"}, + {file = "wrapt-1.14.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:9a5a544861b21e0e7575b6023adebe7a8c6321127bb1d238eb40d99803a0e8bd"}, + {file = "wrapt-1.14.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:88236b90dda77f0394f878324cfbae05ae6fde8a84d548cfe73a75278d760291"}, 
+ {file = "wrapt-1.14.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f0408e2dbad9e82b4c960274214af533f856a199c9274bd4aff55d4634dedc33"}, + {file = "wrapt-1.14.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:9d8c68c4145041b4eeae96239802cfdfd9ef927754a5be3f50505f09f309d8c6"}, + {file = "wrapt-1.14.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:22626dca56fd7f55a0733e604f1027277eb0f4f3d95ff28f15d27ac25a45f71b"}, + {file = "wrapt-1.14.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:65bf3eb34721bf18b5a021a1ad7aa05947a1767d1aa272b725728014475ea7d5"}, + {file = "wrapt-1.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09d16ae7a13cff43660155383a2372b4aa09109c7127aa3f24c3cf99b891c330"}, + {file = "wrapt-1.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:debaf04f813ada978d7d16c7dfa16f3c9c2ec9adf4656efdc4defdf841fc2f0c"}, + {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:748df39ed634851350efa87690c2237a678ed794fe9ede3f0d79f071ee042561"}, + {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1807054aa7b61ad8d8103b3b30c9764de2e9d0c0978e9d3fc337e4e74bf25faa"}, + {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763a73ab377390e2af26042f685a26787c402390f682443727b847e9496e4a2a"}, + {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8529b07b49b2d89d6917cfa157d3ea1dfb4d319d51e23030664a827fe5fd2131"}, + {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:68aeefac31c1f73949662ba8affaf9950b9938b712fb9d428fa2a07e40ee57f8"}, + {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59d7d92cee84a547d91267f0fea381c363121d70fe90b12cd88241bd9b0e1763"}, + {file = "wrapt-1.14.0-cp310-cp310-win32.whl", hash = "sha256:3a88254881e8a8c4784ecc9cb2249ff757fd94b911d5df9a5984961b96113fff"}, + {file = "wrapt-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:9a242871b3d8eecc56d350e5e03ea1854de47b17f040446da0e47dc3e0b9ad4d"}, + {file = "wrapt-1.14.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a65bffd24409454b889af33b6c49d0d9bcd1a219b972fba975ac935f17bdf627"}, + {file = "wrapt-1.14.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9d9fcd06c952efa4b6b95f3d788a819b7f33d11bea377be6b8980c95e7d10775"}, + {file = "wrapt-1.14.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:db6a0ddc1282ceb9032e41853e659c9b638789be38e5b8ad7498caac00231c23"}, + {file = "wrapt-1.14.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:14e7e2c5f5fca67e9a6d5f753d21f138398cad2b1159913ec9e9a67745f09ba3"}, + {file = "wrapt-1.14.0-cp35-cp35m-win32.whl", hash = "sha256:6d9810d4f697d58fd66039ab959e6d37e63ab377008ef1d63904df25956c7db0"}, + {file = "wrapt-1.14.0-cp35-cp35m-win_amd64.whl", hash = "sha256:d808a5a5411982a09fef6b49aac62986274ab050e9d3e9817ad65b2791ed1425"}, + {file = "wrapt-1.14.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b77159d9862374da213f741af0c361720200ab7ad21b9f12556e0eb95912cd48"}, + {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36a76a7527df8583112b24adc01748cd51a2d14e905b337a6fefa8b96fc708fb"}, + {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0057b5435a65b933cbf5d859cd4956624df37b8bf0917c71756e4b3d9958b9e"}, + {file = 
"wrapt-1.14.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0a4ca02752ced5f37498827e49c414d694ad7cf451ee850e3ff160f2bee9d3"}, + {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8c6be72eac3c14baa473620e04f74186c5d8f45d80f8f2b4eda6e1d18af808e8"}, + {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:21b1106bff6ece8cb203ef45b4f5778d7226c941c83aaaa1e1f0f4f32cc148cd"}, + {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:493da1f8b1bb8a623c16552fb4a1e164c0200447eb83d3f68b44315ead3f9036"}, + {file = "wrapt-1.14.0-cp36-cp36m-win32.whl", hash = "sha256:89ba3d548ee1e6291a20f3c7380c92f71e358ce8b9e48161401e087e0bc740f8"}, + {file = "wrapt-1.14.0-cp36-cp36m-win_amd64.whl", hash = "sha256:729d5e96566f44fccac6c4447ec2332636b4fe273f03da128fff8d5559782b06"}, + {file = "wrapt-1.14.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:891c353e95bb11abb548ca95c8b98050f3620a7378332eb90d6acdef35b401d4"}, + {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23f96134a3aa24cc50614920cc087e22f87439053d886e474638c68c8d15dc80"}, + {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6807bcee549a8cb2f38f73f469703a1d8d5d990815c3004f21ddb68a567385ce"}, + {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6915682f9a9bc4cf2908e83caf5895a685da1fbd20b6d485dafb8e218a338279"}, + {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f2f3bc7cd9c9fcd39143f11342eb5963317bd54ecc98e3650ca22704b69d9653"}, + {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3a71dbd792cc7a3d772ef8cd08d3048593f13d6f40a11f3427c000cf0a5b36a0"}, + {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5a0898a640559dec00f3614ffb11d97a2666ee9a2a6bad1259c9facd01a1d4d9"}, + {file = "wrapt-1.14.0-cp37-cp37m-win32.whl", hash = "sha256:167e4793dc987f77fd476862d32fa404d42b71f6a85d3b38cbce711dba5e6b68"}, + {file = "wrapt-1.14.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d066ffc5ed0be00cd0352c95800a519cf9e4b5dd34a028d301bdc7177c72daf3"}, + {file = "wrapt-1.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d9bdfa74d369256e4218000a629978590fd7cb6cf6893251dad13d051090436d"}, + {file = "wrapt-1.14.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2498762814dd7dd2a1d0248eda2afbc3dd9c11537bc8200a4b21789b6df6cd38"}, + {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f24ca7953f2643d59a9c87d6e272d8adddd4a53bb62b9208f36db408d7aafc7"}, + {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b835b86bd5a1bdbe257d610eecab07bf685b1af2a7563093e0e69180c1d4af1"}, + {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b21650fa6907e523869e0396c5bd591cc326e5c1dd594dcdccac089561cacfb8"}, + {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:354d9fc6b1e44750e2a67b4b108841f5f5ea08853453ecbf44c81fdc2e0d50bd"}, + {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f83e9c21cd5275991076b2ba1cd35418af3504667affb4745b48937e214bafe"}, + {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:61e1a064906ccba038aa3c4a5a82f6199749efbbb3cef0804ae5c37f550eded0"}, + {file = "wrapt-1.14.0-cp38-cp38-win32.whl", hash = "sha256:28c659878f684365d53cf59dc9a1929ea2eecd7ac65da762be8b1ba193f7e84f"}, + {file = "wrapt-1.14.0-cp38-cp38-win_amd64.whl", hash = "sha256:b0ed6ad6c9640671689c2dbe6244680fe8b897c08fd1fab2228429b66c518e5e"}, + {file = "wrapt-1.14.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b3f7e671fb19734c872566e57ce7fc235fa953d7c181bb4ef138e17d607dc8a1"}, + {file = "wrapt-1.14.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87fa943e8bbe40c8c1ba4086971a6fefbf75e9991217c55ed1bcb2f1985bd3d4"}, + {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4775a574e9d84e0212f5b18886cace049a42e13e12009bb0491562a48bb2b758"}, + {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d57677238a0c5411c76097b8b93bdebb02eb845814c90f0b01727527a179e4d"}, + {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00108411e0f34c52ce16f81f1d308a571df7784932cc7491d1e94be2ee93374b"}, + {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d332eecf307fca852d02b63f35a7872de32d5ba8b4ec32da82f45df986b39ff6"}, + {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:01f799def9b96a8ec1ef6b9c1bbaf2bbc859b87545efbecc4a78faea13d0e3a0"}, + {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47045ed35481e857918ae78b54891fac0c1d197f22c95778e66302668309336c"}, + {file = "wrapt-1.14.0-cp39-cp39-win32.whl", hash = "sha256:2eca15d6b947cfff51ed76b2d60fd172c6ecd418ddab1c5126032d27f74bc350"}, + {file = "wrapt-1.14.0-cp39-cp39-win_amd64.whl", hash = "sha256:bb36fbb48b22985d13a6b496ea5fb9bb2a076fea943831643836c9f6febbcfdc"}, + {file = "wrapt-1.14.0.tar.gz", hash = "sha256:8323a43bd9c91f62bb7d4be74cc9ff10090e7ef820e27bfe8815c57e68261311"}, ] wsrpc-aiohttp = [ {file = "wsrpc-aiohttp-3.2.0.tar.gz", hash = "sha256:f467abc51bcdc760fc5aeb7041abdeef46eeca3928dc43dd6e7fa7a533563818"}, @@ -2954,6 +2985,6 @@ yarl = [ {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"}, ] zipp = [ - {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, - {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, + {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, + {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, ] diff --git a/pyproject.toml b/pyproject.toml index f32e385e807..a2d1fe99462 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,7 @@ clique = "1.6.*" Click = "^7" dnspython = "^2.1.0" ftrack-python-api = "2.0.*" +shotgun_api3 = {git = "https://github.com/shotgunsoftware/python-api.git", rev = "v3.3.3"} google-api-python-client = "^1.12.8" # sync server google support (should be separate?) 
jsonschema = "^2.6.0" keyring = "^22.0.1" diff --git a/setup.py b/setup.py index d3b967a4b6a..8b5a545c169 100644 --- a/setup.py +++ b/setup.py @@ -107,7 +107,8 @@ def validate_thirdparty_binaries(): # Python defaults (cx_Freeze skip them by default) "dbm", "sqlite3", - "dataclasses" + "dataclasses", + "timeit" ] includes = [] diff --git a/start.py b/start.py index 6e339fabab6..ace33ab92ad 100644 --- a/start.py +++ b/start.py @@ -386,18 +386,6 @@ def set_modules_environments(): modules_manager = ModulesManager() module_envs = modules_manager.collect_global_environments() - publish_plugin_dirs = modules_manager.collect_plugin_paths()["publish"] - - # Set pyblish plugins paths if any module want to register them - if publish_plugin_dirs: - publish_paths_str = os.environ.get("PYBLISHPLUGINPATH") or "" - publish_paths = publish_paths_str.split(os.pathsep) - _publish_paths = { - os.path.normpath(path) for path in publish_paths if path - } - for path in publish_plugin_dirs: - _publish_paths.add(os.path.normpath(path)) - module_envs["PYBLISHPLUGINPATH"] = os.pathsep.join(_publish_paths) # Merge environments with current environments and update values if module_envs: