From 644f1b5590f5bb7e59fa3538fd7a4a5545c64809 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 14 Oct 2024 12:21:19 +0200 Subject: [PATCH 001/135] bump version to '1.0.11-dev.1' --- ayon_api/version.py | 2 +- pyproject.toml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ayon_api/version.py b/ayon_api/version.py index 2371bac09..a27cffcec 100644 --- a/ayon_api/version.py +++ b/ayon_api/version.py @@ -1,2 +1,2 @@ """Package declaring Python API for AYON server.""" -__version__ = "1.0.10" +__version__ = "1.0.11-dev.1" diff --git a/pyproject.toml b/pyproject.toml index aff50ebb7..680d19b1f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "ayon-python-api" -version = "1.0.10" +version = "1.0.11-dev.1" description = "AYON Python API" license = {file = "LICENSE"} readme = {file = "README.md", content-type = "text/markdown"} @@ -29,7 +29,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "ayon-python-api" -version = "1.0.10" +version = "1.0.11-dev.1" description = "AYON Python API" authors = [ "ynput.io " From 688806e59c0871fe9aa9ff829c4adb071c8e96ef Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 14 Oct 2024 13:44:01 +0200 Subject: [PATCH 002/135] do not copy 'get_default_settings_variant' on auto api creation --- automated_api.py | 1 + ayon_api/_api.py | 27 ++++++++++++++------------- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/automated_api.py b/automated_api.py index 09aa0d8cd..528702ca3 100644 --- a/automated_api.py +++ b/automated_api.py @@ -27,6 +27,7 @@ EXCLUDED_METHODS = { "get_default_service_username", + "get_default_settings_variant", "validate_token", "set_token", "reset_token", diff --git a/ayon_api/_api.py b/ayon_api/_api.py index fd66ded0b..c4e41ebda 100644 --- a/ayon_api/_api.py +++ b/ayon_api/_api.py @@ -331,6 +331,20 @@ def get_server_api_connection(): """ return GlobalContext.get_server_api_connection() + +def get_default_settings_variant(): + """Default variant used for settings. + + Returns: + Union[str, None]: name of variant or None. + + """ + if not GlobalContext.is_connection_created(): + return _get_default_settings_variant() + con = get_server_api_connection() + return con.get_default_settings_variant() + + # ------------------------------------------------ # This content is generated automatically. # ------------------------------------------------ @@ -498,19 +512,6 @@ def set_client_version(*args, **kwargs): return con.set_client_version(*args, **kwargs) -def get_default_settings_variant(): - """Default variant used for settings. - - Returns: - Union[str, None]: name of variant or None. - - """ - if not GlobalContext.is_connection_created(): - return _get_default_settings_variant() - con = get_server_api_connection() - return con.get_default_settings_variant() - - def set_default_settings_variant(*args, **kwargs): """Change default variant for addon settings. 
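Patch 002 above excludes 'get_default_settings_variant' from the auto-generated wrappers and keeps it as a hand-written function, so calling it no longer forces a global ServerAPI connection to be created when none exists. A minimal usage sketch, assuming a configured AYON environment and that the function is re-exported from the public 'ayon_api' module (the re-export itself is not shown in these patches):

    import ayon_api

    # Safe to call before any ServerAPI connection exists; in that case it
    # falls back to the environment-based default variant instead of
    # creating a connection.
    variant = ayon_api.get_default_settings_variant()
    print(f"Default settings variant: {variant}")
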
From e94824e7115b8f13717e6bec2ceada41e304ab04 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 14 Oct 2024 13:45:04 +0200 Subject: [PATCH 003/135] fix typo and typhint in get representations --- ayon_api/_api.py | 6 +++--- ayon_api/server_api.py | 22 +++++++++++----------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/ayon_api/_api.py b/ayon_api/_api.py index fd66ded0b..3e54c77fb 100644 --- a/ayon_api/_api.py +++ b/ayon_api/_api.py @@ -3405,9 +3405,9 @@ def get_representations(*args, **kwargs): version_ids (Optional[Iterable[str]]): Version ids used for representation filtering. Versions are parents of representations. - names_by_version_ids (Optional[bool]): Find representations - by names and version ids. This filter discard all - other filters. + names_by_version_ids (Optional[Dict[str, Iterable[str]]): Find + representations by names and version ids. This filter + discard all other filters. statuses (Optional[Iterable[str]]): Representation statuses used for filtering. tags (Optional[Iterable[str]]): Representation tags used diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 2b40bd991..d55eae70f 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -6374,9 +6374,9 @@ def get_representations( version_ids (Optional[Iterable[str]]): Version ids used for representation filtering. Versions are parents of representations. - names_by_version_ids (Optional[bool]): Find representations - by names and version ids. This filter discard all - other filters. + names_by_version_ids (Optional[Dict[str, Iterable[str]]): Find + representations by names and version ids. This filter + discard all other filters. statuses (Optional[Iterable[str]]): Representation statuses used for filtering. 
tags (Optional[Iterable[str]]): Representation tags used @@ -6438,21 +6438,21 @@ def get_representations( filters["representationIds"] = list(representation_ids) version_ids_filter = None - representaion_names_filter = None + representation_names_filter = None if names_by_version_ids is not None: version_ids_filter = set() - representaion_names_filter = set() + representation_names_filter = set() for version_id, names in names_by_version_ids.items(): version_ids_filter.add(version_id) - representaion_names_filter |= set(names) + representation_names_filter |= set(names) - if not version_ids_filter or not representaion_names_filter: + if not version_ids_filter or not representation_names_filter: return else: if representation_names is not None: - representaion_names_filter = set(representation_names) - if not representaion_names_filter: + representation_names_filter = set(representation_names) + if not representation_names_filter: return if version_ids is not None: @@ -6463,8 +6463,8 @@ def get_representations( if version_ids_filter: filters["versionIds"] = list(version_ids_filter) - if representaion_names_filter: - filters["representationNames"] = list(representaion_names_filter) + if representation_names_filter: + filters["representationNames"] = list(representation_names_filter) if statuses is not None: statuses = set(statuses) From cdb7defe9e93db7e734ebd2113f7e110d8bdcc21 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 14 Oct 2024 14:02:10 +0200 Subject: [PATCH 004/135] fix grammar --- ayon_api/_api.py | 2 +- ayon_api/server_api.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ayon_api/_api.py b/ayon_api/_api.py index bf3018a6b..0ece9b708 100644 --- a/ayon_api/_api.py +++ b/ayon_api/_api.py @@ -3408,7 +3408,7 @@ def get_representations(*args, **kwargs): representations. names_by_version_ids (Optional[Dict[str, Iterable[str]]): Find representations by names and version ids. This filter - discard all other filters. + discards all other filters. statuses (Optional[Iterable[str]]): Representation statuses used for filtering. tags (Optional[Iterable[str]]): Representation tags used diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index d55eae70f..2e151ffc9 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -6376,7 +6376,7 @@ def get_representations( representations. names_by_version_ids (Optional[Dict[str, Iterable[str]]): Find representations by names and version ids. This filter - discard all other filters. + discards all other filters. statuses (Optional[Iterable[str]]): Representation statuses used for filtering. 
tags (Optional[Iterable[str]]): Representation tags used From 0214ecaf7a9f7617e4379b5d22ce07abf5ee8e7b Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 14 Oct 2024 15:30:32 +0200 Subject: [PATCH 005/135] use abort endpoint instead of take --- ayon_api/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ayon_api/utils.py b/ayon_api/utils.py index 01384982f..0346fb4fa 100644 --- a/ayon_api/utils.py +++ b/ayon_api/utils.py @@ -881,7 +881,7 @@ def abort_web_action_event( """ response = requests.post( - f"{server_url}/api/actions/take/{action_token}", + f"{server_url}/api/actions/abort/{action_token}", json={"message": reason}, ) response.raise_for_status() From 852a913f468b5baf40776f91b30518b90e5baa46 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 21 Oct 2024 18:40:23 +0200 Subject: [PATCH 006/135] use explicit arguments in entity init --- ayon_api/entity_hub.py | 69 ++++++++++++++++++++++++++++++++++++------ 1 file changed, 60 insertions(+), 9 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 8989c0b05..df06c4b32 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -2417,10 +2417,27 @@ def __init__( folder_types, task_types, statuses, - *args, - **kwargs, + entity_id=None, + parent_id=UNKNOWN_VALUE, + name=UNKNOWN_VALUE, + attribs=UNKNOWN_VALUE, + data=UNKNOWN_VALUE, + thumbnail_id=UNKNOWN_VALUE, + active=UNKNOWN_VALUE, + entity_hub=None, + created=None, ): - super().__init__(*args, **kwargs) + super().__init__( + entity_id, + parent_id, + name, + attribs, + data, + thumbnail_id, + active, + entity_hub, + created + ) self._project_code = project_code self._library_project = library @@ -2580,14 +2597,31 @@ class FolderEntity(BaseEntity): def __init__( self, folder_type, - *args, + entity_id=None, + parent_id=UNKNOWN_VALUE, + name=UNKNOWN_VALUE, + attribs=UNKNOWN_VALUE, + data=UNKNOWN_VALUE, + thumbnail_id=UNKNOWN_VALUE, + active=UNKNOWN_VALUE, + entity_hub=None, + created=None, label=None, path=None, tags=None, status=UNKNOWN_VALUE, - **kwargs ): - super(FolderEntity, self).__init__(*args, **kwargs) + super().__init__( + entity_id, + parent_id, + name, + attribs, + data, + thumbnail_id, + active, + entity_hub, + created, + ) # Autofill project as parent of folder if is not yet set # - this can be guessed only if folder was just created if self.created and self._parent_id is UNKNOWN_VALUE: @@ -2861,14 +2895,31 @@ class TaskEntity(BaseEntity): def __init__( self, task_type, - *args, + entity_id=None, + parent_id=UNKNOWN_VALUE, + name=UNKNOWN_VALUE, + attribs=UNKNOWN_VALUE, + data=UNKNOWN_VALUE, + thumbnail_id=UNKNOWN_VALUE, + active=UNKNOWN_VALUE, + entity_hub=None, + created=None, label=None, tags=None, assignees=None, status=UNKNOWN_VALUE, - **kwargs ): - super(TaskEntity, self).__init__(*args, **kwargs) + super().__init__( + entity_id, + parent_id, + name, + attribs, + data, + thumbnail_id, + active, + entity_hub, + created, + ) if tags is None: tags = [] From e29f38faaf55b0b9b2b217ed16712ed09b866ccb Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 25 Oct 2024 17:30:38 +0200 Subject: [PATCH 007/135] delegated common attributes to base entity --- ayon_api/entity_hub.py | 524 +++++++++++++++++++++-------------------- 1 file changed, 269 insertions(+), 255 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index df06c4b32..9ecea77a9 100644 --- 
a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -928,7 +928,7 @@ class Attributes(object): Args: attrib_keys (Iterable[str]): Keys that are available in attribs of the entity. - values (Union[None, Dict[str, Any]]): Values of attributes. + values (Optional[Dict[str, Any]]): Values of attributes. """ @@ -1114,32 +1114,40 @@ class BaseEntity(ABC): entity are set as "current data" on server. Args: - entity_id (Union[str, None]): Id of the entity. New id is created if + entity_id (Optional[str]): Entity id. New id is created if not passed. - parent_id (Union[str, None]): Id of parent entity. - name (str): Name of entity. - attribs (Dict[str, Any]): Attribute values. - data (Dict[str, Any]): Entity data (custom data). - thumbnail_id (Union[str, None]): Id of entity's thumbnail. - active (bool): Is entity active. + parent_id (Optional[str]): Parent entity id. + attribs (Optional[Dict[str, Any]]): Attribute values. + data (Optional[Dict[str, Any]]): Entity data (custom data). + thumbnail_id (Optional[str]): Thumbnail id. + active (Optional[bool]): Is entity active. entity_hub (EntityHub): Object of entity hub which created object of the entity. created (Optional[bool]): Entity is new. When 'None' is passed the value is defined based on value of 'entity_id'. """ + _supports_name = False + _supports_label = False + _supports_status = False + _supports_tags = False + _supports_thumbnail = False def __init__( self, - entity_id=None, - parent_id=UNKNOWN_VALUE, - name=UNKNOWN_VALUE, - attribs=UNKNOWN_VALUE, - data=UNKNOWN_VALUE, - thumbnail_id=UNKNOWN_VALUE, - active=UNKNOWN_VALUE, - entity_hub=None, - created=None + entity_id: Optional[str] = None, + parent_id: Optional[Union[str, _CustomNone]] = UNKNOWN_VALUE, + attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + active: Optional[bool] = UNKNOWN_VALUE, + created: Optional[bool] = None, + entity_hub: EntityHub = None, + # Optional arguments + name=None, + label=None, + status: Optional[str] = UNKNOWN_VALUE, + tags: Optional[List[str]] = None, + thumbnail_id: Optional[str] = UNKNOWN_VALUE, ): if entity_hub is None: raise ValueError("Missing required kwarg 'entity_hub'") @@ -1164,15 +1172,18 @@ def __init__( if not created and parent_id is UNKNOWN_VALUE: raise ValueError("Existing entity is missing parent id.") + if tags is None: + tags = [] + else: + tags = list(tags) + # These are public without any validation at this moment # may change in future (e.g. 
name will have regex validation) self._entity_id = entity_id self._parent_id = parent_id - self._name = name self.active = active self._created = created - self._thumbnail_id = thumbnail_id self._attribs = Attributes( self._get_attributes_for_type(self.entity_type), attribs @@ -1181,9 +1192,20 @@ def __init__( self._children_ids = children_ids self._orig_parent_id = parent_id + self._orig_active = active + + # Optional only if supported by entity type + self._name = name + self._label = label + self._status = status + self._tags = copy.deepcopy(tags) + self._thumbnail_id = thumbnail_id + self._orig_name = name + self._orig_label = label + self._orig_status = status + self._orig_tags = copy.deepcopy(tags) self._orig_thumbnail_id = thumbnail_id - self._orig_active = active self._immutable_for_hierarchy_cache = None @@ -1380,9 +1402,6 @@ def _get_default_changes(self): """ changes = {} - if self._orig_name != self._name: - changes["name"] = self._name - if ( self._entity_hub.allow_data_changes and self._data is not UNKNOWN_VALUE @@ -1400,6 +1419,20 @@ def _get_default_changes(self): attrib_changes = self.attribs.changes if attrib_changes: changes["attrib"] = attrib_changes + + if self._supports_name and self._orig_name != self._name: + changes["name"] = self._name + + if self._supports_label: + label = self._get_label_value() + if label != self._orig_label: + changes["label"] = label + + if self._supports_status and self._orig_status != self._status: + changes["status"] = self._status + + if self._supports_tags and self._orig_tags != self._tags: + changes["tags"] = self._tags return changes def _get_attributes_for_type(self, entity_type): @@ -1417,6 +1450,15 @@ def lock(self): self._immutable_for_hierarchy_cache = None self._created = False + if self._supports_label: + self._orig_label = self._get_label_value() + if self._supports_status: + self._orig_status = self._status + if self._supports_tags: + self._orig_tags = copy.deepcopy(self._tags) + if self._supports_thumbnail: + self._orig_thumbnail_id = self._thumbnail_id + def _get_entity_by_id(self, entity_id): return self._entity_hub.get_entity_by_id(entity_id) @@ -1432,7 +1474,7 @@ def get_parent_id(self): """Parent entity id. Returns: - Union[str, None]: Id of parent entity or none if is not set. + Optional[str]: Parent entity id or none if is not set. """ return self._parent_id @@ -1441,7 +1483,7 @@ def set_parent_id(self, parent_id): """Change parent by id. Args: - parent_id (Union[str, None]): Id of new parent for entity. + parent_id (Optional[str]): Id of new parent for entity. Raises: ValueError: If parent was not found by id. @@ -1461,7 +1503,7 @@ def get_parent(self, allow_query=True): """Parent entity. Returns: - Union[BaseEntity, None]: Parent object. + Optional[BaseEntity]: Parent object. """ parent = self._entity_hub.get_entity_by_id(self._parent_id) @@ -1575,7 +1617,7 @@ def get_thumbnail_id(self): """Thumbnail id of entity. Returns: - Union[str, None]: Id of parent entity or none if is not set. + Optional[str]: Thumbnail id or none if is not set. """ return self._thumbnail_id @@ -1584,7 +1626,7 @@ def set_thumbnail_id(self, thumbnail_id): """Change thumbnail id. Args: - thumbnail_id (Union[str, None]): Id of thumbnail for entity. + thumbnail_id (Union[str, None]): Thumbnail id for entity. 
""" self._thumbnail_id = thumbnail_id @@ -1610,6 +1652,139 @@ def fill_children_ids(self, children_ids): """ self._children_ids = set(children_ids) + def get_name(self): + if not self._supports_name: + raise NotImplementedError( + f"Name is not supported for '{self.entity_type}'." + ) + return self._name + + name = property(get_name) + + def get_label(self) -> Optional[str]: + if not self._supports_label: + raise NotImplementedError( + f"Label is not supported for '{self.entity_type}'." + ) + return self._label + + def set_label(self, label: Optional[str]): + if not self._supports_label: + raise NotImplementedError( + f"Label is not supported for '{self.entity_type}'." + ) + self._label = label + + def _get_label_value(self): + """Get label value that will be used for operations. + + Returns: + Optional[str]: Label value. + + """ + label = self._label + if not label or self._name == label: + return None + return label + + label = property(get_label, set_label) + + def get_thumbnail_id(self): + """Thumbnail id of entity. + + Returns: + Optional[str]: Thumbnail id or none if is not set. + + """ + if not self._supports_thumbnail: + raise NotImplementedError( + f"Thumbnail is not supported for '{self.entity_type}'." + ) + return self._thumbnail_id + + def set_thumbnail_id(self, thumbnail_id): + """Change thumbnail id. + + Args: + thumbnail_id (Union[str, None]): Thumbnail id for entity. + + """ + if not self._supports_thumbnail: + raise NotImplementedError( + f"Thumbnail is not supported for '{self.entity_type}'." + ) + self._thumbnail_id = thumbnail_id + + thumbnail_id = property(get_thumbnail_id, set_thumbnail_id) + + def get_status(self) -> Union[str, UNKNOWN_VALUE]: + """Folder status. + + Returns: + Union[str, UNKNOWN_VALUE]: Folder status or 'UNKNOWN_VALUE'. + + """ + if not self._supports_status: + raise NotImplementedError( + f"Status is not supported for '{self.entity_type}'." + ) + return self._status + + def set_status(self, status_name: str): + """Set folder status. + + Args: + status_name (str): Status name. + + """ + if not self._supports_status: + raise NotImplementedError( + f"Status is not supported for '{self.entity_type}'." + ) + project_entity = self._entity_hub.project_entity + status = project_entity.get_status_by_slugified_name(status_name) + if status is None: + raise ValueError( + f"Status {status_name} is not available on project." + ) + + if not status.is_available_for_entity_type(self.entity_type): + raise ValueError( + f"Status {status_name} is not available for folder." + ) + + self._status = status_name + + status = property(get_status, set_status) + + def get_tags(self): + """Task tags. + + Returns: + list[str]: Task tags. + + """ + if not self._supports_tags: + raise NotImplementedError( + f"Tags are not supported for '{self.entity_type}'." + ) + return self._tags + + def set_tags(self, tags): + """Change tags. + + Args: + tags (Iterable[str]): Tags. + + """ + if not self._supports_tags: + raise NotImplementedError( + f"Tags are not supported for '{self.entity_type}'." + ) + self._tags = list(tags) + + tags = property(get_tags, set_tags) + class ProjectStatus: """Project status class. @@ -2402,8 +2577,9 @@ class ProjectEntity(BaseEntity): the entity. created (Optional[bool]): Entity is new. When 'None' is passed the value is defined based on value of 'entity_id'. - """ + """ + _supports_name = True entity_type = "project" parent_entity_types = [] # TODO These are hardcoded but maybe should be used from server??? 
@@ -2428,15 +2604,15 @@ def __init__( created=None, ): super().__init__( - entity_id, - parent_id, - name, - attribs, - data, - thumbnail_id, - active, - entity_hub, - created + entity_id=name, + parent_id=PROJECT_PARENT_ID, + attribs=attribs, + data=data, + active=active, + created=False, + entity_hub=entity_hub, + name=name, + thumbnail_id=thumbnail_id, ) self._project_code = project_code @@ -2458,6 +2634,11 @@ def _prepare_entity_id(self, entity_id): entity_id, self.project_name)) return entity_id + def set_name(self, name): + if self._name == name: + return + raise ValueError("It is not allowed to change project name.") + def get_parent(self, *args, **kwargs): return None @@ -2590,6 +2771,11 @@ class FolderEntity(BaseEntity): created (Optional[bool]): Entity is new. When 'None' is passed the value is defined based on value of 'entity_id'. """ + _supports_name = True + _supports_label = True + _supports_tags = True + _supports_status = True + _supports_thumbnail = True entity_type = "folder" parent_entity_types = ["folder", "project"] @@ -2612,108 +2798,40 @@ def __init__( status=UNKNOWN_VALUE, ): super().__init__( - entity_id, - parent_id, - name, - attribs, - data, - thumbnail_id, - active, - entity_hub, - created, + entity_id=entity_id, + parent_id=parent_id, + attribs=attribs, + data=data, + active=active, + created=created, + entity_hub=entity_hub, + name=name, + label=label, + tags=tags, + status=status, + thumbnail_id=thumbnail_id, ) # Autofill project as parent of folder if is not yet set # - this can be guessed only if folder was just created if self.created and self._parent_id is UNKNOWN_VALUE: self._parent_id = self.project_name - if tags is None: - tags = [] - else: - tags = list(tags) - self._folder_type = folder_type - self._label = label - self._tags = copy.deepcopy(tags) - self._status = status self._orig_folder_type = folder_type - self._orig_label = label - self._orig_status = status - self._orig_tags = copy.deepcopy(tags) # Know if folder has any products # - is used to know if folder allows hierarchy changes self._has_published_content = False self._path = path - def get_folder_type(self): + def get_folder_type(self) -> str: return self._folder_type - def set_folder_type(self, folder_type): + def set_folder_type(self, folder_type: str): self._folder_type = folder_type folder_type = property(get_folder_type, set_folder_type) - def get_label(self): - return self._label - - def set_label(self, label): - self._label = label - - label = property(get_label, set_label) - - def get_status(self): - """Folder status. - - Returns: - Union[str, UNKNOWN_VALUE]: Folder status or 'UNKNOWN_VALUE'. - - """ - return self._status - - def set_status(self, status_name): - """Set folder status. - - Args: - status_name (str): Status name. - - """ - project_entity = self._entity_hub.project_entity - status = project_entity.get_status_by_slugified_name(status_name) - if status is None: - raise ValueError( - f"Status {status_name} is not available on project." - ) - - if not status.is_available_for_entity_type("folder"): - raise ValueError( - f"Status {status_name} is not available for folder." - ) - - self._status = status_name - - status = property(get_status, set_status) - - def get_tags(self): - """Folder tags. - - Returns: - list[str]: Folder tags. - - """ - return self._tags - - def set_tags(self, tags): - """Change tags. - - Args: - tags (Iterable[str]): Tags. 
- - """ - self._tags = list(tags) - - tags = property(get_tags, set_tags) - def get_path(self, dynamic_value=True): if not dynamic_value: return self._path @@ -2758,16 +2876,12 @@ def _immutable_for_hierarchy(self): return None def lock(self): - super(FolderEntity, self).lock() - self._orig_label = self._get_label_value() + super().lock() self._orig_folder_type = self._folder_type - self._orig_status = self._status - self._orig_tags = copy.deepcopy(self._tags) @property def changes(self): changes = self._get_default_changes() - if self._orig_parent_id != self._parent_id: parent_id = self._parent_id if parent_id == self.project_name: @@ -2777,15 +2891,6 @@ def changes(self): if self._orig_folder_type != self._folder_type: changes["folderType"] = self._folder_type - if self._orig_status != self._status: - changes["status"] = self._status - - if self._orig_tags != self._tags: - changes["tags"] = self._tags - - label = self._get_label_value() - if label != self._orig_label: - changes["label"] = label return changes @@ -2855,29 +2960,17 @@ def to_create_body_data(self): output["data"] = self._data.get_new_entity_value() return output - def _get_label_value(self): - """Get label value that will be used for operations. - - Returns: - Union[str, None]: Label value. - - """ - label = self._label - if not label or self._name == label: - return None - return label - class TaskEntity(BaseEntity): """Entity representing a task on AYON server. Args: + name (str): Name of entity. task_type (str): Type of task. Task type must be available in config of project task types. entity_id (Union[str, None]): Id of the entity. New id is created if not passed. parent_id (Union[str, None]): Id of parent entity. - name (str): Name of entity. label (Optional[str]): Task label. attribs (Dict[str, Any]): Attribute values. data (Dict[str, Any]): Entity data (custom data). @@ -2887,8 +2980,13 @@ class TaskEntity(BaseEntity): the entity. created (Optional[bool]): Entity is new. When 'None' is passed the value is defined based on value of 'entity_id'. + status (Optional[str]): Task status. 
""" - + _supports_name = True + _supports_label = True + _supports_tags = True + _supports_status = True + _supports_thumbnail = True entity_type = "task" parent_entity_types = ["folder"] @@ -2910,116 +3008,52 @@ def __init__( status=UNKNOWN_VALUE, ): super().__init__( - entity_id, - parent_id, - name, - attribs, - data, - thumbnail_id, - active, - entity_hub, - created, + entity_id=entity_id, + parent_id=parent_id, + attribs=attribs, + data=data, + active=active, + created=created, + entity_hub=entity_hub, + name=name, + label=label, + tags=tags, + status=status, + thumbnail_id=thumbnail_id, ) - - if tags is None: - tags = [] - else: - tags = list(tags) - if assignees is None: assignees = [] else: assignees = list(assignees) self._task_type = task_type - self._label = label - self._status = status - self._tags = tags self._assignees = assignees self._orig_task_type = task_type - self._orig_label = label - self._orig_status = status - self._orig_tags = copy.deepcopy(tags) self._orig_assignees = copy.deepcopy(assignees) self._children_ids = set() def lock(self): - super(TaskEntity, self).lock() - self._orig_label = self._get_label_value() + super().lock() self._orig_task_type = self._task_type - self._orig_status = self._status - self._orig_tags = copy.deepcopy(self._tags) self._orig_assignees = copy.deepcopy(self._assignees) - def get_task_type(self): - return self._task_type - - def set_task_type(self, task_type): - self._task_type = task_type - - task_type = property(get_task_type, set_task_type) - - def get_label(self): - return self._label - - def set_label(self, label): - self._label = label - - label = property(get_label, set_label) - - def get_status(self): - """Task status. - - Returns: - Union[str, UNKNOWN_VALUE]: Task status or 'UNKNOWN_VALUE'. - - """ - return self._status - - def set_status(self, status_name): - """Set Task status. - - Args: - status_name (str): Status name. - - """ - project_entity = self._entity_hub.project_entity - status = project_entity.get_status_by_slugified_name(status_name) - if status is None: - raise ValueError( - f"Status {status_name} is not available on project." - ) - - if not status.is_available_for_entity_type("task"): - raise ValueError( - f"Status {status_name} is not available for task." - ) - - self._status = status_name - - status = property(get_status, set_status) - - def get_tags(self): - """Task tags. - - Returns: - list[str]: Task tags. + def get_folder_id(self): + return self._parent_id - """ - return self._tags + def set_folder_id(self, folder_id): + self.set_parent_id(folder_id) - def set_tags(self, tags): - """Change tags. + folder_id = property(get_folder_id, set_folder_id) - Args: - tags (Iterable[str]): Tags. + def get_task_type(self) -> str: + return self._task_type - """ - self._tags = list(tags) + def set_task_type(self, task_type: str): + self._task_type = task_type - tags = property(get_tags, set_tags) + task_type = property(get_task_type, set_task_type) def get_assignees(self): """Task assignees. 
@@ -3054,19 +3088,9 @@ def changes(self): if self._orig_task_type != self._task_type: changes["taskType"] = self._task_type - if self._orig_status != self._status: - changes["status"] = self._status - - if self._orig_tags != self._tags: - changes["tags"] = self._tags - if self._orig_assignees != self._assignees: changes["assignees"] = self._assignees - label = self._get_label_value() - if label != self._orig_label: - changes["label"] = label - return changes @classmethod @@ -3124,14 +3148,4 @@ def to_create_body_data(self): output["data"] = self._data.get_new_entity_value() return output - def _get_label_value(self): - """Get label value that will be used for operations. - - Returns: - Union[str, None]: Label value. - """ - label = self._label - if not label or self._name == label: - return None - return label From e43b27f5e539f47705f6e62d389874418d7556d6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 25 Oct 2024 17:32:52 +0200 Subject: [PATCH 008/135] use 'fetch' over 'query' --- ayon_api/entity_hub.py | 89 +++++++++++++++++++++++++++++------------- 1 file changed, 61 insertions(+), 28 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 9ecea77a9..1451a7195 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -1,8 +1,10 @@ import re import copy import collections +import warnings from abc import ABC, abstractmethod import typing +from typing import Optional, Union, Iterable, Dict, List, Set, Any from ._api import get_server_api_connection from .utils import create_entity_id, convert_entity_id, slugify_string @@ -146,7 +148,7 @@ def get_attributes_for_type(self, entity_type): """ return self._connection.get_attributes_for_type(entity_type) - def get_entity_by_id(self, entity_id): + def get_entity_by_id(self, entity_id: str) -> Optional["BaseEntity"]: """Receive entity by its id without entity type. The entity must be already existing in cached objects. @@ -155,44 +157,55 @@ def get_entity_by_id(self, entity_id): entity_id (str): Id of entity. Returns: - Union[BaseEntity, None]: Entity object or None. + Optional[BaseEntity]: Entity object or None. """ return self._entities_by_id.get(entity_id) - def get_folder_by_id(self, entity_id, allow_query=True): + def get_folder_by_id( + self, + entity_id: str, + allow_fetch: Optional[bool] = True, + ) -> Optional["FolderEntity"]: """Get folder entity by id. Args: - entity_id (str): Id of folder entity. - allow_query (bool): Try to query entity from server if is not + entity_id (str): Folder entity id. + allow_fetch (bool): Try to query entity from server if is not available in cache. Returns: - Union[FolderEntity, None]: Object of folder or 'None'. + Optional[FolderEntity]: Folder entity object. """ - if allow_query: - return self.get_or_query_entity_by_id(entity_id, ["folder"]) + if allow_fetch: + return self.get_or_fetch_entity_by_id(entity_id, ["folder"]) return self._entities_by_id.get(entity_id) - def get_task_by_id(self, entity_id, allow_query=True): + def get_task_by_id( + self, + entity_id: str, + allow_fetch: Optional[bool] = True, + ) -> Optional["TaskEntity"]: """Get task entity by id. Args: entity_id (str): Id of task entity. - allow_query (bool): Try to query entity from server if is not + allow_fetch (bool): Try to query entity from server if is not available in cache. Returns: - Union[TaskEntity, None]: Object of folder or 'None'. + Optional[TaskEntity]: Task entity object or None. 
""" - if allow_query: - return self.get_or_query_entity_by_id(entity_id, ["task"]) + if allow_fetch: + return self.get_or_fetch_entity_by_id(entity_id, ["task"]) return self._entities_by_id.get(entity_id) - - def get_or_query_entity_by_id(self, entity_id, entity_types): + def get_or_fetch_entity_by_id( + self, + entity_id: str, + entity_types: List["EntityType"], + ): """Get or query entity based on it's id and possible entity types. This is a helper function when entity id is known but entity type may @@ -249,6 +262,18 @@ def get_or_query_entity_by_id(self, entity_id, entity_types): return None + def get_or_query_entity_by_id( + self, + entity_id: str, + entity_types: List["EntityType"], + ): + warnings.warn( + "Method 'get_or_query_entity_by_id' is deprecated. " + "Please use 'get_or_fetch_entity_by_id' instead.", + DeprecationWarning + ) + return self.get_or_fetch_entity_by_id(entity_id, entity_types) + @property def entities(self): """Iterator over available entities. @@ -463,7 +488,7 @@ def set_entity_parent(self, entity_id, parent_id, orig_parent_id=_NOT_SET): parent.add_child(entity_id) self.reset_immutable_for_hierarchy_cache(parent_id) - def _query_entity_children(self, entity): + def _fetch_entity_children(self, entity): folder_fields = self._get_folder_fields() task_fields = self._get_task_fields() tasks = [] @@ -518,15 +543,15 @@ def _query_entity_children(self, entity): entity.fill_children_ids(children_ids) - def get_entity_children(self, entity, allow_query=True): - children_ids = entity.get_children_ids(allow_query=False) + def get_entity_children(self, entity, allow_fetch=True): + children_ids = entity.get_children_ids(allow_fetch=False) if children_ids is not UNKNOWN_VALUE: return entity.get_children() - if children_ids is UNKNOWN_VALUE and not allow_query: + if children_ids is UNKNOWN_VALUE and not allow_fetch: return UNKNOWN_VALUE - self._query_entity_children(entity) + self._fetch_entity_children(entity) return entity.get_children() @@ -614,7 +639,7 @@ def _get_task_fields(self): self._connection.get_default_fields_for_type("task") ) - def query_entities_from_server(self): + def fetch_hierarchy_entities(self): """Query whole project at once.""" project_entity = self.fill_project_from_server() @@ -670,6 +695,14 @@ def query_entities_from_server(self): entity = lock_queue.popleft() entity.lock() + def query_entities_from_server(self): + warnings.warn( + "Method 'query_entities_from_server' is deprecated." + " Please use 'fetch_hierarchy_entities' instead.", + DeprecationWarning + ) + return self.fetch_hierarchy_entities() + def lock(self): if self._project_entity is None: return @@ -1499,7 +1532,7 @@ def set_parent_id(self, parent_id): parent_id = property(get_parent_id, set_parent_id) - def get_parent(self, allow_query=True): + def get_parent(self, allow_fetch=True): """Parent entity. Returns: @@ -1510,13 +1543,13 @@ def get_parent(self, allow_query=True): if parent is not None: return parent - if not allow_query: + if not allow_fetch: return self._parent_id if self._parent_id is UNKNOWN_VALUE: return self._parent_id - return self._entity_hub.get_or_query_entity_by_id( + return self._entity_hub.get_or_fetch_entity_by_id( self._parent_id, self.parent_entity_types ) @@ -1537,7 +1570,7 @@ def set_parent(self, parent): parent = property(get_parent, set_parent) - def get_children_ids(self, allow_query=True): + def get_children_ids(self, allow_fetch=True): """Access to children objects. 
Todos: @@ -1551,14 +1584,14 @@ def get_children_ids(self, allow_query=True): """ if self._children_ids is UNKNOWN_VALUE: - if not allow_query: + if not allow_fetch: return self._children_ids self._entity_hub.get_entity_children(self, True) return set(self._children_ids) children_ids = property(get_children_ids) - def get_children(self, allow_query=True): + def get_children(self, allow_fetch=True): """Access to children objects. Returns: @@ -1566,7 +1599,7 @@ def get_children(self, allow_query=True): """ if self._children_ids is UNKNOWN_VALUE: - if not allow_query: + if not allow_fetch: return self._children_ids return self._entity_hub.get_entity_children(self, True) From 9452e3768a99e29e6a949ffcf1b3cdf967449430 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 25 Oct 2024 17:36:44 +0200 Subject: [PATCH 009/135] added some typehints --- ayon_api/entity_hub.py | 69 ++++++++++++++++++++++-------------------- 1 file changed, 37 insertions(+), 32 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 1451a7195..2c308fc8b 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -129,7 +129,7 @@ def project_entity(self): self.fill_project_from_server() return self._project_entity - def get_attributes_for_type(self, entity_type): + def get_attributes_for_type(self, entity_type: "EntityType"): """Get attributes available for a type. Attributes are based on entity types. @@ -321,14 +321,14 @@ def add_new_task(self, *args, created=True, **kwargs): Args: task_type (str): Type of task. Task type must be available in config of project folder types. - entity_id (Union[str, None]): Id of the entity. New id is created + entity_id (Optional[str]): Id of the entity. New id is created if not passed. - parent_id (Union[str, None]): Id of parent entity. + parent_id (Optional[str]): Id of parent entity. name (str): Name of entity. label (Optional[str]): Folder label. attribs (Dict[str, Any]): Attribute values. data (Dict[str, Any]): Entity data (custom data). - thumbnail_id (Union[str, None]): Id of entity's thumbnail. + thumbnail_id (Optional[str]): Id of entity's thumbnail. active (bool): Is entity active. created (Optional[bool]): Entity is new. When 'None' is passed the value is defined based on value of 'entity_id'. 
@@ -565,7 +565,7 @@ def delete_entity(self, entity): parent.remove_child(entity.id) def reset_immutable_for_hierarchy_cache( - self, entity_id, bottom_to_top=True + self, entity_id: Optional[str], bottom_to_top: Optional[bool] = True ): if bottom_to_top is None or entity_id is None: return @@ -574,16 +574,20 @@ def reset_immutable_for_hierarchy_cache( reset_queue.append(entity_id) if bottom_to_top: while reset_queue: - entity_id = reset_queue.popleft() - entity = self.get_entity_by_id(entity_id) + entity_id: str = reset_queue.popleft() + entity: Optional["BaseEntity"] = self.get_entity_by_id( + entity_id + ) if entity is None: continue entity.reset_immutable_for_hierarchy_cache(None) reset_queue.append(entity.parent_id) else: while reset_queue: - entity_id = reset_queue.popleft() - entity = self.get_entity_by_id(entity_id) + entity_id: str = reset_queue.popleft() + entity: Optional["BaseEntity"] = self.get_entity_by_id( + entity_id + ) if entity is None: continue entity.reset_immutable_for_hierarchy_cache(None) @@ -625,7 +629,7 @@ def fill_project_from_server(self): self.add_entity(self._project_entity) return self._project_entity - def _get_folder_fields(self): + def _get_folder_fields(self) -> Set[str]: folder_fields = set( self._connection.get_default_fields_for_type("folder") ) @@ -634,7 +638,7 @@ def _get_folder_fields(self): folder_fields.add("data") return folder_fields - def _get_task_fields(self): + def _get_task_fields(self) -> Set[str]: return set( self._connection.get_default_fields_for_type("task") ) @@ -1251,14 +1255,14 @@ def __getitem__(self, item): def __setitem__(self, item, value): return setattr(self, item, value) - def _prepare_entity_id(self, entity_id): + def _prepare_entity_id(self, entity_id: Any) -> str: entity_id = convert_entity_id(entity_id) if entity_id is None: entity_id = create_entity_id() return entity_id @property - def id(self): + def id(self) -> str: """Access to entity id under which is entity available on server. Returns: @@ -1268,7 +1272,7 @@ def id(self): return self._entity_id @property - def removed(self): + def removed(self) -> bool: return self._parent_id is None @property @@ -1300,7 +1304,7 @@ def data(self): return self._data @property - def project_name(self): + def project_name(self) -> str: """Quick access to project from entity hub. Returns: @@ -1311,8 +1315,8 @@ def project_name(self): @property @abstractmethod - def entity_type(self): - """Entity type coresponding to server. + def entity_type(self) -> "EntityType": + """Entity type corresponding to server. Returns: EntityType: Entity type. @@ -1322,22 +1326,22 @@ def entity_type(self): @property @abstractmethod - def parent_entity_types(self): - """Entity type coresponding to server. + def parent_entity_types(self) -> List[str]: + """Entity type corresponding to server. Returns: - Iterable[str]: Possible entity types of parent. + List[str]: Possible entity types of parent. """ pass @property @abstractmethod - def changes(self): + def changes(self) -> Optional[Dict[str, Any]]: """Receive entity changes. Returns: - Union[Dict[str, Any], None]: All values that have changed on + Optional[Dict[str, Any]]: All values that have changed on entity. New entity must return None. """ @@ -1345,7 +1349,9 @@ def changes(self): @classmethod @abstractmethod - def from_entity_data(cls, entity_data, entity_hub): + def from_entity_data( + cls, entity_data: Dict[str, Any], entity_hub: EntityHub + ) -> "BaseEntity": """Create entity based on queried data from server. 
Args: @@ -1359,7 +1365,7 @@ def from_entity_data(cls, entity_data, entity_hub): pass @abstractmethod - def to_create_body_data(self): + def to_create_body_data(self) -> Dict[str, Any]: """Convert object of entity to data for server on creation. Returns: @@ -1369,7 +1375,7 @@ def to_create_body_data(self): pass @property - def immutable_for_hierarchy(self): + def immutable_for_hierarchy(self) -> bool: """Entity is immutable for hierarchy changes. Hierarchy changes can be considered as change of name or parents. @@ -1402,17 +1408,19 @@ def _immutable_for_hierarchy(self): which is used in property 'immutable_for_hierarchy'. Returns: - Union[bool, None]: Bool to explicitly telling if is immutable or + Optional[bool]: Bool to explicitly telling if is immutable or not otherwise None. """ return None @property - def has_cached_immutable_hierarchy(self): + def has_cached_immutable_hierarchy(self) -> bool: return self._immutable_for_hierarchy_cache is not None - def reset_immutable_for_hierarchy_cache(self, bottom_to_top=True): + def reset_immutable_for_hierarchy_cache( + self, bottom_to_top: Optional[bool] = True + ): """Clear cache of immutable hierarchy property. This is used when entity changed parent or a child was added. @@ -2598,11 +2606,8 @@ class ProjectEntity(BaseEntity): library (bool): Is project library project. folder_types (list[dict[str, Any]]): Folder types definition. task_types (list[dict[str, Any]]): Task types definition. - entity_id (Optional[str]): Id of the entity. New id is created if - not passed. - parent_id (Union[str, None]): Id of parent entity. name (str): Name of entity. - attribs (Dict[str, Any]): Attribute values. + attribs (Optional[Dict[str, Any]]): Attribute values. data (Dict[str, Any]): Entity data (custom data). thumbnail_id (Union[str, None]): Id of entity's thumbnail. active (bool): Is entity active. From 41ddf872547f9080d361e7502f65dc128ef0d732 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 25 Oct 2024 17:37:31 +0200 Subject: [PATCH 010/135] simplified ProjectEntity arguments --- ayon_api/entity_hub.py | 37 ++++++++++++++----------------------- 1 file changed, 14 insertions(+), 23 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 2c308fc8b..f1651f929 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -2602,19 +2602,17 @@ class ProjectEntity(BaseEntity): """Entity representing project on AYON server. Args: + name (str): Name of entity. project_code (str): Project code. library (bool): Is project library project. folder_types (list[dict[str, Any]]): Folder types definition. task_types (list[dict[str, Any]]): Task types definition. - name (str): Name of entity. + statuses: (list[dict[str, Any]]): Statuses definition. attribs (Optional[Dict[str, Any]]): Attribute values. data (Dict[str, Any]): Entity data (custom data). - thumbnail_id (Union[str, None]): Id of entity's thumbnail. active (bool): Is entity active. entity_hub (EntityHub): Object of entity hub which created object of the entity. - created (Optional[bool]): Entity is new. When 'None' is passed the - value is defined based on value of 'entity_id'. 
""" _supports_name = True @@ -2626,20 +2624,16 @@ class ProjectEntity(BaseEntity): def __init__( self, - project_code, - library, - folder_types, - task_types, - statuses, - entity_id=None, - parent_id=UNKNOWN_VALUE, - name=UNKNOWN_VALUE, - attribs=UNKNOWN_VALUE, - data=UNKNOWN_VALUE, - thumbnail_id=UNKNOWN_VALUE, - active=UNKNOWN_VALUE, - entity_hub=None, - created=None, + name: str, + project_code: str, + library: bool, + folder_types: List[Dict[str, Any]], + task_types: List[Dict[str, Any]], + statuses: List[Dict[str, Any]], + attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + active: Optional[bool] = UNKNOWN_VALUE, + entity_hub: EntityHub = None, ): super().__init__( entity_id=name, @@ -2650,7 +2644,6 @@ def __init__( created=False, entity_hub=entity_hub, name=name, - thumbnail_id=thumbnail_id, ) self._project_code = project_code @@ -2765,16 +2758,14 @@ def changes(self): return changes @classmethod - def from_entity_data(cls, project, entity_hub): + def from_entity_data(cls, project, entity_hub) -> "ProjectEntity": return cls( + project["name"], project["code"], - parent_id=PROJECT_PARENT_ID, - entity_id=project["name"], library=project["library"], folder_types=project["folderTypes"], task_types=project["taskTypes"], statuses=project["statuses"], - name=project["name"], attribs=project["ownAttrib"], data=project["data"], active=project["active"], From 98e9db24812d19225e059b51d6eabb9453e1d580 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 25 Oct 2024 17:44:09 +0200 Subject: [PATCH 011/135] better arguments order for folder entity --- ayon_api/entity_hub.py | 97 +++++++++++++++++++++++++++--------------- 1 file changed, 63 insertions(+), 34 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index f1651f929..a3ccc4f6a 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -285,23 +285,39 @@ def entities(self): for entity in self._entities_by_id.values(): yield entity - def add_new_folder(self, *args, created=True, **kwargs): + def add_new_folder( + self, + name: str, + folder_type: str, + label: Optional[str] = None, + parent_id: Optional[str] = UNKNOWN_VALUE, + attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + path: Optional[str] = None, + status: Optional[str] = UNKNOWN_VALUE, + thumbnail_id: Optional[str] = UNKNOWN_VALUE, + active: bool = UNKNOWN_VALUE, + entity_id: Optional[str] = None, + created: Optional[bool] = None, + tags: Optional[List[str]] = None, + ): """Create folder object and add it to entity hub. Args: + name (str): Name of entity. folder_type (str): Type of folder. Folder type must be available in config of project folder types. - entity_id (Union[str, None]): Id of the entity. New id is created - if not passed. - parent_id (Union[str, None]): Id of parent entity. - name (str): Name of entity. label (Optional[str]): Folder label. - path (Optional[str]): Folder path. Path consist of all parent names - with slash('/') used as separator. + parent_id (Union[str, None]): Id of parent entity. attribs (Dict[str, Any]): Attribute values. data (Dict[str, Any]): Entity data (custom data). + path (Optional[str]): Folder path. Path consist of all parent names + with slash('/') used as separator. + status (Optional[str]): Folder status. thumbnail_id (Union[str, None]): Id of entity's thumbnail. active (bool): Is entity active. + entity_id (Union[str, None]): Id of the entity. 
New id is created if + not passed. created (Optional[bool]): Entity is new. When 'None' is passed the value is defined based on value of 'entity_id'. @@ -310,7 +326,20 @@ def add_new_folder(self, *args, created=True, **kwargs): """ folder_entity = FolderEntity( - *args, **kwargs, created=created, entity_hub=self + name=name, + folder_type=folder_type, + label=label, + parent_id=parent_id, + attribs=attribs, + data=data, + path=path, + status=status, + thumbnail_id=thumbnail_id, + active=active, + entity_id=entity_id, + created=created, + tags=tags, + entity_hub=self ) self.add_entity(folder_entity) return folder_entity @@ -2782,23 +2811,24 @@ class FolderEntity(BaseEntity): """Entity representing a folder on AYON server. Args: + name (str): Name of entity. folder_type (str): Type of folder. Folder type must be available in config of project folder types. - entity_id (Union[str, None]): Id of the entity. New id is created if - not passed. + label (Optional[str]): Folder label. parent_id (Union[str, None]): Id of parent entity. - name (str): Name of entity. attribs (Dict[str, Any]): Attribute values. data (Dict[str, Any]): Entity data (custom data). - thumbnail_id (Union[str, None]): Id of entity's thumbnail. - active (bool): Is entity active. - label (Optional[str]): Folder label. path (Optional[str]): Folder path. Path consist of all parent names with slash('/') used as separator. - entity_hub (EntityHub): Object of entity hub which created object of - the entity. + status (Optional[str]): Folder status. + thumbnail_id (Union[str, None]): Id of entity's thumbnail. + active (bool): Is entity active. + entity_id (Union[str, None]): Id of the entity. New id is created if + not passed. created (Optional[bool]): Entity is new. When 'None' is passed the value is defined based on value of 'entity_id'. + entity_hub (EntityHub): Object of entity hub which created object of + the entity. 
""" _supports_name = True _supports_label = True @@ -2811,20 +2841,20 @@ class FolderEntity(BaseEntity): def __init__( self, - folder_type, - entity_id=None, - parent_id=UNKNOWN_VALUE, - name=UNKNOWN_VALUE, - attribs=UNKNOWN_VALUE, - data=UNKNOWN_VALUE, - thumbnail_id=UNKNOWN_VALUE, - active=UNKNOWN_VALUE, - entity_hub=None, - created=None, - label=None, - path=None, - tags=None, - status=UNKNOWN_VALUE, + name: str, + folder_type: str, + label: Optional[str] = None, + parent_id: Optional[str] = UNKNOWN_VALUE, + attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + path: Optional[str] = None, + status: Optional[str] = UNKNOWN_VALUE, + thumbnail_id: Optional[str] = UNKNOWN_VALUE, + active: bool = UNKNOWN_VALUE, + entity_id: Optional[str] = None, + created: Optional[bool] = None, + tags: Optional[List[str]] = None, + entity_hub: EntityHub = None, ): super().__init__( entity_id=entity_id, @@ -2920,23 +2950,22 @@ def changes(self): if self._orig_folder_type != self._folder_type: changes["folderType"] = self._folder_type - return changes @classmethod - def from_entity_data(cls, folder, entity_hub): + def from_entity_data(cls, folder, entity_hub) -> "FolderEntity": parent_id = folder["parentId"] if parent_id is None: parent_id = entity_hub.project_entity.id return cls( - folder["folderType"], + name=folder["name"], + folder_type=folder["folderType"], label=folder["label"], path=folder["path"], status=folder["status"], tags=folder["tags"], entity_id=folder["id"], parent_id=parent_id, - name=folder["name"], data=folder.get("data"), attribs=folder["ownAttrib"], active=folder["active"], From f59f8788199406eebae395571725d90ef746d172 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 25 Oct 2024 17:48:01 +0200 Subject: [PATCH 012/135] even better args order --- ayon_api/entity_hub.py | 39 +++++++++++++++++++++------------------ 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index a3ccc4f6a..8a0de641f 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -289,17 +289,17 @@ def add_new_folder( self, name: str, folder_type: str, - label: Optional[str] = None, parent_id: Optional[str] = UNKNOWN_VALUE, - attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, - data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + label: Optional[str] = None, path: Optional[str] = None, status: Optional[str] = UNKNOWN_VALUE, + tags: Optional[List[str]] = None, + attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, thumbnail_id: Optional[str] = UNKNOWN_VALUE, active: bool = UNKNOWN_VALUE, entity_id: Optional[str] = None, created: Optional[bool] = None, - tags: Optional[List[str]] = None, ): """Create folder object and add it to entity hub. @@ -307,13 +307,14 @@ def add_new_folder( name (str): Name of entity. folder_type (str): Type of folder. Folder type must be available in config of project folder types. - label (Optional[str]): Folder label. parent_id (Union[str, None]): Id of parent entity. - attribs (Dict[str, Any]): Attribute values. - data (Dict[str, Any]): Entity data (custom data). + label (Optional[str]): Folder label. path (Optional[str]): Folder path. Path consist of all parent names with slash('/') used as separator. status (Optional[str]): Folder status. + tags (Optional[List[str]]): Folder tags. + attribs (Dict[str, Any]): Attribute values. + data (Dict[str, Any]): Entity data (custom data). 
thumbnail_id (Union[str, None]): Id of entity's thumbnail. active (bool): Is entity active. entity_id (Union[str, None]): Id of the entity. New id is created if @@ -328,17 +329,17 @@ def add_new_folder( folder_entity = FolderEntity( name=name, folder_type=folder_type, - label=label, parent_id=parent_id, - attribs=attribs, - data=data, + label=label, path=path, status=status, + tags=tags, + attribs=attribs, + data=data, thumbnail_id=thumbnail_id, active=active, entity_id=entity_id, created=created, - tags=tags, entity_hub=self ) self.add_entity(folder_entity) @@ -2814,13 +2815,14 @@ class FolderEntity(BaseEntity): name (str): Name of entity. folder_type (str): Type of folder. Folder type must be available in config of project folder types. - label (Optional[str]): Folder label. parent_id (Union[str, None]): Id of parent entity. - attribs (Dict[str, Any]): Attribute values. - data (Dict[str, Any]): Entity data (custom data). + label (Optional[str]): Folder label. path (Optional[str]): Folder path. Path consist of all parent names with slash('/') used as separator. status (Optional[str]): Folder status. + tags (Optional[List[str]]): Folder tags. + attribs (Dict[str, Any]): Attribute values. + data (Dict[str, Any]): Entity data (custom data). thumbnail_id (Union[str, None]): Id of entity's thumbnail. active (bool): Is entity active. entity_id (Union[str, None]): Id of the entity. New id is created if @@ -2829,6 +2831,7 @@ class FolderEntity(BaseEntity): value is defined based on value of 'entity_id'. entity_hub (EntityHub): Object of entity hub which created object of the entity. + """ _supports_name = True _supports_label = True @@ -2843,17 +2846,17 @@ def __init__( self, name: str, folder_type: str, - label: Optional[str] = None, parent_id: Optional[str] = UNKNOWN_VALUE, - attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, - data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + label: Optional[str] = None, path: Optional[str] = None, status: Optional[str] = UNKNOWN_VALUE, + tags: Optional[List[str]] = None, + attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, thumbnail_id: Optional[str] = UNKNOWN_VALUE, active: bool = UNKNOWN_VALUE, entity_id: Optional[str] = None, created: Optional[bool] = None, - tags: Optional[List[str]] = None, entity_hub: EntityHub = None, ): super().__init__( From 4679f6f50ef1044a7c7d7b41c1bf3a7fa9696134 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 25 Oct 2024 17:54:26 +0200 Subject: [PATCH 013/135] match order of args in classmethod --- ayon_api/entity_hub.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 8a0de641f..dcda65f3a 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -2963,16 +2963,16 @@ def from_entity_data(cls, folder, entity_hub) -> "FolderEntity": return cls( name=folder["name"], folder_type=folder["folderType"], + parent_id=parent_id, label=folder["label"], path=folder["path"], status=folder["status"], tags=folder["tags"], - entity_id=folder["id"], - parent_id=parent_id, - data=folder.get("data"), attribs=folder["ownAttrib"], - active=folder["active"], + data=folder.get("data"), thumbnail_id=folder["thumbnailId"], + active=folder["active"], + entity_id=folder["id"], created=False, entity_hub=entity_hub ) From 6d7002b39ae245aa1d4eb3b7b175f8e9ab0b8fbb Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 25 Oct 2024 
17:56:48 +0200 Subject: [PATCH 014/135] better order of arguments for task entity --- ayon_api/entity_hub.py | 117 ++++++++++++++++++++++++++--------------- 1 file changed, 76 insertions(+), 41 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index dcda65f3a..edd52612d 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -345,21 +345,39 @@ def add_new_folder( self.add_entity(folder_entity) return folder_entity - def add_new_task(self, *args, created=True, **kwargs): + def add_new_task( + self, + name: str, + task_type: str, + parent_id: Optional[str] = UNKNOWN_VALUE, + label: Optional[str] = None, + status: Optional[str] = UNKNOWN_VALUE, + tags: Optional[Iterable[str]] = None, + attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + assignees: Optional[Iterable[str]] = None, + thumbnail_id: Optional[str] = UNKNOWN_VALUE, + active: Optional[bool] = UNKNOWN_VALUE, + entity_id: Optional[str] = None, + created: Optional[bool] = None, + ): """Create folder object and add it to entity hub. Args: - task_type (str): Type of task. Task type must be available in - config of project folder types. - entity_id (Optional[str]): Id of the entity. New id is created - if not passed. - parent_id (Optional[str]): Id of parent entity. name (str): Name of entity. - label (Optional[str]): Folder label. + task_type (str): Type of task. Task type must be available in config + of project task types. + parent_id (Union[str, None]): Id of parent entity. + label (Optional[str]): Task label. + status (Optional[str]): Task status. + tags (Optional[Iterable[str]]): Folder tags. attribs (Dict[str, Any]): Attribute values. data (Dict[str, Any]): Entity data (custom data). - thumbnail_id (Optional[str]): Id of entity's thumbnail. + assignees (Optional[Iterable[str]]): User assignees to the task. + thumbnail_id (Union[str, None]): Id of entity's thumbnail. active (bool): Is entity active. + entity_id (Union[str, None]): Id of the entity. New id is created if + not passed. created (Optional[bool]): Entity is new. When 'None' is passed the value is defined based on value of 'entity_id'. @@ -368,7 +386,20 @@ def add_new_task(self, *args, created=True, **kwargs): """ task_entity = TaskEntity( - *args, **kwargs, created=created, entity_hub=self + name=name, + task_type=task_type, + parent_id=parent_id, + label=label, + status=status, + tags=tags, + attribs=attribs, + data=data, + assignees=assignees, + thumbnail_id=thumbnail_id, + active=active, + entity_id=entity_id, + created=created, + entity_hub=self, ) self.add_entity(task_entity) return task_entity @@ -3029,19 +3060,22 @@ class TaskEntity(BaseEntity): name (str): Name of entity. task_type (str): Type of task. Task type must be available in config of project task types. - entity_id (Union[str, None]): Id of the entity. New id is created if - not passed. parent_id (Union[str, None]): Id of parent entity. label (Optional[str]): Task label. + status (Optional[str]): Task status. + tags (Optional[Iterable[str]]): Folder tags. attribs (Dict[str, Any]): Attribute values. data (Dict[str, Any]): Entity data (custom data). + assignees (Optional[Iterable[str]]): User assignees to the task. thumbnail_id (Union[str, None]): Id of entity's thumbnail. active (bool): Is entity active. - entity_hub (EntityHub): Object of entity hub which created object of - the entity. + entity_id (Union[str, None]): Id of the entity. New id is created if + not passed. created (Optional[bool]): Entity is new. 
When 'None' is passed the value is defined based on value of 'entity_id'. - status (Optional[str]): Task status. + entity_hub (EntityHub): Object of entity hub which created object of + the entity. + """ _supports_name = True _supports_label = True @@ -3053,34 +3087,34 @@ class TaskEntity(BaseEntity): def __init__( self, - task_type, - entity_id=None, - parent_id=UNKNOWN_VALUE, - name=UNKNOWN_VALUE, - attribs=UNKNOWN_VALUE, - data=UNKNOWN_VALUE, - thumbnail_id=UNKNOWN_VALUE, - active=UNKNOWN_VALUE, - entity_hub=None, - created=None, - label=None, - tags=None, - assignees=None, - status=UNKNOWN_VALUE, + name: str, + task_type: str, + parent_id: Optional[str] = UNKNOWN_VALUE, + label: Optional[str] = None, + status: Optional[str] = UNKNOWN_VALUE, + tags: Optional[Iterable[str]] = None, + attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + assignees: Optional[Iterable[str]] = None, + thumbnail_id: Optional[str] = UNKNOWN_VALUE, + active: Optional[bool] = UNKNOWN_VALUE, + entity_id: Optional[str] = None, + created: Optional[bool] = None, + entity_hub: EntityHub = None, ): super().__init__( - entity_id=entity_id, + name=name, parent_id=parent_id, + label=label, + status=status, + tags=tags, attribs=attribs, data=data, + thumbnail_id=thumbnail_id, active=active, + entity_id=entity_id, created=created, entity_hub=entity_hub, - name=name, - label=label, - tags=tags, - status=status, - thumbnail_id=thumbnail_id, ) if assignees is None: assignees = [] @@ -3155,19 +3189,20 @@ def changes(self): return changes @classmethod - def from_entity_data(cls, task, entity_hub): + def from_entity_data(cls, task, entity_hub) -> "TaskEntity": return cls( - task["taskType"], - entity_id=task["id"], + name=task["name"], + task_type=task["taskType"], + parent_id=task["folderId"], label=task["label"], status=task["status"], tags=task["tags"], - assignees=task["assignees"], - parent_id=task["folderId"], - name=task["name"], - data=task.get("data"), attribs=task["ownAttrib"], + data=task.get("data"), + assignees=task["assignees"], + thumbnail_id=task["thumbnailId"], active=task["active"], + entity_id=task["id"], created=False, entity_hub=entity_hub ) From fdc27eb016a0181dbbb3630b294ccc74169e2930 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 25 Oct 2024 17:56:57 +0200 Subject: [PATCH 015/135] removed unused variable --- ayon_api/entity_hub.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index edd52612d..e437f6c6b 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -61,7 +61,7 @@ def __init__( ): if not connection: connection = get_server_api_connection() - major, minor, patch, _, _ = connection.server_version_tuple + major, minor, _, _, _ = connection.server_version_tuple path_start_with_slash = True if (major, minor) < (0, 6): path_start_with_slash = False From 1984157642e6d3e3885cacd3a8595f164afeb089 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 25 Oct 2024 18:23:36 +0200 Subject: [PATCH 016/135] use folder_id instead of parent_id --- ayon_api/entity_hub.py | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index e437f6c6b..ab4b73b47 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -349,7 +349,7 @@ def add_new_task( self, name: str, task_type: str, - parent_id: Optional[str] 
= UNKNOWN_VALUE, + folder_id: Optional[str] = UNKNOWN_VALUE, label: Optional[str] = None, status: Optional[str] = UNKNOWN_VALUE, tags: Optional[Iterable[str]] = None, @@ -360,6 +360,7 @@ def add_new_task( active: Optional[bool] = UNKNOWN_VALUE, entity_id: Optional[str] = None, created: Optional[bool] = None, + parent_id: Optional[str] = UNKNOWN_VALUE, ): """Create folder object and add it to entity hub. @@ -367,7 +368,7 @@ def add_new_task( name (str): Name of entity. task_type (str): Type of task. Task type must be available in config of project task types. - parent_id (Union[str, None]): Id of parent entity. + folder_id (Union[str, None]): Parent folder id. label (Optional[str]): Task label. status (Optional[str]): Task status. tags (Optional[Iterable[str]]): Folder tags. @@ -380,15 +381,24 @@ def add_new_task( not passed. created (Optional[bool]): Entity is new. When 'None' is passed the value is defined based on value of 'entity_id'. + parent_id (Union[str, None]): DEPRECATED Parent folder id. Returns: TaskEntity: Added task entity. """ + if parent_id is not UNKNOWN_VALUE: + warnings.warn( + "Used deprecated argument 'parent_id'." + " Use 'folder_id' instead.", + DeprecationWarning + ) + folder_id = parent_id + task_entity = TaskEntity( name=name, task_type=task_type, - parent_id=parent_id, + folder_id=folder_id, label=label, status=status, tags=tags, @@ -3089,7 +3099,7 @@ def __init__( self, name: str, task_type: str, - parent_id: Optional[str] = UNKNOWN_VALUE, + folder_id: Optional[str] = UNKNOWN_VALUE, label: Optional[str] = None, status: Optional[str] = UNKNOWN_VALUE, tags: Optional[Iterable[str]] = None, @@ -3104,7 +3114,7 @@ def __init__( ): super().__init__( name=name, - parent_id=parent_id, + parent_id=folder_id, label=label, status=status, tags=tags, @@ -3193,7 +3203,7 @@ def from_entity_data(cls, task, entity_hub) -> "TaskEntity": return cls( name=task["name"], task_type=task["taskType"], - parent_id=task["folderId"], + folder_id=task["folderId"], label=task["label"], status=task["status"], tags=task["tags"], From ad1f62927a6860a24e1b9e117397c80f768ba82d Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 25 Oct 2024 18:23:46 +0200 Subject: [PATCH 017/135] don't add attribs to data automatically --- ayon_api/entity_hub.py | 1 - 1 file changed, 1 deletion(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index ab4b73b47..b9e457536 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -3225,7 +3225,6 @@ def to_create_body_data(self): "name": self.name, "taskType": self.task_type, "folderId": self.parent_id, - "attrib": self.attribs.to_dict(), } label = self._get_label_value() if label: From 71496fbb2d2959b55ccdb29bce68ab1b94035716 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 25 Oct 2024 18:23:58 +0200 Subject: [PATCH 018/135] added product and version entity --- ayon_api/entity_hub.py | 234 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 234 insertions(+) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index b9e457536..248b4d58e 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -3254,3 +3254,237 @@ def to_create_body_data(self): return output +class ProductEntity(BaseEntity): + _supports_name = True + _supports_tags = True + + def __init__( + self, + name: str, + product_type: str, + folder_id: Optional[Union[str, _CustomNone]] = UNKNOWN_VALUE, + tags: Optional[Iterable[str]] = None, + attribs: 
Optional[Dict[str, Any]] = UNKNOWN_VALUE, + data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + active: Optional[bool] = UNKNOWN_VALUE, + entity_id: Optional[str] = None, + created: Optional[bool] = None, + entity_hub: EntityHub = None, + ): + super().__init__( + name=name, + parent_id=folder_id, + tags=tags, + attribs=attribs, + data=data, + created=created, + entity_id=entity_id, + active=active, + entity_hub=entity_hub, + ) + self._product_type = product_type + + self._orig_product_type = product_type + + def get_folder_id(self): + return self._parent_id + + def set_folder_id(self, folder_id): + self.set_parent_id(folder_id) + + folder_id = property(get_folder_id, set_folder_id) + + def get_product_type(self): + return self._product_type + + def set_product_type(self, product_type): + self._product_type = product_type + + product_type = property(get_product_type, set_product_type) + + def lock(self): + super().lock() + self._orig_product_type = self._product_type + + @property + def changes(self): + changes = self._get_default_changes() + + if self._orig_parent_id != self._parent_id: + changes["folderId"] = self._parent_id + + if self._orig_product_type != self._product_type: + changes["productType"] = self._product_type + + return changes + + @classmethod + def from_entity_data(cls, product, entity_hub): + return cls( + name=product["name"], + product_type=product["productType"], + folder_id=product["folderId"], + tags=product["tags"], + attribs=product["ownAttrib"], + data=product.get("data"), + active=product["active"], + entity_id=product["id"], + created=False, + entity_hub=entity_hub + ) + + def to_create_body_data(self): + if self.parent_id is UNKNOWN_VALUE: + raise ValueError("Product does not have set 'folder_id'") + + output = { + "name": self.name, + "productType": self.product_type, + "folderId": self.parent_id, + } + + attrib = self.attribs.to_dict() + if attrib: + output["attrib"] = attrib + + if self.active is not UNKNOWN_VALUE: + output["active"] = self.active + + if self.tags: + output["tags"] = self.tags + + if ( + self._entity_hub.allow_data_changes + and self._data is not UNKNOWN_VALUE + ): + output["data"] = self._data.get_new_entity_value() + return output + + +class VersionEntity(BaseEntity): + _supports_tags = True + _supports_status = True + _supports_thumbnail = True + + def __init__( + self, + version: int, + product_id: Optional[Union[str, _CustomNone]] = UNKNOWN_VALUE, + task_id: Optional[Union[str, _CustomNone]] = UNKNOWN_VALUE, + status: Optional[str] = UNKNOWN_VALUE, + tags: Optional[Iterable[str]] = None, + attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + thumbnail_id: Optional[str] = UNKNOWN_VALUE, + active: Optional[bool] = UNKNOWN_VALUE, + entity_id: Optional[str] = None, + created: Optional[bool] = None, + entity_hub: EntityHub = None, + ): + super().__init__( + parent_id=product_id, + status=status, + tags=tags, + attribs=attribs, + data=data, + thumbnail_id=thumbnail_id, + active=active, + entity_id=entity_id, + created=created, + entity_hub=entity_hub, + ) + self._version = version + self._task_id = task_id + + self._orig_version = version + self._orig_task_id = task_id + + def get_version(self): + return self._version + + def set_version(self, version): + self._version = version + + version = property(get_version, set_version) + + def get_product_id(self): + return self._parent_id + + def set_product_id(self, product_id): + self.set_parent_id(product_id) + + product_id = property(get_product_id, 
set_product_id) + + def get_task_id(self): + return self._task_id + + def set_task_id(self, task_id): + self._task_id = task_id + + task_id = property(get_task_id, set_task_id) + + def lock(self): + super().lock() + self._orig_version = self._version + self._orig_task_id = self._task_id + + @property + def changes(self): + changes = self._get_default_changes() + + if self._orig_parent_id != self._parent_id: + changes["productId"] = self._parent_id + + if self._orig_task_id != self._task_id: + changes["taskId"] = self._task_id + + return changes + + @classmethod + def from_entity_data(cls, version, entity_hub): + return cls( + version=version["version"], + product_id=version["productId"], + task_id=version["taskId"], + status=version["status"], + tags=version["tags"], + attribs=version["ownAttrib"], + data=version.get("data"), + thumbnail_id=version["thumbnailId"], + active=version["active"], + entity_id=version["id"], + created=False, + entity_hub=entity_hub + ) + + def to_create_body_data(self): + if self.parent_id is UNKNOWN_VALUE: + raise ValueError("Version does not have set 'product_id'") + + output = { + "version": self.version, + "productId": self.parent_id, + } + task_id = self.task_id + if task_id: + output["taskId"] = task_id + + attrib = self.attribs.to_dict() + if attrib: + output["attrib"] = attrib + + if self.active is not UNKNOWN_VALUE: + output["active"] = self.active + + if self.tags: + output["tags"] = self.tags + + if self.status: + output["status"] = self.status + + if ( + self._entity_hub.allow_data_changes + and self._data is not UNKNOWN_VALUE + ): + output["data"] = self._data.get_new_entity_value() + return output From 82c8835301ce5235f7a7fc5f28ae536dd43d5e98 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 25 Oct 2024 18:25:13 +0200 Subject: [PATCH 019/135] implement remaining abstract properties --- ayon_api/entity_hub.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 248b4d58e..ca08722a0 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -3258,6 +3258,9 @@ class ProductEntity(BaseEntity): _supports_name = True _supports_tags = True + entity_type = "product" + parent_entity_types = ["folder"] + def __init__( self, name: str, @@ -3366,6 +3369,9 @@ class VersionEntity(BaseEntity): _supports_status = True _supports_thumbnail = True + entity_type = "version" + parent_entity_types = ["product"] + def __init__( self, version: int, From b4fc406adcbfab8cd4088f7b806bc2b78121f29c Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 25 Oct 2024 18:38:12 +0200 Subject: [PATCH 020/135] added few helper functions to get entities --- ayon_api/entity_hub.py | 69 ++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 67 insertions(+), 2 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index ca08722a0..52c36478c 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -171,7 +171,7 @@ def get_folder_by_id( Args: entity_id (str): Folder entity id. - allow_fetch (bool): Try to query entity from server if is not + allow_fetch (bool): Try to fetch entity from server if is not available in cache. Returns: @@ -191,7 +191,7 @@ def get_task_by_id( Args: entity_id (str): Id of task entity. - allow_fetch (bool): Try to query entity from server if is not + allow_fetch (bool): Try to fetch entity from server if is not available in cache. 
Returns: @@ -201,6 +201,47 @@ def get_task_by_id( if allow_fetch: return self.get_or_fetch_entity_by_id(entity_id, ["task"]) return self._entities_by_id.get(entity_id) + + def get_product_by_id( + self, + entity_id: str, + allow_fetch: Optional[bool] = True, + ) -> Optional["ProductEntity"]: + """Get product entity by id. + + Args: + entity_id (str): Product id. + allow_fetch (bool): Try to fetch entity from server if is not + available in cache. + + Returns: + Optional[ProductEntity]: Product entity object or None. + + """ + if allow_fetch: + return self.get_or_fetch_entity_by_id(entity_id, ["product"]) + return self._entities_by_id.get(entity_id) + + def get_version_by_id( + self, + entity_id: str, + allow_fetch: Optional[bool] = True, + ) -> Optional["VersionEntity"]: + """Get version entity by id. + + Args: + entity_id (str): Version id. + allow_fetch (bool): Try to fetch entity from server if is not + available in cache. + + Returns: + Optional[VersionEntity]: Version entity object or None. + + """ + if allow_fetch: + return self.get_or_fetch_entity_by_id(entity_id, ["version"]) + return self._entities_by_id.get(entity_id) + def get_or_fetch_entity_by_id( self, entity_id: str, @@ -241,6 +282,20 @@ def get_or_fetch_entity_by_id( fields=self._get_task_fields(), own_attributes=True ) + elif entity_type == "product": + entity_data = self._connection.get_product_by_id( + self.project_name, + entity_id, + fields=self._get_product_fields(), + own_attributes=True + ) + elif entity_type == "version": + entity_data = self._connection.get_version_by_id( + self.project_name, + entity_id, + fields=self._get_version_fields(), + own_attributes=True + ) else: raise ValueError( "Unknonwn entity type \"{}\"".format(entity_type) @@ -714,6 +769,16 @@ def _get_task_fields(self) -> Set[str]: self._connection.get_default_fields_for_type("task") ) + def _get_product_fields(self) -> Set[str]: + return set( + self._connection.get_default_fields_for_type("product") + ) + + def _get_version_fields(self) -> Set[str]: + return set( + self._connection.get_default_fields_for_type("version") + ) + def fetch_hierarchy_entities(self): """Query whole project at once.""" project_entity = self.fill_project_from_server() From ee1e01ea6165b72743079fb94949494dd9cd8727 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 28 Oct 2024 16:07:27 +0100 Subject: [PATCH 021/135] fix ownAttrib on version --- ayon_api/entity_hub.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 52c36478c..d6bd9099a 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -294,7 +294,6 @@ def get_or_fetch_entity_by_id( self.project_name, entity_id, fields=self._get_version_fields(), - own_attributes=True ) else: raise ValueError( @@ -3519,7 +3518,7 @@ def from_entity_data(cls, version, entity_hub): task_id=version["taskId"], status=version["status"], tags=version["tags"], - attribs=version["ownAttrib"], + attribs=version["attrib"], data=version.get("data"), thumbnail_id=version["thumbnailId"], active=version["active"], From de8a1fb6405bb01140236343418126c6f380bca1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 28 Oct 2024 16:07:54 +0100 Subject: [PATCH 022/135] avoid crashes for entities without thumbnail id --- ayon_api/entity_hub.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 
d6bd9099a..c8b08fb2e 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -1618,7 +1618,7 @@ def lock(self): """Lock entity as 'saved' so all changes are discarded.""" self._orig_parent_id = self._parent_id self._orig_name = self._name - self._orig_thumbnail_id = self.thumbnail_id + self._orig_thumbnail_id = self._thumbnail_id if isinstance(self._data, EntityData): self._data.lock() self._attribs.lock() From f174b3e19020c6f9c33a56ef26b11fd52aecde09 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 28 Oct 2024 16:08:15 +0100 Subject: [PATCH 023/135] fix docstring --- ayon_api/entity_hub.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index c8b08fb2e..9ef6ad870 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -416,7 +416,7 @@ def add_new_task( created: Optional[bool] = None, parent_id: Optional[str] = UNKNOWN_VALUE, ): - """Create folder object and add it to entity hub. + """Create task object and add it to entity hub. Args: name (str): Name of entity. From 7c55604e4aa3395ffe9a430d15c70e6b90f5474e Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 28 Oct 2024 16:08:25 +0100 Subject: [PATCH 024/135] fix typehint --- ayon_api/entity_hub.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 9ef6ad870..10075f9d1 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -1893,7 +1893,7 @@ def set_thumbnail_id(self, thumbnail_id): thumbnail_id = property(get_thumbnail_id, set_thumbnail_id) - def get_status(self) -> Union[str, UNKNOWN_VALUE]: + def get_status(self) -> Union[str, _CustomNone]: """Folder status. Returns: From 6b48b9c9e7b5ae98cbc17b097fbf793b97a977ad Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 28 Oct 2024 16:10:13 +0100 Subject: [PATCH 025/135] added option to get product and version entities --- ayon_api/entity_hub.py | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 10075f9d1..010861fe8 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -314,6 +314,12 @@ def get_or_fetch_entity_by_id( elif entity_type == "task": return self.add_task(entity_data) + elif entity_type == "product": + return self.add_product(entity_data) + + elif entity_type == "version": + return self.add_version(entity_data) + return None def get_or_query_entity_by_id( @@ -496,6 +502,34 @@ def add_task(self, task): self.add_entity(task_entity) return task_entity + def add_product(self, product): + """Create version object and add it to entity hub. + + Args: + product (Dict[str, Any]): Version entity data. + + Returns: + ProductEntity: Added version entity. + + """ + product_entity = ProductEntity.from_entity_data(product, entity_hub=self) + self.add_entity(product_entity) + return product_entity + + def add_version(self, version): + """Create version object and add it to entity hub. + + Args: + version (Dict[str, Any]): Version entity data. + + Returns: + VersionEntity: Added version entity. + + """ + version_entity = VersionEntity.from_entity_data(version, entity_hub=self) + self.add_entity(version_entity) + return version_entity + def add_entity(self, entity): """Add entity to hub cache. 
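For reference, a minimal usage sketch of the product/version lookups introduced above; the project name and entity ids are placeholders, and the hub is assumed to be constructible from just a project name with an already configured server connection:

    from ayon_api.entity_hub import EntityHub

    # Placeholder project name and ids -- replace with real values.
    hub = EntityHub("demo_Commercial")

    # Fetched from the server on first access, served from the hub cache afterwards.
    product = hub.get_product_by_id("<product-id>")
    version = hub.get_version_by_id("<version-id>")
    if version is not None:
        print(version.version, version.product_id)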
From 8a56ed6cb421c386feee4318192282092392af8f Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 28 Oct 2024 16:10:28 +0100 Subject: [PATCH 026/135] added option to create new entities --- ayon_api/entity_hub.py | 98 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 98 insertions(+) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 010861fe8..dedb13929 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -474,6 +474,104 @@ def add_new_task( self.add_entity(task_entity) return task_entity + def add_new_product( + self, + name: str, + product_type: str, + folder_id: Optional[Union[str, _CustomNone]] = UNKNOWN_VALUE, + tags: Optional[Iterable[str]] = None, + attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + active: Optional[bool] = UNKNOWN_VALUE, + entity_id: Optional[str] = None, + created: Optional[bool] = None, + ): + """Create task object and add it to entity hub. + + Args: + name (str): Name of entity. + product_type (str): Type of product. + folder_id (Union[str, None]): Parent folder id. + tags (Optional[Iterable[str]]): Folder tags. + attribs (Dict[str, Any]): Attribute values. + data (Dict[str, Any]): Entity data (custom data). + active (bool): Is entity active. + entity_id (Union[str, None]): Id of the entity. New id is created if + not passed. + created (Optional[bool]): Entity is new. When 'None' is passed the + value is defined based on value of 'entity_id'. + + Returns: + ProductEntity: Added product entity. + + """ + product_entity = ProductEntity( + name=name, + product_type=product_type, + folder_id=folder_id, + tags=tags, + attribs=attribs, + data=data, + active=active, + entity_id=entity_id, + created=created, + entity_hub=self, + ) + self.add_entity(product_entity) + return product_entity + + def add_new_version( + self, + version: int, + product_id: Optional[Union[str, _CustomNone]] = UNKNOWN_VALUE, + task_id: Optional[Union[str, _CustomNone]] = UNKNOWN_VALUE, + status: Optional[str] = UNKNOWN_VALUE, + tags: Optional[Iterable[str]] = None, + attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, + thumbnail_id: Optional[str] = UNKNOWN_VALUE, + active: Optional[bool] = UNKNOWN_VALUE, + entity_id: Optional[str] = None, + created: Optional[bool] = None, + ): + """Create task object and add it to entity hub. + + Args: + version (int): Version. + product_id (Union[str, None]): Parent product id. + task_id (Union[str, None]): Parent task id. + status (Optional[str]): Task status. + tags (Optional[Iterable[str]]): Folder tags. + attribs (Dict[str, Any]): Attribute values. + data (Dict[str, Any]): Entity data (custom data). + thumbnail_id (Union[str, None]): Id of entity's thumbnail. + active (bool): Is entity active. + entity_id (Union[str, None]): Id of the entity. New id is created if + not passed. + created (Optional[bool]): Entity is new. When 'None' is passed the + value is defined based on value of 'entity_id'. + + Returns: + VersionEntity: Added version entity. + + """ + version_entity = VersionEntity( + version=version, + product_id=product_id, + task_id=task_id, + status=status, + tags=tags, + attribs=attribs, + data=data, + thumbnail_id=thumbnail_id, + active=active, + entity_id=entity_id, + created=created, + entity_hub=self, + ) + self.add_entity(version_entity) + return version_entity + def add_folder(self, folder): """Create folder object and add it to entity hub. 
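A short sketch of how the creation helpers added above might be used; the folder and task ids are placeholders, and pushing the queued entities is assumed to go through the hub's existing commit mechanism:

    from ayon_api.entity_hub import EntityHub

    hub = EntityHub("demo_Commercial")

    # '<folder-id>' and '<task-id>' are placeholder ids of existing entities.
    product = hub.add_new_product(
        name="renderMain",
        product_type="render",
        folder_id="<folder-id>",
    )
    version = hub.add_new_version(
        version=1,
        product_id=product.id,
        task_id="<task-id>",
    )
    # Assumes the hub exposes 'commit_changes' to send pending changes to the server.
    hub.commit_changes()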
From abfcb2ce116135db23741967a70711acc814e8ac Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 28 Oct 2024 16:10:39 +0100 Subject: [PATCH 027/135] updated typehint literal --- ayon_api/entity_hub.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index dedb13929..ad02a313a 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -13,7 +13,7 @@ from typing import Literal StatusState = Literal["not_started", "in_progress", "done", "blocked"] - EntityType = Literal["project", "folder", "task"] + EntityType = Literal["project", "folder", "task", "product", "version"] class _CustomNone(object): From 76b243f8d12589899cbf59e32e5c129fa759f78e Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 28 Oct 2024 16:13:00 +0100 Subject: [PATCH 028/135] Union is imported only for typehints --- ayon_api/entity_hub.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index ad02a313a..07795c22f 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -4,13 +4,13 @@ import warnings from abc import ABC, abstractmethod import typing -from typing import Optional, Union, Iterable, Dict, List, Set, Any +from typing import Optional, Iterable, Dict, List, Set, Any from ._api import get_server_api_connection from .utils import create_entity_id, convert_entity_id, slugify_string if typing.TYPE_CHECKING: - from typing import Literal + from typing import Literal, Union StatusState = Literal["not_started", "in_progress", "done", "blocked"] EntityType = Literal["project", "folder", "task", "product", "version"] @@ -478,7 +478,7 @@ def add_new_product( self, name: str, product_type: str, - folder_id: Optional[Union[str, _CustomNone]] = UNKNOWN_VALUE, + folder_id: Optional["Union[str, _CustomNone]"] = UNKNOWN_VALUE, tags: Optional[Iterable[str]] = None, attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, @@ -523,8 +523,8 @@ def add_new_product( def add_new_version( self, version: int, - product_id: Optional[Union[str, _CustomNone]] = UNKNOWN_VALUE, - task_id: Optional[Union[str, _CustomNone]] = UNKNOWN_VALUE, + product_id: Optional["Union[str, _CustomNone]"] = UNKNOWN_VALUE, + task_id: Optional["Union[str, _CustomNone]"] = UNKNOWN_VALUE, status: Optional[str] = UNKNOWN_VALUE, tags: Optional[Iterable[str]] = None, attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, @@ -1440,7 +1440,7 @@ class BaseEntity(ABC): def __init__( self, entity_id: Optional[str] = None, - parent_id: Optional[Union[str, _CustomNone]] = UNKNOWN_VALUE, + parent_id: Optional["Union[str, _CustomNone]"] = UNKNOWN_VALUE, attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, active: Optional[bool] = UNKNOWN_VALUE, @@ -2025,7 +2025,7 @@ def set_thumbnail_id(self, thumbnail_id): thumbnail_id = property(get_thumbnail_id, set_thumbnail_id) - def get_status(self) -> Union[str, _CustomNone]: + def get_status(self) -> "Union[str, _CustomNone]": """Folder status. 
Returns: @@ -3461,7 +3461,7 @@ def __init__( self, name: str, product_type: str, - folder_id: Optional[Union[str, _CustomNone]] = UNKNOWN_VALUE, + folder_id: Optional["Union[str, _CustomNone]"] = UNKNOWN_VALUE, tags: Optional[Iterable[str]] = None, attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, @@ -3571,8 +3571,8 @@ class VersionEntity(BaseEntity): def __init__( self, version: int, - product_id: Optional[Union[str, _CustomNone]] = UNKNOWN_VALUE, - task_id: Optional[Union[str, _CustomNone]] = UNKNOWN_VALUE, + product_id: Optional["Union[str, _CustomNone]"] = UNKNOWN_VALUE, + task_id: Optional["Union[str, _CustomNone]"] = UNKNOWN_VALUE, status: Optional[str] = UNKNOWN_VALUE, tags: Optional[Iterable[str]] = None, attribs: Optional[Dict[str, Any]] = UNKNOWN_VALUE, From 8a52679afa2ccafe405173a98d66e8757ca32d71 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 28 Oct 2024 16:19:13 +0100 Subject: [PATCH 029/135] fix typo --- ayon_api/entity_hub.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 07795c22f..48d5d06e5 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -297,7 +297,7 @@ def get_or_fetch_entity_by_id( ) else: raise ValueError( - "Unknonwn entity type \"{}\"".format(entity_type) + "Unknown entity type \"{}\"".format(entity_type) ) if entity_data: From 80bdd7174aeccd67f32ab173b48043b77310bf5b Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 28 Oct 2024 18:07:10 +0100 Subject: [PATCH 030/135] keep folder and task type as first in arguments --- ayon_api/entity_hub.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 48d5d06e5..00a73082c 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -347,8 +347,10 @@ def entities(self): def add_new_folder( self, - name: str, + # TODO move 'folder_type' after 'name' + # - that will break backwards compatibility folder_type: str, + name: str, parent_id: Optional[str] = UNKNOWN_VALUE, label: Optional[str] = None, path: Optional[str] = None, @@ -407,8 +409,10 @@ def add_new_folder( def add_new_task( self, - name: str, + # TODO move 'folder_type' after 'name' + # - that will break backwards compatibility task_type: str, + name: str, folder_id: Optional[str] = UNKNOWN_VALUE, label: Optional[str] = None, status: Optional[str] = UNKNOWN_VALUE, From 61227c6db34b5ab29cb04a748e47db590dad41b8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 28 Oct 2024 18:13:33 +0100 Subject: [PATCH 031/135] fix linting issues --- ayon_api/entity_hub.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 00a73082c..751282227 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -379,7 +379,7 @@ def add_new_folder( data (Dict[str, Any]): Entity data (custom data). thumbnail_id (Union[str, None]): Id of entity's thumbnail. active (bool): Is entity active. - entity_id (Union[str, None]): Id of the entity. New id is created if + entity_id (Optional[str]): Id of the entity. New id is created if not passed. created (Optional[bool]): Entity is new. When 'None' is passed the value is defined based on value of 'entity_id'. @@ -430,8 +430,8 @@ def add_new_task( Args: name (str): Name of entity. 
- task_type (str): Type of task. Task type must be available in config - of project task types. + task_type (str): Type of task. Task type must be available in + config of project task types. folder_id (Union[str, None]): Parent folder id. label (Optional[str]): Task label. status (Optional[str]): Task status. @@ -441,7 +441,7 @@ def add_new_task( assignees (Optional[Iterable[str]]): User assignees to the task. thumbnail_id (Union[str, None]): Id of entity's thumbnail. active (bool): Is entity active. - entity_id (Union[str, None]): Id of the entity. New id is created if + entity_id (Optional[str]): Id of the entity. New id is created if not passed. created (Optional[bool]): Entity is new. When 'None' is passed the value is defined based on value of 'entity_id'. @@ -500,7 +500,7 @@ def add_new_product( attribs (Dict[str, Any]): Attribute values. data (Dict[str, Any]): Entity data (custom data). active (bool): Is entity active. - entity_id (Union[str, None]): Id of the entity. New id is created if + entity_id (Optional[str]): Id of the entity. New id is created if not passed. created (Optional[bool]): Entity is new. When 'None' is passed the value is defined based on value of 'entity_id'. @@ -550,7 +550,7 @@ def add_new_version( data (Dict[str, Any]): Entity data (custom data). thumbnail_id (Union[str, None]): Id of entity's thumbnail. active (bool): Is entity active. - entity_id (Union[str, None]): Id of the entity. New id is created if + entity_id (Optional[str]): Id of the entity. New id is created if not passed. created (Optional[bool]): Entity is new. When 'None' is passed the value is defined based on value of 'entity_id'. @@ -614,7 +614,9 @@ def add_product(self, product): ProductEntity: Added version entity. """ - product_entity = ProductEntity.from_entity_data(product, entity_hub=self) + product_entity = ProductEntity.from_entity_data( + product, entity_hub=self + ) self.add_entity(product_entity) return product_entity @@ -628,7 +630,9 @@ def add_version(self, version): VersionEntity: Added version entity. """ - version_entity = VersionEntity.from_entity_data(version, entity_hub=self) + version_entity = VersionEntity.from_entity_data( + version, entity_hub=self + ) self.add_entity(version_entity) return version_entity From ccaa28974288cf70092c9918d5026598b01b9e58 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 28 Oct 2024 18:20:58 +0100 Subject: [PATCH 032/135] updated some docstrings --- ayon_api/entity_hub.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 751282227..f0a01ef9e 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -43,14 +43,14 @@ class EntityHub(object): frequently. Todos: - Listen to server events about entity changes to be able update already - queried entities. + Listen to server events about entity changes to be able to update + already queried entities. Args: project_name (str): Name of project where changes will happen. connection (ServerAPI): Connection to server with logged user. allow_data_changes (bool): This option gives ability to change 'data' - key on entities. This is not recommended as 'data' may be use for + key on entities. This is not recommended as 'data' may be used for secure information and would also slow down server queries. Content of 'data' key can't be received only GraphQl. 
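The 'data' behaviour described in this docstring is controlled per hub instance; a small sketch, assuming the constructor accepts the documented arguments:

    from ayon_api.entity_hub import EntityHub

    # Disable changes of the private 'data' key for this hub instance
    # (also avoids the per-entity REST queries needed on older servers).
    hub = EntityHub("demo_Commercial", allow_data_changes=False)
    print(hub.allow_data_changes)  # -> False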
@@ -85,9 +85,11 @@ def __init__( def allow_data_changes(self): """Entity hub allows changes of 'data' key on entities. - Data are private and not all users may have access to them. Also to get - 'data' for entity is required to use REST api calls, which means to - query each entity on-by-one from server. + Data are private and not all users may have access to them. + + Older version of AYON server allowed to get 'data' for entity only + using REST api calls, which means to query each entity on-by-one + from server. Returns: bool: Data changes are allowed. From f4f316d8c8904276abde1996cf5f85a99db868f2 Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Wed, 6 Nov 2024 15:50:21 +0100 Subject: [PATCH 033/135] get_events: tests for get_events with multiple filter combinations --- tests/test_server.py | 302 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 302 insertions(+) diff --git a/tests/test_server.py b/tests/test_server.py index ce5a98d07..b1410c338 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -11,10 +11,14 @@ is_connection_created, close_connection, get, + get_events, + get_project_names, + get_user, get_server_api_connection, get_base_url, get_rest_url, ) +from ayon_api import exceptions AYON_BASE_URL = os.getenv("AYON_SERVER_URL") AYON_REST_URL = "{}/api".format(AYON_BASE_URL) @@ -43,3 +47,301 @@ def test_get(): res = get("info") assert res.status_code == 200 assert isinstance(res.data, dict) + + +test_project_names = [ + # (None), + # ([]), + (["demo_Big_Episodic"]), + (["demo_Big_Feature"]), + (["demo_Commercial"]), + (["AY_Tests"]), + (["demo_Big_Episodic", "demo_Big_Feature", "demo_Commercial", "AY_Tests"]) +] + +test_topics = [ + # (None), + # ([]), + (["entity.folder.attrib_changed"]), + (["entity.task.created", "entity.project.created"]), + (["settings.changed", "entity.version.status_changed"]), + (["entity.task.status_changed", "entity.folder.deleted"]), + # (["entity.project.changed", "entity.task.tags_changed", "entity.product.created"]) +] + +test_users = [ + # (None), + # ([]), + (["admin"]), + (["mkolar", "tadeas.8964"]), + # (["roy", "luke.inderwick", "ynbot"]), + # (["entity.folder.attrib_changed", "entity.project.created", "entity.task.created", "settings.changed"]), +] + +# incorrect name for statuses +test_states = [ + # (None), + # ([]), + (["pending", "in_progress", "finished", "failed", "aborted", "restarted"]), + # (["failed", "aborted"]), + # (["pending", "in_progress"]), + # (["finished", "failed", "restarted"]), + (["finished"]), +] + +test_include_logs = [ + (None), + (True), + (False), +] + +test_has_children = [ + (None), + (True), + (False), +] + +from datetime import datetime, timedelta + +test_newer_than = [ + (None), + ((datetime.now() - timedelta(days=2)).isoformat()), + ((datetime.now() - timedelta(days=5)).isoformat()), + # ((datetime.now() - timedelta(days=10)).isoformat()), + # ((datetime.now() - timedelta(days=20)).isoformat()), + # ((datetime.now() - timedelta(days=30)).isoformat()), +] + +test_older_than = [ + (None), + ((datetime.now() - timedelta(days=0)).isoformat()), + ((datetime.now() - timedelta(days=0)).isoformat()), + # ((datetime.now() - timedelta(days=5)).isoformat()), + # ((datetime.now() - timedelta(days=10)).isoformat()), + # ((datetime.now() - timedelta(days=20)).isoformat()), + # ((datetime.now() - timedelta(days=30)).isoformat()), +] + +test_fields = [ + (None), + ([]), +] + + +@pytest.mark.parametrize("topics", test_topics) +@pytest.mark.parametrize("project_names", test_project_names) 
+@pytest.mark.parametrize("states", test_states) +@pytest.mark.parametrize("users", test_users) +@pytest.mark.parametrize("include_logs", test_include_logs) +@pytest.mark.parametrize("has_children", test_has_children) +@pytest.mark.parametrize("newer_than", test_newer_than) +@pytest.mark.parametrize("older_than", test_older_than) +@pytest.mark.parametrize("fields", test_fields) +def test_get_events_all_filter_combinations( + topics, + project_names, + states, + users, + include_logs, + has_children, + newer_than, + older_than, + fields): + """Tests all combination of possible filters. + """ + res = get_events( + topics=topics, + project_names=project_names, + states=states, + users=users, + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=fields + ) + + list_res = list(res) + + for item in list_res: + assert item.get("topic") in topics, ( + f"Expected 'project' one of values: {topics}, but got '{item.get('topic')}'" + ) + assert item.get("project") in project_names, ( + f"Expected 'project' one of values: {project_names}, but got '{item.get('project')}'" + ) + assert item.get("user") in users, ( + f"Expected 'user' one of values: {users}, but got '{item.get('user')}'" + ) + assert item.get("status") in states, ( + f"Expected 'state' to be one of {states}, but got '{item.get('state')}'" + ) + assert (newer_than is None) or ( + datetime.fromisoformat(item.get("createdAt") > datetime.fromisoformat(newer_than)) + ) + assert (older_than is None) or ( + datetime.fromisoformat(item.get("createdAt") < datetime.fromisoformat(older_than)) + ) + + assert topics is None or len(list_res) == sum(len(list(get_events( + topics=[topic], + project_names=project_names, + states=states, + users=users, + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=fields) + )) for topic in topics) + + assert project_names is None or len(list_res) == sum(len(list(get_events( + topics=topics, + project_names=[project_name], + states=states, + users=users, + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=fields) + )) for project_name in project_names) + + assert states is None or len(list_res) == sum(len(list(get_events( + topics=topics, + project_names=project_names, + states=[state], + users=users, + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=fields) + )) for state in states) + + assert users is None or len(list_res) == sum(len(list(get_events( + topics=topics, + project_names=project_names, + states=states, + users=[user], + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=fields) + )) for user in users) + + assert fields is None or len(list_res) == sum(len(list(get_events( + topics=topics, + project_names=project_names, + states=states, + users=users, + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=[field]) + )) for field in fields) + + +######################## +# topics=None, event_ids=None, project_names=None, states=None, users=None, include_logs=None, has_children=None, newer_than=None, older_than=None, fields=None + +# [ +# { +# 'description': 'Changed task animation status to In progress', +# 'hash': 'a259521612b611ef95920242c0a81005', +# 'project': 'demo_Big_Episodic', +# 'id': 'a259521612b611ef95920242c0a81005', 
+# 'status': 'finished', +# 'user': 'admin', +# 'createdAt': '2024-05-15T14:28:28.889144+02:00', +# 'dependsOn': None, +# 'updatedAt': '2024-05-15T14:28:28.889144+02:00', +# 'retries': 0, +# 'sender': 'wWN64PyUo1kqAxechtJucy', +# 'topic': 'entity.task.status_changed' +# }, +# { +# 'description': 'Changed task animation status to On hold', +# 'hash': 'a8fb977812b611ef95920242c0a81005', +# 'project': 'demo_Big_Episodic', +# 'id': 'a8fb977812b611ef95920242c0a81005', +# 'status': 'finished', +# 'user': 'admin', +# 'createdAt': '2024-05-15T14:28:40.018934+02:00', +# 'dependsOn': None, +# 'updatedAt': '2024-05-15T14:28:40.018934+02:00', +# 'retries': 0, +# 'sender': 'fx5SG26FHvhFKkDsXHp53k', +# 'topic': 'entity.task.status_changed' +# }, +# { +# 'description': 'Changed task animation status to Pending review', +# 'hash': 'f0686ec412b611ef95920242c0a81005', +# 'project': 'demo_Big_Episodic', +# 'id': 'f0686ec412b611ef95920242c0a81005', +# 'status': 'finished', +# 'user': 'admin', +# 'createdAt': '2024-05-15T14:30:39.850258+02:00', +# 'dependsOn': None, +# 'updatedAt': '2024-05-15T14:30:39.850258+02:00', +# 'retries': 0, +# 'sender': 'v9ciM94XnfJ33X1bYr5ESv', +# 'topic': 'entity.task.status_changed' +# } +# ] + + +@pytest.mark.parametrize("project_names", test_project_names) +def test_get_events_project_name(project_names): + res = get_events(project_names=project_names) + + list_res = list(res) + + users = set() + for item in list_res: + users.add(item.get("user")) + assert item.get("project") in project_names, f"Expected 'project' value '{project_names}', but got '{item.get('project')}'" + + print(users) + # test if the legths are equal + assert len(list_res) == sum(len(list(get_events(project_names=[project_name]))) for project_name in project_names) + + +@pytest.mark.parametrize("project_names", test_project_names) +@pytest.mark.parametrize("topics", test_topics) +def test_get_events_project_name_topic(project_names, topics): + print(project_names, "", topics) + res = get_events(topics=topics, project_names=project_names) + + list_res = list(res) + + for item in list_res: + assert item.get("topic") in topics + assert item.get("project") in project_names, f"Expected 'project' value '{project_names}', but got '{item.get('project')}'" + + # test if the legths are equal + assert len(list_res) == sum(len(list(get_events(project_names=[project_name], topics=topics))) for project_name in project_names) + assert len(list_res) == sum(len(list(get_events(project_names=project_names, topics=[topic]))) for topic in topics) + + +@pytest.mark.parametrize("project_names", test_project_names) +@pytest.mark.parametrize("topics", test_topics) +@pytest.mark.parametrize("users", test_users) +def test_get_events_project_name_topic_user(project_names, topics, users): + # print(project_names, "", topics) + res = get_events(topics=topics, project_names=project_names, users=users) + + list_res = list(res) + + for item in list_res: + assert item.get("topic") in topics, f"Expected 'project' one of values: {topics}, but got '{item.get('topic')}'" + assert item.get("project") in project_names, f"Expected 'project' one of values: {project_names}, but got '{item.get('project')}'" + assert item.get("user") in project_names, f"Expected 'project' one of values: {users}, but got '{item.get('user')}'" + + + # test if the legths are equal + assert len(list_res) == sum(len(list(get_events(project_names=[project_name], topics=topics))) for project_name in project_names) + assert len(list_res) == 
sum(len(list(get_events(project_names=project_names, topics=[topic]))) for topic in topics) + assert len(list_res) == sum(len(list(get_events(project_names=project_names, topics=[topic]))) for topic in topics) From da5801ef82696f38f34c5cfc9867d58204c948c9 Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Thu, 7 Nov 2024 10:14:32 +0100 Subject: [PATCH 034/135] get_events: solved issue with connection timeout while testing, new tests added and reduced the number of combinations in all filter combination test --- tests/test_server.py | 154 ++++++++++++++++++------------------------- 1 file changed, 65 insertions(+), 89 deletions(-) diff --git a/tests/test_server.py b/tests/test_server.py index b1410c338..a20439ff7 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -17,6 +17,8 @@ get_server_api_connection, get_base_url, get_rest_url, + get_timeout, + set_timeout ) from ayon_api import exceptions @@ -50,8 +52,8 @@ def test_get(): test_project_names = [ - # (None), - # ([]), + (None), + ([]), (["demo_Big_Episodic"]), (["demo_Big_Feature"]), (["demo_Commercial"]), @@ -60,32 +62,32 @@ def test_get(): ] test_topics = [ - # (None), - # ([]), + (None), + ([]), (["entity.folder.attrib_changed"]), (["entity.task.created", "entity.project.created"]), (["settings.changed", "entity.version.status_changed"]), (["entity.task.status_changed", "entity.folder.deleted"]), - # (["entity.project.changed", "entity.task.tags_changed", "entity.product.created"]) + (["entity.project.changed", "entity.task.tags_changed", "entity.product.created"]) ] test_users = [ - # (None), - # ([]), + (None), + ([]), (["admin"]), (["mkolar", "tadeas.8964"]), - # (["roy", "luke.inderwick", "ynbot"]), - # (["entity.folder.attrib_changed", "entity.project.created", "entity.task.created", "settings.changed"]), + (["roy", "luke.inderwick", "ynbot"]), + (["entity.folder.attrib_changed", "entity.project.created", "entity.task.created", "settings.changed"]), ] # incorrect name for statuses test_states = [ - # (None), - # ([]), + (None), + ([]), (["pending", "in_progress", "finished", "failed", "aborted", "restarted"]), - # (["failed", "aborted"]), - # (["pending", "in_progress"]), - # (["finished", "failed", "restarted"]), + (["failed", "aborted"]), + (["pending", "in_progress"]), + (["finished", "failed", "restarted"]), (["finished"]), ] @@ -101,25 +103,25 @@ def test_get(): (False), ] -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone test_newer_than = [ (None), - ((datetime.now() - timedelta(days=2)).isoformat()), - ((datetime.now() - timedelta(days=5)).isoformat()), - # ((datetime.now() - timedelta(days=10)).isoformat()), - # ((datetime.now() - timedelta(days=20)).isoformat()), - # ((datetime.now() - timedelta(days=30)).isoformat()), + ((datetime.now(timezone.utc) - timedelta(days=2)).isoformat()), + ((datetime.now(timezone.utc) - timedelta(days=5)).isoformat()), + ((datetime.now(timezone.utc) - timedelta(days=10)).isoformat()), + ((datetime.now(timezone.utc) - timedelta(days=20)).isoformat()), + ((datetime.now(timezone.utc) - timedelta(days=30)).isoformat()), ] test_older_than = [ (None), - ((datetime.now() - timedelta(days=0)).isoformat()), - ((datetime.now() - timedelta(days=0)).isoformat()), - # ((datetime.now() - timedelta(days=5)).isoformat()), - # ((datetime.now() - timedelta(days=10)).isoformat()), - # ((datetime.now() - timedelta(days=20)).isoformat()), - # ((datetime.now() - timedelta(days=30)).isoformat()), + ((datetime.now(timezone.utc) - 
timedelta(days=0)).isoformat()), + ((datetime.now(timezone.utc) - timedelta(days=0)).isoformat()), + ((datetime.now(timezone.utc) - timedelta(days=5)).isoformat()), + ((datetime.now(timezone.utc) - timedelta(days=10)).isoformat()), + ((datetime.now(timezone.utc) - timedelta(days=20)).isoformat()), + ((datetime.now(timezone.utc) - timedelta(days=30)).isoformat()), ] test_fields = [ @@ -128,15 +130,16 @@ def test_get(): ] -@pytest.mark.parametrize("topics", test_topics) -@pytest.mark.parametrize("project_names", test_project_names) -@pytest.mark.parametrize("states", test_states) -@pytest.mark.parametrize("users", test_users) -@pytest.mark.parametrize("include_logs", test_include_logs) -@pytest.mark.parametrize("has_children", test_has_children) -@pytest.mark.parametrize("newer_than", test_newer_than) -@pytest.mark.parametrize("older_than", test_older_than) -@pytest.mark.parametrize("fields", test_fields) +# takes max 3 items in a list to reduce the number of combinations +@pytest.mark.parametrize("topics", test_topics[-3:]) +@pytest.mark.parametrize("project_names", test_project_names[-3:]) +@pytest.mark.parametrize("states", test_states[-3:]) +@pytest.mark.parametrize("users", test_users[-3:]) +@pytest.mark.parametrize("include_logs", test_include_logs[-3:]) +@pytest.mark.parametrize("has_children", test_has_children[-3:]) +@pytest.mark.parametrize("newer_than", test_newer_than[-3:]) +@pytest.mark.parametrize("older_than", test_older_than[-3:]) +@pytest.mark.parametrize("fields", test_fields[-3:]) def test_get_events_all_filter_combinations( topics, project_names, @@ -146,9 +149,15 @@ def test_get_events_all_filter_combinations( has_children, newer_than, older_than, - fields): + fields +): """Tests all combination of possible filters. """ + # with many tests - ayon_api.exceptions.ServerError: Connection timed out. 
+ # TODO - maybe some better solution + if get_timeout() < 5: + set_timeout(10.0) + res = get_events( topics=topics, project_names=project_names, @@ -163,6 +172,7 @@ def test_get_events_all_filter_combinations( list_res = list(res) + # test if filtering was correct for item in list_res: assert item.get("topic") in topics, ( f"Expected 'project' one of values: {topics}, but got '{item.get('topic')}'" @@ -177,12 +187,13 @@ def test_get_events_all_filter_combinations( f"Expected 'state' to be one of {states}, but got '{item.get('state')}'" ) assert (newer_than is None) or ( - datetime.fromisoformat(item.get("createdAt") > datetime.fromisoformat(newer_than)) + datetime.fromisoformat(item.get("createdAt")) > datetime.fromisoformat(newer_than) ) assert (older_than is None) or ( - datetime.fromisoformat(item.get("createdAt") < datetime.fromisoformat(older_than)) + datetime.fromisoformat(item.get("createdAt")) < datetime.fromisoformat(older_than) ) + # test if all events were given assert topics is None or len(list_res) == sum(len(list(get_events( topics=[topic], project_names=project_names, @@ -242,55 +253,6 @@ def test_get_events_all_filter_combinations( older_than=older_than, fields=[field]) )) for field in fields) - - -######################## -# topics=None, event_ids=None, project_names=None, states=None, users=None, include_logs=None, has_children=None, newer_than=None, older_than=None, fields=None - -# [ -# { -# 'description': 'Changed task animation status to In progress', -# 'hash': 'a259521612b611ef95920242c0a81005', -# 'project': 'demo_Big_Episodic', -# 'id': 'a259521612b611ef95920242c0a81005', -# 'status': 'finished', -# 'user': 'admin', -# 'createdAt': '2024-05-15T14:28:28.889144+02:00', -# 'dependsOn': None, -# 'updatedAt': '2024-05-15T14:28:28.889144+02:00', -# 'retries': 0, -# 'sender': 'wWN64PyUo1kqAxechtJucy', -# 'topic': 'entity.task.status_changed' -# }, -# { -# 'description': 'Changed task animation status to On hold', -# 'hash': 'a8fb977812b611ef95920242c0a81005', -# 'project': 'demo_Big_Episodic', -# 'id': 'a8fb977812b611ef95920242c0a81005', -# 'status': 'finished', -# 'user': 'admin', -# 'createdAt': '2024-05-15T14:28:40.018934+02:00', -# 'dependsOn': None, -# 'updatedAt': '2024-05-15T14:28:40.018934+02:00', -# 'retries': 0, -# 'sender': 'fx5SG26FHvhFKkDsXHp53k', -# 'topic': 'entity.task.status_changed' -# }, -# { -# 'description': 'Changed task animation status to Pending review', -# 'hash': 'f0686ec412b611ef95920242c0a81005', -# 'project': 'demo_Big_Episodic', -# 'id': 'f0686ec412b611ef95920242c0a81005', -# 'status': 'finished', -# 'user': 'admin', -# 'createdAt': '2024-05-15T14:30:39.850258+02:00', -# 'dependsOn': None, -# 'updatedAt': '2024-05-15T14:30:39.850258+02:00', -# 'retries': 0, -# 'sender': 'v9ciM94XnfJ33X1bYr5ESv', -# 'topic': 'entity.task.status_changed' -# } -# ] @pytest.mark.parametrize("project_names", test_project_names) @@ -304,7 +266,6 @@ def test_get_events_project_name(project_names): users.add(item.get("user")) assert item.get("project") in project_names, f"Expected 'project' value '{project_names}', but got '{item.get('project')}'" - print(users) # test if the legths are equal assert len(list_res) == sum(len(list(get_events(project_names=[project_name]))) for project_name in project_names) @@ -340,8 +301,23 @@ def test_get_events_project_name_topic_user(project_names, topics, users): assert item.get("project") in project_names, f"Expected 'project' one of values: {project_names}, but got '{item.get('project')}'" assert item.get("user") in 
project_names, f"Expected 'project' one of values: {users}, but got '{item.get('user')}'" - # test if the legths are equal assert len(list_res) == sum(len(list(get_events(project_names=[project_name], topics=topics))) for project_name in project_names) assert len(list_res) == sum(len(list(get_events(project_names=project_names, topics=[topic]))) for topic in topics) assert len(list_res) == sum(len(list(get_events(project_names=project_names, topics=[topic]))) for topic in topics) + + +@pytest.mark.parametrize("newer_than", test_newer_than) +@pytest.mark.parametrize("older_than", test_older_than) +def test_get_events_timestamp(newer_than, older_than): + res = get_events(newer_than=newer_than, older_than=older_than) + + list_res = list(res) + + for item in list_res: + assert (newer_than is None) or ( + datetime.fromisoformat(item.get("createdAt") > datetime.fromisoformat(newer_than)) + ) + assert (older_than is None) or ( + datetime.fromisoformat(item.get("createdAt") < datetime.fromisoformat(older_than)) + ) From 20dd3253db4ebae1f6e315117a7f6906a5533c33 Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Thu, 7 Nov 2024 13:58:13 +0100 Subject: [PATCH 035/135] get_events: New test for invalid names data, small code adjustments --- tests/test_server.py | 164 ++++++++++++++++++++++++++++++++----------- 1 file changed, 123 insertions(+), 41 deletions(-) diff --git a/tests/test_server.py b/tests/test_server.py index a20439ff7..0ed6b7172 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -13,21 +13,20 @@ get, get_events, get_project_names, - get_user, + get_user_by_name, get_server_api_connection, get_base_url, get_rest_url, get_timeout, set_timeout ) -from ayon_api import exceptions AYON_BASE_URL = os.getenv("AYON_SERVER_URL") AYON_REST_URL = "{}/api".format(AYON_BASE_URL) def test_close_connection(): - _con = get_server_api_connection() + _ = get_server_api_connection() assert is_connection_created() is True close_connection() assert is_connection_created() is False @@ -100,7 +99,7 @@ def test_get(): test_has_children = [ (None), (True), - (False), + # (False), ] from datetime import datetime, timedelta, timezone @@ -156,9 +155,9 @@ def test_get_events_all_filter_combinations( # with many tests - ayon_api.exceptions.ServerError: Connection timed out. 
# TODO - maybe some better solution if get_timeout() < 5: - set_timeout(10.0) + set_timeout(20.0) - res = get_events( + res = list(get_events( topics=topics, project_names=project_names, states=states, @@ -168,12 +167,10 @@ def test_get_events_all_filter_combinations( newer_than=newer_than, older_than=older_than, fields=fields - ) - - list_res = list(res) + )) # test if filtering was correct - for item in list_res: + for item in res: assert item.get("topic") in topics, ( f"Expected 'project' one of values: {topics}, but got '{item.get('topic')}'" ) @@ -194,7 +191,7 @@ def test_get_events_all_filter_combinations( ) # test if all events were given - assert topics is None or len(list_res) == sum(len(list(get_events( + assert topics is None or len(res) == sum(len(list(get_events( topics=[topic], project_names=project_names, states=states, @@ -206,7 +203,7 @@ def test_get_events_all_filter_combinations( fields=fields) )) for topic in topics) - assert project_names is None or len(list_res) == sum(len(list(get_events( + assert project_names is None or len(res) == sum(len(list(get_events( topics=topics, project_names=[project_name], states=states, @@ -218,7 +215,7 @@ def test_get_events_all_filter_combinations( fields=fields) )) for project_name in project_names) - assert states is None or len(list_res) == sum(len(list(get_events( + assert states is None or len(res) == sum(len(list(get_events( topics=topics, project_names=project_names, states=[state], @@ -230,7 +227,7 @@ def test_get_events_all_filter_combinations( fields=fields) )) for state in states) - assert users is None or len(list_res) == sum(len(list(get_events( + assert users is None or len(res) == sum(len(list(get_events( topics=topics, project_names=project_names, states=states, @@ -242,7 +239,7 @@ def test_get_events_all_filter_combinations( fields=fields) )) for user in users) - assert fields is None or len(list_res) == sum(len(list(get_events( + assert fields is None or len(res) == sum(len(list(get_events( topics=topics, project_names=project_names, states=states, @@ -257,67 +254,152 @@ def test_get_events_all_filter_combinations( @pytest.mark.parametrize("project_names", test_project_names) def test_get_events_project_name(project_names): - res = get_events(project_names=project_names) - - list_res = list(res) - - users = set() - for item in list_res: - users.add(item.get("user")) + res = list(get_events(project_names=project_names)) + + for item in res: assert item.get("project") in project_names, f"Expected 'project' value '{project_names}', but got '{item.get('project')}'" # test if the legths are equal - assert len(list_res) == sum(len(list(get_events(project_names=[project_name]))) for project_name in project_names) + assert len(res) == sum(len(list(get_events(project_names=[project_name]))) for project_name in project_names) @pytest.mark.parametrize("project_names", test_project_names) @pytest.mark.parametrize("topics", test_topics) def test_get_events_project_name_topic(project_names, topics): print(project_names, "", topics) - res = get_events(topics=topics, project_names=project_names) - - list_res = list(res) + res = list(get_events( + topics=topics, + project_names=project_names + )) - for item in list_res: + for item in res: assert item.get("topic") in topics assert item.get("project") in project_names, f"Expected 'project' value '{project_names}', but got '{item.get('project')}'" # test if the legths are equal - assert len(list_res) == sum(len(list(get_events(project_names=[project_name], topics=topics))) for 
project_name in project_names) - assert len(list_res) == sum(len(list(get_events(project_names=project_names, topics=[topic]))) for topic in topics) + assert len(res) == sum(len(list(get_events(project_names=[project_name], topics=topics))) for project_name in project_names) + assert len(res) == sum(len(list(get_events(project_names=project_names, topics=[topic]))) for topic in topics) @pytest.mark.parametrize("project_names", test_project_names) @pytest.mark.parametrize("topics", test_topics) @pytest.mark.parametrize("users", test_users) def test_get_events_project_name_topic_user(project_names, topics, users): - # print(project_names, "", topics) - res = get_events(topics=topics, project_names=project_names, users=users) - - list_res = list(res) + res = list(get_events( + topics=topics, + project_names=project_names, + users=users + )) - for item in list_res: + for item in res: assert item.get("topic") in topics, f"Expected 'project' one of values: {topics}, but got '{item.get('topic')}'" assert item.get("project") in project_names, f"Expected 'project' one of values: {project_names}, but got '{item.get('project')}'" assert item.get("user") in project_names, f"Expected 'project' one of values: {users}, but got '{item.get('user')}'" # test if the legths are equal - assert len(list_res) == sum(len(list(get_events(project_names=[project_name], topics=topics))) for project_name in project_names) - assert len(list_res) == sum(len(list(get_events(project_names=project_names, topics=[topic]))) for topic in topics) - assert len(list_res) == sum(len(list(get_events(project_names=project_names, topics=[topic]))) for topic in topics) + assert len(res) == sum(len(list(get_events(project_names=[project_name], topics=topics))) for project_name in project_names) + assert len(res) == sum(len(list(get_events(project_names=project_names, topics=[topic]))) for topic in topics) + assert len(res) == sum(len(list(get_events(project_names=project_names, topics=[topic]))) for topic in topics) @pytest.mark.parametrize("newer_than", test_newer_than) @pytest.mark.parametrize("older_than", test_older_than) def test_get_events_timestamp(newer_than, older_than): - res = get_events(newer_than=newer_than, older_than=older_than) - - list_res = list(res) + res = list(get_events( + newer_than=newer_than, + older_than=older_than + )) - for item in list_res: + for item in res: assert (newer_than is None) or ( datetime.fromisoformat(item.get("createdAt") > datetime.fromisoformat(newer_than)) ) assert (older_than is None) or ( datetime.fromisoformat(item.get("createdAt") < datetime.fromisoformat(older_than)) ) + + +test_invalid_topics = [ + (None), + (["invalid_topic_name_1", "invalid_topic_name_2"]), + (["invalid_topic_name_1"]), +] + +test_invalid_project_names = [ + (None), + (["invalid_project"]), + (["invalid_project", "demo_Big_Episodic", "demo_Big_Feature"]), + (["invalid_name_2", "demo_Commercial"]), + (["demo_Commercial"]), +] + +test_invalid_states = [ + (None), + (["pending_invalid"]), + (["in_progress_invalid"]), + (["finished_invalid", "failed_invalid"]), +] + +test_invalid_users = [ + (None), + (["ayon_invalid_user"]), + (["ayon_invalid_user1", "ayon_invalid_user2"]), + (["ayon_invalid_user1", "ayon_invalid_user2", "admin"]), +] + +test_invalid_newer_than = [ + (None), + ((datetime.now(timezone.utc) + timedelta(days=2)).isoformat()), + ((datetime.now(timezone.utc) + timedelta(days=5)).isoformat()), + ((datetime.now(timezone.utc) - timedelta(days=5)).isoformat()), +] + + +@pytest.mark.parametrize("topics", 
test_invalid_topics) +@pytest.mark.parametrize("project_names", test_invalid_project_names) +@pytest.mark.parametrize("states", test_invalid_states) +@pytest.mark.parametrize("users", test_invalid_users) +@pytest.mark.parametrize("newer_than", test_invalid_newer_than) +def test_get_events_invalid_data( + topics, + project_names, + states, + users, + newer_than +): + # with many tests - ayon_api.exceptions.ServerError: Connection timed out. + # TODO - maybe some better solution + if get_timeout() < 5: + set_timeout(20.0) + + res = list(get_events( + topics=topics, + project_names=project_names, + states=states, + users=users, + newer_than=newer_than + )) + + valid_project_names = get_project_names() + + assert res == [] \ + or topics is None + assert res == [] \ + or project_names is None \ + or any(project_name in valid_project_names for project_name in project_names) + assert res == [] \ + or states is None + assert res == [] \ + or users is None \ + or any(get_user_by_name(user) is not None for user in users) + assert res == [] \ + or newer_than is None \ + or datetime.fromisoformat(newer_than) < datetime.now(timezone.utc) + + +test_update_sender = [ + (), +] + +# def test_update_event(): + From 3ef5a3a2328197eafdaafbaadc389dc8eacef139 Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Thu, 7 Nov 2024 16:26:57 +0100 Subject: [PATCH 036/135] get_events/update_event: All possible filters and their combinations, tests for invalid filter values, new tests for update_event --- tests/test_server.py | 143 +++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 137 insertions(+), 6 deletions(-) diff --git a/tests/test_server.py b/tests/test_server.py index 0ed6b7172..a5fa743a0 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -11,6 +11,7 @@ is_connection_created, close_connection, get, + get_event, get_events, get_project_names, get_user_by_name, @@ -18,7 +19,9 @@ get_base_url, get_rest_url, get_timeout, - set_timeout + set_timeout, + update_event, + exceptions ) AYON_BASE_URL = os.getenv("AYON_SERVER_URL") @@ -140,7 +143,7 @@ def test_get(): @pytest.mark.parametrize("older_than", test_older_than[-3:]) @pytest.mark.parametrize("fields", test_fields[-3:]) def test_get_events_all_filter_combinations( - topics, + topics, project_names, states, users, @@ -158,7 +161,7 @@ def test_get_events_all_filter_combinations( set_timeout(20.0) res = list(get_events( - topics=topics, + topics=topics, project_names=project_names, states=states, users=users, @@ -252,6 +255,28 @@ def test_get_events_all_filter_combinations( )) for field in fields) +@pytest.fixture(params=[1, 2, 3, 4, 5]) +def event_ids(request): + length = request.param + if length == 0: + return None + + recent_events = list(get_events( + newer_than=(datetime.now(timezone.utc) - timedelta(days=5)).isoformat() + )) + + return [recent_event["id"] for recent_event in recent_events[:length]] + + +def test_get_events_event_ids(event_ids): + res = list(get_events(event_ids=event_ids)) + + for item in res: + assert item.get("id") in event_ids + + assert len(res) == sum(len(list(get_events(event_ids=[event_id]))) for event_id in event_ids) + + @pytest.mark.parametrize("project_names", test_project_names) def test_get_events_project_name(project_names): res = list(get_events(project_names=project_names)) @@ -266,7 +291,6 @@ def test_get_events_project_name(project_names): @pytest.mark.parametrize("project_names", test_project_names) @pytest.mark.parametrize("topics", test_topics) def test_get_events_project_name_topic(project_names, 
topics): - print(project_names, "", topics) res = list(get_events( topics=topics, project_names=project_names @@ -397,9 +421,116 @@ def test_get_events_invalid_data( or datetime.fromisoformat(newer_than) < datetime.now(timezone.utc) +@pytest.fixture +def event_id(): + recent_event = list(get_events( + newer_than=(datetime.now(timezone.utc) - timedelta(days=5)).isoformat() + )) + return recent_event[0]["id"] if recent_event else None + test_update_sender = [ - (), + ("test.server.api"), +] + +test_update_username = [ + ("testing_user"), +] + +test_update_status = [ + ("pending"), + ("in_progress"), + ("finished"), + ("failed"), + ("aborted"), + ("restarted") +] + +test_update_description = [ + ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce viverra."), + ("Updated description test...") +] + +test_update_retries = [ + (1), + (0), + (10), +] + +@pytest.mark.parametrize("sender", test_update_sender) +@pytest.mark.parametrize("username", test_update_username) +@pytest.mark.parametrize("status", test_update_status) +@pytest.mark.parametrize("description", test_update_description) +@pytest.mark.parametrize("retries", test_update_retries) +def test_update_event( + event_id, + sender, + username, + status, + description, + retries, + project_name=None, + summary=None, + payload=None, + progress=None, +): + kwargs = { + key: value + for key, value in ( + ("event_id", event_id), + ("sender", sender), + ("project", project_name), + ("username", username), + ("status", status), + ("description", description), + ("summary", summary), + ("payload", payload), + ("progress", progress), + ("retries", retries), + ) + if value is not None + } + + prev = get_event(event_id=event_id) + update_event(**kwargs) + res = get_event(event_id=event_id) + + for key, value in res.items(): + assert value == prev.get(key) \ + or key in kwargs.keys() and value == kwargs.get(key) \ + or ( + key == "updatedAt" and ( + datetime.fromisoformat(value) - datetime.now(timezone.utc) < timedelta(minutes=1) + ) + ) + + +test_update_invalid_status = [ + ("finisheddd"), + ("pending_pending"), + (42), + (False), + ("_in_progress") +] + +@pytest.mark.parametrize("status", test_update_invalid_status) +def test_update_event_invalid_status(status): + events = list(get_events(project_names=["demo_Commercial"])) + + with pytest.raises(exceptions.HTTPRequestError): + update_event(events[0]["id"], status=status) + + +test_update_invalid_progress = [ + ("good"), + ("bad"), + (-1), + ([0, 1, 2]), + (101) ] -# def test_update_event(): +@pytest.mark.parametrize("progress", test_update_invalid_progress) +def test_update_event_invalid_progress(progress): + events = list(get_events(project_names=["demo_Commercial"])) + with pytest.raises(exceptions.HTTPRequestError): + update_event(events[0]["id"], progress=progress) From e46e8343166393e59b7a9746e7f92d32634a8a61 Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Thu, 7 Nov 2024 17:41:00 +0100 Subject: [PATCH 037/135] get_events/update_event: New test for timeout added, handling exception for timeout (has_children filter), code adjustments --- tests/test_server.py | 108 +++++++++++++++++++++++++------------------ 1 file changed, 63 insertions(+), 45 deletions(-) diff --git a/tests/test_server.py b/tests/test_server.py index a5fa743a0..8afb24a4d 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -11,6 +11,7 @@ is_connection_created, close_connection, get, + get_default_fields_for_type, get_event, get_events, get_project_names, @@ -102,7 +103,7 @@ def test_get(): 
test_has_children = [ (None), (True), - # (False), + (False), ] from datetime import datetime, timedelta, timezone @@ -129,21 +130,36 @@ def test_get(): test_fields = [ (None), ([]), + ([]) ] +@pytest.fixture(params=[3, 4, 5]) +def event_ids(request): + length = request.param + if length == 0: + return None + + recent_events = list(get_events( + newer_than=(datetime.now(timezone.utc) - timedelta(days=5)).isoformat() + )) + + return [recent_event["id"] for recent_event in recent_events[:length]] + # takes max 3 items in a list to reduce the number of combinations @pytest.mark.parametrize("topics", test_topics[-3:]) +@pytest.mark.parametrize("event_ids", [None] + [pytest.param(None, marks=pytest.mark.usefixtures("event_ids"))]) @pytest.mark.parametrize("project_names", test_project_names[-3:]) @pytest.mark.parametrize("states", test_states[-3:]) @pytest.mark.parametrize("users", test_users[-3:]) @pytest.mark.parametrize("include_logs", test_include_logs[-3:]) -@pytest.mark.parametrize("has_children", test_has_children[-3:]) +@pytest.mark.parametrize("has_children", test_has_children[2:3]) @pytest.mark.parametrize("newer_than", test_newer_than[-3:]) @pytest.mark.parametrize("older_than", test_older_than[-3:]) @pytest.mark.parametrize("fields", test_fields[-3:]) def test_get_events_all_filter_combinations( topics, + event_ids, project_names, states, users, @@ -155,22 +171,26 @@ def test_get_events_all_filter_combinations( ): """Tests all combination of possible filters. """ - # with many tests - ayon_api.exceptions.ServerError: Connection timed out. - # TODO - maybe some better solution if get_timeout() < 5: - set_timeout(20.0) - - res = list(get_events( - topics=topics, - project_names=project_names, - states=states, - users=users, - include_logs=include_logs, - has_children=has_children, - newer_than=newer_than, - older_than=older_than, - fields=fields - )) + set_timeout(None) # default timeout + + try: + res = list(get_events( + topics=topics, + event_ids=event_ids, + project_names=project_names, + states=states, + users=users, + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=fields + )) + except exceptions.ServerError as exc: + assert has_children == False, f"{exc} even if has_children is {has_children}." + print("Warning: ServerError encountered, test skipped due to timeout.") + pytest.skip("Skipping test due to server timeout.") # test if filtering was correct for item in res: @@ -242,30 +262,32 @@ def test_get_events_all_filter_combinations( fields=fields) )) for user in users) - assert fields is None or len(res) == sum(len(list(get_events( - topics=topics, - project_names=project_names, - states=states, - users=users, - include_logs=include_logs, - has_children=has_children, - newer_than=newer_than, - older_than=older_than, - fields=[field]) - )) for field in fields) + if fields == []: + fields = get_default_fields_for_type("event") + assert fields is None \ + or all( + set(event.keys()) == set(fields) + for event in res + ) -@pytest.fixture(params=[1, 2, 3, 4, 5]) -def event_ids(request): - length = request.param - if length == 0: - return None - recent_events = list(get_events( - newer_than=(datetime.now(timezone.utc) - timedelta(days=5)).isoformat() - )) +@pytest.mark.parametrize("has_children", test_has_children) +def test_get_events_timeout_has_children(has_children): + """Separete test for has_children filter. - return [recent_event["id"] for recent_event in recent_events[:length]] + Issues with timeouts. 
+ """ + try: + _ = list(get_events( + has_children=has_children, + newer_than=(datetime.now(timezone.utc) - timedelta(days=5)).isoformat() + )) + except exceptions.ServerError as exc: + has_children = True + assert has_children == False, f"{exc} even if has_children is {has_children}." + print("Warning: ServerError encountered, test skipped due to timeout.") + pytest.skip("Skipping test due to server timeout.") def test_get_events_event_ids(event_ids): @@ -328,7 +350,7 @@ def test_get_events_project_name_topic_user(project_names, topics, users): @pytest.mark.parametrize("newer_than", test_newer_than) @pytest.mark.parametrize("older_than", test_older_than) -def test_get_events_timestamp(newer_than, older_than): +def test_get_events_timestamps(newer_than, older_than): res = list(get_events( newer_than=newer_than, older_than=older_than @@ -514,10 +536,8 @@ def test_update_event( @pytest.mark.parametrize("status", test_update_invalid_status) def test_update_event_invalid_status(status): - events = list(get_events(project_names=["demo_Commercial"])) - with pytest.raises(exceptions.HTTPRequestError): - update_event(events[0]["id"], status=status) + update_event(event_id, status=status) test_update_invalid_progress = [ @@ -529,8 +549,6 @@ def test_update_event_invalid_status(status): ] @pytest.mark.parametrize("progress", test_update_invalid_progress) -def test_update_event_invalid_progress(progress): - events = list(get_events(project_names=["demo_Commercial"])) - +def test_update_event_invalid_progress(event_id, progress): with pytest.raises(exceptions.HTTPRequestError): - update_event(events[0]["id"], progress=progress) + update_event(event_id, progress=progress) From 5059b3d04ec3d0f7d7d4b79a447cd1ccc627fe2d Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Mon, 11 Nov 2024 11:13:13 +0100 Subject: [PATCH 038/135] Docs: Docstrings for all tests were added. --- tests/test_server.py | 169 ++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 157 insertions(+), 12 deletions(-) diff --git a/tests/test_server.py b/tests/test_server.py index 8afb24a4d..35074970d 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -30,6 +30,16 @@ def test_close_connection(): + """Tests the functionality of opening and closing the server API + connection. + + Verifies: + - Confirms that the connection is successfully created when + `get_server_api_connection()` is called. + - Ensures that the connection is closed correctly when + `close_connection()` is invoked, and that the connection + state is appropriately updated. + """ _ = get_server_api_connection() assert is_connection_created() is True close_connection() @@ -37,18 +47,37 @@ def test_close_connection(): def test_get_base_url(): + """Tests the retrieval of the base URL for the API. + + Verifies: + - Confirms that `get_base_url()` returns a string. + - Ensures that the returned URL matches the expected `AYON_BASE_URL`. + """ res = get_base_url() assert isinstance(res, str) assert res == AYON_BASE_URL def test_get_rest_url(): + """Tests the retrieval of the REST API URL. + + Verifies: + - Confirms that `get_rest_url()` returns a string. + - Ensures that the returned URL matches the expected `AYON_REST_URL`. + """ res = get_rest_url() assert isinstance(res, str) assert res == AYON_REST_URL def test_get(): + """Tests the `get` method for making API requests. + + Verifies: + - Ensures that a successful GET request to the endpoint 'info' + returns a status code of 200. + - Confirms that the response data is in the form of a dictionary. 
+ """ res = get("info") assert res.status_code == 200 assert isinstance(res.data, dict) @@ -83,7 +112,7 @@ def test_get(): (["entity.folder.attrib_changed", "entity.project.created", "entity.task.created", "settings.changed"]), ] -# incorrect name for statuses +# states is incorrect name for statuses test_states = [ (None), ([]), @@ -148,7 +177,10 @@ def event_ids(request): # takes max 3 items in a list to reduce the number of combinations @pytest.mark.parametrize("topics", test_topics[-3:]) -@pytest.mark.parametrize("event_ids", [None] + [pytest.param(None, marks=pytest.mark.usefixtures("event_ids"))]) +@pytest.mark.parametrize( + "event_ids", + [None] + [pytest.param(None, marks=pytest.mark.usefixtures("event_ids"))] +) @pytest.mark.parametrize("project_names", test_project_names[-3:]) @pytest.mark.parametrize("states", test_states[-3:]) @pytest.mark.parametrize("users", test_users[-3:]) @@ -169,7 +201,23 @@ def test_get_events_all_filter_combinations( older_than, fields ): - """Tests all combination of possible filters. + """Tests all combinations of possible filters for `get_events`. + + Verifies: + - Calls `get_events` with the provided filter parameters. + - Ensures each event in the result set matches the specified filters. + - Checks that the number of returned events matches the expected count + based on the filters applied. + - Confirms that each event contains only the specified fields, with + no extra keys. + + Note: + - Adjusts the timeout setting if necessary to handle a large number + of tests and avoid timeout errors. + - Some combinations of filter parameters may lead to a server timeout + error. When this occurs, the test will skip instead of failing. + - Currently, a ServerError due to timeout may occur when `has_children` + is set to False. """ if get_timeout() < 5: set_timeout(None) # default timeout @@ -192,7 +240,6 @@ def test_get_events_all_filter_combinations( print("Warning: ServerError encountered, test skipped due to timeout.") pytest.skip("Skipping test due to server timeout.") - # test if filtering was correct for item in res: assert item.get("topic") in topics, ( f"Expected 'project' one of values: {topics}, but got '{item.get('topic')}'" @@ -213,7 +260,6 @@ def test_get_events_all_filter_combinations( datetime.fromisoformat(item.get("createdAt")) < datetime.fromisoformat(older_than) ) - # test if all events were given assert topics is None or len(res) == sum(len(list(get_events( topics=[topic], project_names=project_names, @@ -274,9 +320,16 @@ def test_get_events_all_filter_combinations( @pytest.mark.parametrize("has_children", test_has_children) def test_get_events_timeout_has_children(has_children): - """Separete test for has_children filter. - - Issues with timeouts. + """Test `get_events` function with the `has_children` filter. + + Verifies: + - The `get_events` function handles requests correctly and does + not time out when using the `has_children` filter with events + created within the last 5 days. + - If a `ServerError` (likely due to a timeout) is raised: + - Logs a warning message and skips the test to avoid failure. + - Asserts that the `ServerError` should occur only when + `has_children` is set to False. """ try: _ = list(get_events( @@ -291,6 +344,13 @@ def test_get_events_timeout_has_children(has_children): def test_get_events_event_ids(event_ids): + """Test `get_events` function using specified event IDs. + + Verifies: + - Each item returned has an ID in the `event_ids` list. 
+ - The number of items returned matches the expected count + when filtered by each individual event ID. + """ res = list(get_events(event_ids=event_ids)) for item in res: @@ -301,6 +361,13 @@ def test_get_events_event_ids(event_ids): @pytest.mark.parametrize("project_names", test_project_names) def test_get_events_project_name(project_names): + """Test `get_events` function using specified project names. + + Verifies: + - Each item returned has a project in the `project_names` list. + - The count of items matches the expected number when filtered + by each individual project name. + """ res = list(get_events(project_names=project_names)) for item in res: @@ -313,6 +380,14 @@ def test_get_events_project_name(project_names): @pytest.mark.parametrize("project_names", test_project_names) @pytest.mark.parametrize("topics", test_topics) def test_get_events_project_name_topic(project_names, topics): + """Test `get_events` function using both project names and topics. + + Verifies: + - Each item returned has a project in `project_names` and a topic + in `topics`. + - The item count matches the expected number when filtered by + each project name and topic combination. + """ res = list(get_events( topics=topics, project_names=project_names @@ -331,6 +406,14 @@ def test_get_events_project_name_topic(project_names, topics): @pytest.mark.parametrize("topics", test_topics) @pytest.mark.parametrize("users", test_users) def test_get_events_project_name_topic_user(project_names, topics, users): + """Test `get_events` function using project names, topics, and users. + + Verifies: + - Each item has a project in `project_names`, a topic in `topics`, + and a user in `users`. + - The item count matches the expected number when filtered by + combinations of project names, topics, and users. + """ res = list(get_events( topics=topics, project_names=project_names, @@ -351,6 +434,12 @@ def test_get_events_project_name_topic_user(project_names, topics, users): @pytest.mark.parametrize("newer_than", test_newer_than) @pytest.mark.parametrize("older_than", test_older_than) def test_get_events_timestamps(newer_than, older_than): + """Test `get_events` function using date filters `newer_than` and `older_than`. + + Verifies: + - Each item's creation date falls within the specified date + range between `newer_than` and `older_than`. + """ res = list(get_events( newer_than=newer_than, older_than=older_than @@ -413,10 +502,26 @@ def test_get_events_invalid_data( users, newer_than ): - # with many tests - ayon_api.exceptions.ServerError: Connection timed out. - # TODO - maybe some better solution + """Tests `get_events` with invalid filter data to ensure correct handling + of invalid input and prevent errors or unexpected results. + + Verifies: + - Confirms that the result is either empty or aligns with expected valid + entries: + - `topics`: Result is empty or topics is set to `None`. + - `project_names`: Result is empty or project names exist in the + list of valid project names. + - `states`: Result is empty or states is set to `None`. + - `users`: Result is empty or each user exists as a valid user. + - `newer_than`: Result is empty or `newer_than` date is in the past. + + Note: + - Adjusts the timeout setting if necessary to handle a large number + of tests and avoid timeout errors. 
+ """ + if get_timeout() < 5: - set_timeout(20.0) + set_timeout(None) # default timeout value res = list(get_events( topics=topics, @@ -445,6 +550,14 @@ def test_get_events_invalid_data( @pytest.fixture def event_id(): + """Fixture that retrieves the ID of a recent event created within + the last 5 days. + + Returns: + - The event ID of the most recent event within the last 5 days + if available. + - `None` if no recent events are found within this time frame. + """ recent_event = list(get_events( newer_than=(datetime.now(timezone.utc) - timedelta(days=5)).isoformat() )) @@ -494,7 +607,25 @@ def test_update_event( summary=None, payload=None, progress=None, -): +): + """Verifies that the `update_event` function correctly updates event fields. + + Verifies: + - The function updates the specified event fields based on the provided + parameters (`sender`, `username`, `status`, `description`, `retries`, + etc.). + - Only the fields specified in `kwargs` are updated, and other fields + remain unchanged. + - The `updatedAt` field is updated and the change occurs within a + reasonable time frame (within one minute). + - The event's state before and after the update matches the expected + values for the updated fields. + + Notes: + - Parameters like `event_id`, `sender`, `username`, `status`, + `description`, `retries`, etc., are passed dynamically to the function. + - If any parameter is `None`, it is excluded from the update request. + """ kwargs = { key: value for key, value in ( @@ -536,6 +667,13 @@ def test_update_event( @pytest.mark.parametrize("status", test_update_invalid_status) def test_update_event_invalid_status(status): + """Tests `update_event` with invalid status values to ensure correct + error handling for unsupported status inputs. + + Verifies: + - Confirms that an `HTTPRequestError` is raised for invalid status values + when attempting to update an event with an unsupported status. + """ with pytest.raises(exceptions.HTTPRequestError): update_event(event_id, status=status) @@ -550,5 +688,12 @@ def test_update_event_invalid_status(status): @pytest.mark.parametrize("progress", test_update_invalid_progress) def test_update_event_invalid_progress(event_id, progress): + """Tests `update_event` with invalid progress values to ensure correct + error handling for unsupported progress inputs. + + Verifies: + - Confirms that an `HTTPRequestError` is raised for invalid progress values + when attempting to update an event with unsupported progress. + """ with pytest.raises(exceptions.HTTPRequestError): update_event(event_id, progress=progress) From 6bed1b19a6439d1ae772136b3a198218a234da73 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 13 Nov 2024 13:21:04 +0100 Subject: [PATCH 039/135] add sender type to server api and headers --- ayon_api/server_api.py | 36 +++++++++++++++++++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 2e151ffc9..6c75b1a1a 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -393,7 +393,10 @@ class ServerAPI(object): default_settings_variant (Optional[Literal["production", "staging"]]): Settings variant used by default if a method for settings won't get any (by default is 'production'). - sender (Optional[str]): Sender of requests. Used in server logs and + sender_type (Optional[str]): Sender type of requests. Used in server + logs and propagated into events. 
+ sender (Optional[str]): Sender of requests, more specific than + sender type (e.g. machine name). Used in server logs and propagated into events. ssl_verify (Union[bool, str, None]): Verify SSL certificate Looks for env variable value ``AYON_CA_FILE`` by default. If not @@ -419,6 +422,7 @@ def __init__( site_id=NOT_SET, client_version=None, default_settings_variant=None, + sender_type=None, sender=None, ssl_verify=None, cert=None, @@ -445,6 +449,7 @@ def __init__( or get_default_settings_variant() ) self._sender = sender + self._sender_type = sender_type self._timeout = None self._max_retries = None @@ -763,6 +768,31 @@ def set_sender(self, sender): sender = property(get_sender, set_sender) + def get_sender_type(self): + """Sender type used to send requests. + + Sender type is supported since AYON server 1.5.5 . + + Returns: + Union[str, None]: Sender type or None. + + """ + return self._sender_type + + def set_sender_type(self, sender_type): + """Change sender type used for requests. + + Args: + sender_type (Union[str, None]): Sender type or None. + + """ + if sender_type == self._sender_type: + return + self._sender_type = sender_type + self._update_session_headers() + + sender_type = property(get_sender_type, set_sender_type) + def get_default_service_username(self): """Default username used for callbacks when used with service API key. @@ -946,6 +976,7 @@ def _update_session_headers(self): ("X-as-user", self._as_user_stack.username), ("x-ayon-version", self._client_version), ("x-ayon-site-id", self._site_id), + ("x-sender-type", self._sender_type), ("x-sender", self._sender), ): if value is not None: @@ -1157,6 +1188,9 @@ def get_headers(self, content_type=None): if self._client_version is not None: headers["x-ayon-version"] = self._client_version + if self._sender_type is not None: + headers["x-sender-type"] = self._sender_type + if self._sender is not None: headers["x-sender"] = self._sender From 5c525f66c882cf1b12c56f024e8b3654d46d2891 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 13 Nov 2024 13:21:41 +0100 Subject: [PATCH 040/135] added sender type functions to global api --- ayon_api/__init__.py | 4 ++++ ayon_api/_api.py | 24 ++++++++++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/ayon_api/__init__.py b/ayon_api/__init__.py index 2ddbe9e03..46b30d99b 100644 --- a/ayon_api/__init__.py +++ b/ayon_api/__init__.py @@ -48,6 +48,8 @@ set_default_settings_variant, get_sender, set_sender, + get_sender_type, + set_sender_type, get_info, get_server_version, get_server_version_tuple, @@ -280,6 +282,8 @@ "set_default_settings_variant", "get_sender", "set_sender", + "get_sender_type", + "set_sender_type", "get_info", "get_server_version", "get_server_version_tuple", diff --git a/ayon_api/_api.py b/ayon_api/_api.py index 0ece9b708..724e1d945 100644 --- a/ayon_api/_api.py +++ b/ayon_api/_api.py @@ -550,6 +550,30 @@ def set_sender(*args, **kwargs): return con.set_sender(*args, **kwargs) +def get_sender_type(): + """Sender type used to send requests. + + Sender type is supported since AYON server 1.5.5 . + + Returns: + Union[str, None]: Sender type or None. + + """ + con = get_server_api_connection() + return con.get_sender_type() + + +def set_sender_type(*args, **kwargs): + """Change sender type used for requests. + + Args: + sender_type (Union[str, None]): Sender type or None. 
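A short sketch of how the sender and sender-type setters added in these commits might be used by a service process; the string values are arbitrary examples, and sender type requires AYON server 1.5.5 or newer as noted in the docstrings:

    import ayon_api

    # Tag outgoing requests so server logs and emitted events record
    # which kind of process sent them.
    ayon_api.set_sender_type("automated-tests")
    ayon_api.set_sender("ci-runner-01")
    print(ayon_api.get_sender_type(), ayon_api.get_sender())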
+ + """ + con = get_server_api_connection() + return con.set_sender_type(*args, **kwargs) + + def get_info(): """Get information about current used api key. From 896d66c9c98036857d021e6aab5814357eb900a1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 13 Nov 2024 14:08:58 +0100 Subject: [PATCH 041/135] added missing filters to enroll job --- ayon_api/_api.py | 4 ++++ ayon_api/server_api.py | 17 +++++++++++++++++ 2 files changed, 21 insertions(+) diff --git a/ayon_api/_api.py b/ayon_api/_api.py index 724e1d945..0acfd7e38 100644 --- a/ayon_api/_api.py +++ b/ayon_api/_api.py @@ -875,6 +875,10 @@ def enroll_event_job(*args, **kwargs): TODO: Add example of filters. max_retries (Optional[int]): How many times can be event retried. Default value is based on server (3 at the time of this PR). + ignore_older_than (Optional[int]): Ignore events older than + given number in days. + ignore_sender_types (Optional[List[str]]): Ignore events triggered + by given sender types. Returns: Union[None, dict[str, Any]]: None if there is no event matching diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 6c75b1a1a..b079c3ced 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -1663,6 +1663,8 @@ def enroll_event_job( sequential=None, events_filter=None, max_retries=None, + ignore_older_than=None, + ignore_sender_types=None, ): """Enroll job based on events. @@ -1711,6 +1713,10 @@ def enroll_event_job( TODO: Add example of filters. max_retries (Optional[int]): How many times can be event retried. Default value is based on server (3 at the time of this PR). + ignore_older_than (Optional[int]): Ignore events older than + given number in days. + ignore_sender_types (Optional[List[str]]): Ignore events triggered + by given sender types. Returns: Union[None, dict[str, Any]]: None if there is no event matching @@ -1722,6 +1728,7 @@ def enroll_event_job( "targetTopic": target_topic, "sender": sender, } + major, minor, patch, _, _ = self.server_version_tuple if max_retries is not None: kwargs["maxRetries"] = max_retries if sequential is not None: @@ -1730,6 +1737,16 @@ def enroll_event_job( kwargs["description"] = description if events_filter is not None: kwargs["filter"] = events_filter + if ( + ignore_older_than is not None + and (major, minor, patch) > (1, 5, 1) + ): + kwargs["ignoreOlderThan"] = ignore_older_than + if ( + ignore_sender_types is not None + and (major, minor, patch) > (1, 5, 4) + ): + kwargs["ignoreSenderTypes"] = ignore_sender_types response = self.post("enroll", **kwargs) if response.status_code == 204: From 98d9dc6b97e21e4d7ecc1a07bd15418f5060124b Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Thu, 14 Nov 2024 11:38:07 +0100 Subject: [PATCH 042/135] enroll_event_job/addons/thumbnails: New tests for enroll_event_job method in multiple scenarios. Tests for thumbnail operations - upload, download. Fixture for artist user --- tests/test_server.py | 331 ++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 326 insertions(+), 5 deletions(-) diff --git a/tests/test_server.py b/tests/test_server.py index 35074970d..39ab027ad 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -4,24 +4,54 @@ Make sure you have set AYON_TOKEN in your environment. 
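Before the test changes that follow, a hedged sketch of how a worker might combine enroll_event_job with the ignore_older_than and ignore_sender_types filters introduced in the previous commit; the topic names, the three-day window and the sender-type value are illustrative, and the new filters are only sent to sufficiently new servers:

    import ayon_api

    # Claim the next unprocessed source event, skipping events older than
    # three days and events emitted by the listed sender types.
    job = ayon_api.enroll_event_job(
        source_topic="test.source.topic",
        target_topic="test.target.topic",
        sender="my-worker",
        sequential=True,
        ignore_older_than=3,
        ignore_sender_types=["automated-tests"],
    )
    if job is not None:
        # ... process the source event, then close the target event.
        ayon_api.update_event(job["id"], status="finished")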
""" +from datetime import datetime, timedelta, timezone import os import pytest +import time from ayon_api import ( - is_connection_created, close_connection, + create_folder, + create_project, + create_thumbnail, + delete, + delete_project, + dispatch_event, + download_addon_private_file, + download_file_to_stream, + download_file, + enroll_event_job, get, + get_addon_project_settings, + get_addon_settings, + get_addon_settings_schema, + get_addon_site_settings_schema, + get_addon_site_settings, + get_addon_endpoint, + get_addon_url, + get_addons_info, + get_addons_project_settings, + get_addons_settings, + get_addons_studio_settings, get_default_fields_for_type, get_event, get_events, + get_folder_thumbnail, + get_project, get_project_names, get_user_by_name, get_server_api_connection, get_base_url, get_rest_url, + get_thumbnail, + get_thumbnail_by_id, get_timeout, + is_connection_created, set_timeout, + trigger_server_restart, update_event, + upload_addon_zip, + ServerAPI, exceptions ) @@ -39,6 +69,7 @@ def test_close_connection(): - Ensures that the connection is closed correctly when `close_connection()` is invoked, and that the connection state is appropriately updated. + """ _ = get_server_api_connection() assert is_connection_created() is True @@ -52,6 +83,7 @@ def test_get_base_url(): Verifies: - Confirms that `get_base_url()` returns a string. - Ensures that the returned URL matches the expected `AYON_BASE_URL`. + """ res = get_base_url() assert isinstance(res, str) @@ -64,6 +96,7 @@ def test_get_rest_url(): Verifies: - Confirms that `get_rest_url()` returns a string. - Ensures that the returned URL matches the expected `AYON_REST_URL`. + """ res = get_rest_url() assert isinstance(res, str) @@ -77,6 +110,7 @@ def test_get(): - Ensures that a successful GET request to the endpoint 'info' returns a status code of 200. - Confirms that the response data is in the form of a dictionary. + """ res = get("info") assert res.status_code == 200 @@ -135,8 +169,6 @@ def test_get(): (False), ] -from datetime import datetime, timedelta, timezone - test_newer_than = [ (None), ((datetime.now(timezone.utc) - timedelta(days=2)).isoformat()), @@ -218,6 +250,7 @@ def test_get_events_all_filter_combinations( error. When this occurs, the test will skip instead of failing. - Currently, a ServerError due to timeout may occur when `has_children` is set to False. + """ if get_timeout() < 5: set_timeout(None) # default timeout @@ -284,7 +317,7 @@ def test_get_events_all_filter_combinations( fields=fields) )) for project_name in project_names) - assert states is None or len(res) == sum(len(list(get_events( + assert states is None or len(res) == sum(len(list(get_events( topics=topics, project_names=project_names, states=[state], @@ -330,6 +363,7 @@ def test_get_events_timeout_has_children(has_children): - Logs a warning message and skips the test to avoid failure. - Asserts that the `ServerError` should occur only when `has_children` is set to False. + """ try: _ = list(get_events( @@ -350,6 +384,7 @@ def test_get_events_event_ids(event_ids): - Each item returned has an ID in the `event_ids` list. - The number of items returned matches the expected count when filtered by each individual event ID. + """ res = list(get_events(event_ids=event_ids)) @@ -367,6 +402,7 @@ def test_get_events_project_name(project_names): - Each item returned has a project in the `project_names` list. - The count of items matches the expected number when filtered by each individual project name. 
+ """ res = list(get_events(project_names=project_names)) @@ -387,6 +423,7 @@ def test_get_events_project_name_topic(project_names, topics): in `topics`. - The item count matches the expected number when filtered by each project name and topic combination. + """ res = list(get_events( topics=topics, @@ -439,6 +476,7 @@ def test_get_events_timestamps(newer_than, older_than): Verifies: - Each item's creation date falls within the specified date range between `newer_than` and `older_than`. + """ res = list(get_events( newer_than=newer_than, @@ -518,8 +556,8 @@ def test_get_events_invalid_data( Note: - Adjusts the timeout setting if necessary to handle a large number of tests and avoid timeout errors. + """ - if get_timeout() < 5: set_timeout(None) # default timeout value @@ -557,6 +595,7 @@ def event_id(): - The event ID of the most recent event within the last 5 days if available. - `None` if no recent events are found within this time frame. + """ recent_event = list(get_events( newer_than=(datetime.now(timezone.utc) - timedelta(days=5)).isoformat() @@ -625,6 +664,7 @@ def test_update_event( - Parameters like `event_id`, `sender`, `username`, `status`, `description`, `retries`, etc., are passed dynamically to the function. - If any parameter is `None`, it is excluded from the update request. + """ kwargs = { key: value @@ -673,6 +713,7 @@ def test_update_event_invalid_status(status): Verifies: - Confirms that an `HTTPRequestError` is raised for invalid status values when attempting to update an event with an unsupported status. + """ with pytest.raises(exceptions.HTTPRequestError): update_event(event_id, status=status) @@ -694,6 +735,286 @@ def test_update_event_invalid_progress(event_id, progress): Verifies: - Confirms that an `HTTPRequestError` is raised for invalid progress values when attempting to update an event with unsupported progress. + """ with pytest.raises(exceptions.HTTPRequestError): update_event(event_id, progress=progress) + + + +TEST_SOURCE_TOPIC = "test.source.topic" +TEST_TARGET_TOPIC = "test.target.topic" + +test_sequential = [ + (True), + (False), + (None) +] + +def clean_up(topics=[TEST_SOURCE_TOPIC, TEST_TARGET_TOPIC]): + events = list(get_events(topics=topics)) + for event in events: + if event["status"] not in ["finished", "failed"]: + update_event(event["id"], status="finished") + + +@pytest.fixture +def new_events(): + clean_up() + + num_of_events = 3 + return [ + dispatch_event(topic=TEST_SOURCE_TOPIC, sender="tester", description=f"New test event n. 
{num}")["id"] + for num in range(num_of_events) + ] + + +@pytest.mark.parametrize("sequential", test_sequential) +def test_enroll_event_job(sequential, new_events): + # clean_up() # "close" all pending jobs + + job_1 = enroll_event_job( + source_topic=TEST_SOURCE_TOPIC, + target_topic=TEST_TARGET_TOPIC, + sender="test_sender_1", + sequential=sequential + ) + + job_2 = enroll_event_job( + source_topic=TEST_SOURCE_TOPIC, + target_topic=TEST_TARGET_TOPIC, + sender="test_sender_2", + sequential=sequential + ) + + assert sequential is False \ + or sequential is None \ + or job_2 is None + + update_event(job_1["id"], status="finished") + + job_2 = enroll_event_job( + source_topic=TEST_SOURCE_TOPIC, + target_topic=TEST_TARGET_TOPIC, + sender="test_sender_2", + sequential=sequential + ) + + assert job_2 is not None \ + and job_1 != job_2 + + # TODO - delete events - if possible + + # src_event = get_event(job["dependsOn"]) + # update_event(job["id"], status="failed") + + +@pytest.mark.parametrize("sequential", test_sequential) +def test_enroll_event_job_failed(sequential): + clean_up() + + job_1 = enroll_event_job( + source_topic=TEST_SOURCE_TOPIC, + target_topic=TEST_TARGET_TOPIC, + sender="test_sender_1", + sequential=sequential + ) + + update_event(job_1["id"], status="failed") + + job_2 = enroll_event_job( + source_topic=TEST_SOURCE_TOPIC, + target_topic=TEST_TARGET_TOPIC, + sender="test_sender_2", + sequential=sequential + ) + + assert sequential is not True or job_1 == job_2 + + # TODO - delete events - if possible + + # src_event = get_event(job_1["dependsOn"]) + # print(src_event) + + # print(job) + # print(job_2) + + # update_event(job["id"], status="failed") + + +@pytest.mark.parametrize("sequential", test_sequential) +def test_enroll_event_job_same_sender(sequential): + clean_up() + + job_1 = enroll_event_job( + source_topic=TEST_SOURCE_TOPIC, + target_topic=TEST_TARGET_TOPIC, + sender="test_sender", + sequential=sequential + ) + + job_2 = enroll_event_job( + source_topic=TEST_SOURCE_TOPIC, + target_topic=TEST_TARGET_TOPIC, + sender="test_sender", + sequential=sequential + ) + + assert job_1 == job_2 + + # TODO - delete events - if possible + +test_invalid_topics = [ + (("invalid_source_topic", "invalid_target_topic")) +] + +@pytest.mark.parametrize("topics", test_invalid_topics) +@pytest.mark.parametrize("sequential", test_sequential) +def test_enroll_event_job_invalid_topics(topics, sequential): + clean_up() + + source_topic, target_topic = topics + + job = enroll_event_job( + source_topic=source_topic, + target_topic=target_topic, + sender="test_sender", + sequential=sequential + ) + + assert job is None + + +def test_enroll_event_job_sequential_false(): + clean_up() # "close" all pending jobs + new_events() + + depends_on_ids = set() + + for sender in ["test_1", "test_2", "test_3"]: + job = enroll_event_job( + source_topic=TEST_SOURCE_TOPIC, + target_topic=TEST_TARGET_TOPIC, + sender=sender, + sequential=False + ) + + assert job is not None \ + and job["dependsOn"] not in depends_on_ids + + depends_on_ids.add(job["dependsOn"]) + + # TODO - delete events if possible + + +TEST_PROJECT_NAME = "test_API_project" +TEST_PROJECT_CODE = "apitest" +AYON_THUMBNAIL_PATH = "tests/resources/ayon-symbol.png" + + +def test_thumbnail_operations( + project_name=TEST_PROJECT_NAME, + project_code=TEST_PROJECT_CODE, + thumbnail_path=AYON_THUMBNAIL_PATH +): + if get_project(project_name): + delete_project(TEST_PROJECT_NAME) + + project = create_project(project_name, project_code) + + thumbnail_id = 
create_thumbnail(project_name, thumbnail_path) + + folder_id = create_folder(project_name, "my_test_folder", thumbnail_id=thumbnail_id) + thumbnail = get_folder_thumbnail(project_name, folder_id, thumbnail_id) + + assert thumbnail.project_name == project_name + assert thumbnail.thumbnail_id == thumbnail_id + + with open(thumbnail_path, "rb") as file: + image_bytes = file.read() + + assert image_bytes == thumbnail.content + + delete_project(project["name"]) + + +def test_addon_methods(): + addon_name = "tests" + addon_version = "1.0.0" + download_path = "tests/resources/tmp_downloads" + private_file_path = os.path.join(download_path, "ayon-symbol.png") + + delete(f"/addons/{addon_name}/{addon_version}") + assert all(addon_name != addon["name"] for addon in get_addons_info()["addons"]) + + try: + _ = upload_addon_zip("tests/resources/addon/package/tests-1.0.0.zip") + + trigger_server_restart() + + # need to wait at least 0.1 sec. to restart server + time.sleep(0.1) + while True: + try: + addons = get_addons_info()["addons"] + break + except exceptions.ServerError as exc: + assert "Connection timed out" in str(exc) + + assert any(addon_name == addon["name"] for addon in addons) + + downloaded_file = download_addon_private_file( + addon_name, + addon_version, + "ayon-symbol.png", + download_path + ) + + assert downloaded_file == private_file_path + assert os.path.isfile(private_file_path) + + finally: + if os.path.isfile(private_file_path): + os.remove(private_file_path) + + if os.path.isdir(download_path): + os.rmdir(download_path) + + +@pytest.fixture +def api_artist_user(): + project = get_project(TEST_PROJECT_NAME) + if project is None: + project = create_project(TEST_PROJECT_NAME, TEST_PROJECT_CODE) + + api = get_server_api_connection() + + username = "testUser" + password = "testUserPassword" + response = api.get("accessGroups/_") + access_groups = [ + item["name"] + for item in response.data + ] + api.put( + f"users/{username}", + password=password, + data={ + "isAdmin": False, + "isManager": False, + "defaultAccessGroups": access_groups, + "accessGroups": { + project["name"]: access_groups + }, + } + ) + new_api = ServerAPI(api.base_url) + new_api.login(username, password) + + return new_api + + +def test_server_restart_as_user(api_artist_user): + with pytest.raises(Exception): + api_artist_user.trigger_server_restart() + From 535579a75e11354a29db9c0b0e776ef3cdc42658 Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Thu, 14 Nov 2024 11:40:25 +0100 Subject: [PATCH 043/135] Addon: example addon for testing --- .../addon/__pycache__/package.cpython-311.pyc | Bin 0 -> 328 bytes tests/resources/addon/create_package.py | 489 ++++++++++++++++++ tests/resources/addon/package.py | 9 + tests/resources/addon/package/tests-1.0.0.zip | Bin 0 -> 1646 bytes tests/resources/addon/private/ayon-symbol.png | Bin 0 -> 939 bytes tests/resources/addon/server/__init__.py | 19 + 6 files changed, 517 insertions(+) create mode 100644 tests/resources/addon/__pycache__/package.cpython-311.pyc create mode 100644 tests/resources/addon/create_package.py create mode 100644 tests/resources/addon/package.py create mode 100644 tests/resources/addon/package/tests-1.0.0.zip create mode 100644 tests/resources/addon/private/ayon-symbol.png create mode 100644 tests/resources/addon/server/__init__.py diff --git a/tests/resources/addon/__pycache__/package.cpython-311.pyc b/tests/resources/addon/__pycache__/package.cpython-311.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..7e8719b1aa3bea6008ebc4f9e30fa7f73775db32 GIT binary patch literal 328 zcmXv}Jx{|h5OtcQse%HkR8@RP>>U~xu_465&H_`G$i%1CB8iRdAi|V?!N!XCHytBS zNc@3H-8$ifa=N>BA9_!BpHUPdYgezA%MXh`UGs0wzSy4I;vG?xAnGIz9V7$lCN4vX zXTzuNJ(_vO*PygA{zC_E{zbfq7k9Iv@k&yF-7=kn30^=d#!C}sIfGKClu0uoH7`>M z&lQtEQ$i^(m6`U_j2D7v_L#$d{)M*PS-V@j;ssz$C=i9~QdAmujJc@w$^sD#_ZMLa zidQlNg?nWzNcKdQ5Q};@x-K}aa=4L^)$IONbDK7<<2Y?_*m&*esPWry+yvjzxCu8y TbbPu-llJ6nji!Gt4=m#s1D9hW literal 0 HcmV?d00001 diff --git a/tests/resources/addon/create_package.py b/tests/resources/addon/create_package.py new file mode 100644 index 000000000..5c5ba8590 --- /dev/null +++ b/tests/resources/addon/create_package.py @@ -0,0 +1,489 @@ +#!/usr/bin/env python + +"""Prepares server package from addon repo to upload to server. + +Requires Python 3.9. (Or at least 3.8+). + +This script should be called from cloned addon repo. + +It will produce 'package' subdirectory which could be pasted into server +addon directory directly (eg. into `ayon-backend/addons`). + +Format of package folder: +ADDON_REPO/package/{addon name}/{addon version} + +You can specify `--output_dir` in arguments to change output directory where +package will be created. Existing package directory will always be purged if +already present! This could be used to create package directly in server folder +if available. + +Package contains server side files directly, +client side code zipped in `private` subfolder. +""" + +import os +import sys +import re +import io +import shutil +import platform +import argparse +import logging +import collections +import zipfile +import subprocess +from typing import Optional, Iterable, Pattern, Union, List, Tuple + +import package + +FileMapping = Tuple[Union[str, io.BytesIO], str] +ADDON_NAME: str = package.name +ADDON_VERSION: str = package.version +ADDON_CLIENT_DIR: Union[str, None] = getattr(package, "client_dir", None) + +CURRENT_ROOT: str = os.path.dirname(os.path.abspath(__file__)) +SERVER_ROOT: str = os.path.join(CURRENT_ROOT, "server") +FRONTEND_ROOT: str = os.path.join(CURRENT_ROOT, "frontend") +FRONTEND_DIST_ROOT: str = os.path.join(FRONTEND_ROOT, "dist") +DST_DIST_DIR: str = os.path.join("frontend", "dist") +PRIVATE_ROOT: str = os.path.join(CURRENT_ROOT, "private") +PUBLIC_ROOT: str = os.path.join(CURRENT_ROOT, "public") +CLIENT_ROOT: str = os.path.join(CURRENT_ROOT, "client") + +VERSION_PY_CONTENT = f'''# -*- coding: utf-8 -*- +"""Package declaring AYON addon '{ADDON_NAME}' version.""" +__version__ = "{ADDON_VERSION}" +''' + +# Patterns of directories to be skipped for server part of addon +IGNORE_DIR_PATTERNS: List[Pattern] = [ + re.compile(pattern) + for pattern in { + # Skip directories starting with '.' + r"^\.", + # Skip any pycache folders + "^__pycache__$" + } +] + +# Patterns of files to be skipped for server part of addon +IGNORE_FILE_PATTERNS: List[Pattern] = [ + re.compile(pattern) + for pattern in { + # Skip files starting with '.' + # NOTE this could be an issue in some cases + r"^\.", + # Skip '.pyc' files + r"\.pyc$" + } +] + + +class ZipFileLongPaths(zipfile.ZipFile): + """Allows longer paths in zip files. + + Regular DOS paths are limited to MAX_PATH (260) characters, including + the string's terminating NUL character. + That limit can be exceeded by using an extended-length path that + starts with the '\\?\' prefix. 
+ """ + _is_windows = platform.system().lower() == "windows" + + def _extract_member(self, member, tpath, pwd): + if self._is_windows: + tpath = os.path.abspath(tpath) + if tpath.startswith("\\\\"): + tpath = "\\\\?\\UNC\\" + tpath[2:] + else: + tpath = "\\\\?\\" + tpath + + return super()._extract_member(member, tpath, pwd) + + +def _get_yarn_executable() -> Union[str, None]: + cmd = "which" + if platform.system().lower() == "windows": + cmd = "where" + + for line in subprocess.check_output( + [cmd, "yarn"], encoding="utf-8" + ).splitlines(): + if not line or not os.path.exists(line): + continue + try: + subprocess.call([line, "--version"]) + return line + except OSError: + continue + return None + + +def safe_copy_file(src_path: str, dst_path: str): + """Copy file and make sure destination directory exists. + + Ignore if destination already contains directories from source. + + Args: + src_path (str): File path that will be copied. + dst_path (str): Path to destination file. + """ + + if src_path == dst_path: + return + + dst_dir: str = os.path.dirname(dst_path) + os.makedirs(dst_dir, exist_ok=True) + + shutil.copy2(src_path, dst_path) + + +def _value_match_regexes(value: str, regexes: Iterable[Pattern]) -> bool: + return any( + regex.search(value) + for regex in regexes + ) + + +def find_files_in_subdir( + src_path: str, + ignore_file_patterns: Optional[List[Pattern]] = None, + ignore_dir_patterns: Optional[List[Pattern]] = None +) -> List[Tuple[str, str]]: + """Find all files to copy in subdirectories of given path. + + All files that match any of the patterns in 'ignore_file_patterns' will + be skipped and any directories that match any of the patterns in + 'ignore_dir_patterns' will be skipped with all subfiles. + + Args: + src_path (str): Path to directory to search in. + ignore_file_patterns (Optional[list[Pattern]]): List of regexes + to match files to ignore. + ignore_dir_patterns (Optional[list[Pattern]]): List of regexes + to match directories to ignore. + + Returns: + list[tuple[str, str]]: List of tuples with path to file and parent + directories relative to 'src_path'. 
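
As a rough illustration of the return shape, using a hypothetical repository layout that is not part of this addon:

    # Given:
    #   /repo/server/__init__.py
    #   /repo/server/settings/main.py
    #   /repo/server/__pycache__/...      (skipped via IGNORE_DIR_PATTERNS)
    find_files_in_subdir("/repo/server")
    # -> [
    #     ("/repo/server/__init__.py", "__init__.py"),
    #     ("/repo/server/settings/main.py", "settings/main.py"),  # joined with os.path.sep
    # ]
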
+ """ + + if ignore_file_patterns is None: + ignore_file_patterns = IGNORE_FILE_PATTERNS + + if ignore_dir_patterns is None: + ignore_dir_patterns = IGNORE_DIR_PATTERNS + output: List[Tuple[str, str]] = [] + if not os.path.exists(src_path): + return output + + hierarchy_queue: collections.deque = collections.deque() + hierarchy_queue.append((src_path, [])) + while hierarchy_queue: + item: Tuple[str, str] = hierarchy_queue.popleft() + dirpath, parents = item + for name in os.listdir(dirpath): + path: str = os.path.join(dirpath, name) + if os.path.isfile(path): + if not _value_match_regexes(name, ignore_file_patterns): + items: List[str] = list(parents) + items.append(name) + output.append((path, os.path.sep.join(items))) + continue + + if not _value_match_regexes(name, ignore_dir_patterns): + items: List[str] = list(parents) + items.append(name) + hierarchy_queue.append((path, items)) + + return output + + +def update_client_version(logger): + """Update version in client code if version.py is present.""" + if not ADDON_CLIENT_DIR: + return + + version_path: str = os.path.join( + CLIENT_ROOT, ADDON_CLIENT_DIR, "version.py" + ) + if not os.path.exists(version_path): + logger.debug("Did not find version.py in client directory") + return + + logger.info("Updating client version") + with open(version_path, "w") as stream: + stream.write(VERSION_PY_CONTENT) + + +def build_frontend(): + yarn_executable = _get_yarn_executable() + if yarn_executable is None: + raise RuntimeError("Yarn executable was not found.") + + subprocess.run([yarn_executable, "install"], cwd=FRONTEND_ROOT) + subprocess.run([yarn_executable, "build"], cwd=FRONTEND_ROOT) + if not os.path.exists(FRONTEND_DIST_ROOT): + raise RuntimeError( + "Frontend build failed. Did not find 'dist' folder." + ) + + +def get_client_files_mapping() -> List[Tuple[str, str]]: + """Mapping of source client code files to destination paths. + + Example output: + [ + ( + "C:/addons/MyAddon/version.py", + "my_addon/version.py" + ), + ( + "C:/addons/MyAddon/client/my_addon/__init__.py", + "my_addon/__init__.py" + ) + ] + + Returns: + list[tuple[str, str]]: List of path mappings to copy. The destination + path is relative to expected output directory. 
+ + """ + # Add client code content to zip + client_code_dir: str = os.path.join(CLIENT_ROOT, ADDON_CLIENT_DIR) + mapping = [ + (path, os.path.join(ADDON_CLIENT_DIR, sub_path)) + for path, sub_path in find_files_in_subdir(client_code_dir) + ] + + license_path = os.path.join(CURRENT_ROOT, "LICENSE") + if os.path.exists(license_path): + mapping.append((license_path, f"{ADDON_CLIENT_DIR}/LICENSE")) + return mapping + + +def get_client_zip_content(log) -> io.BytesIO: + log.info("Preparing client code zip") + files_mapping: List[Tuple[str, str]] = get_client_files_mapping() + stream = io.BytesIO() + with ZipFileLongPaths(stream, "w", zipfile.ZIP_DEFLATED) as zipf: + for src_path, subpath in files_mapping: + zipf.write(src_path, subpath) + stream.seek(0) + return stream + + +def get_base_files_mapping() -> List[FileMapping]: + filepaths_to_copy: List[FileMapping] = [ + ( + os.path.join(CURRENT_ROOT, "package.py"), + "package.py" + ) + ] + # Add license file to package if exists + license_path = os.path.join(CURRENT_ROOT, "LICENSE") + if os.path.exists(license_path): + filepaths_to_copy.append((license_path, "LICENSE")) + + # Go through server, private and public directories and find all files + for dirpath in (SERVER_ROOT, PRIVATE_ROOT, PUBLIC_ROOT): + if not os.path.exists(dirpath): + continue + + dirname = os.path.basename(dirpath) + for src_file, subpath in find_files_in_subdir(dirpath): + dst_subpath = os.path.join(dirname, subpath) + filepaths_to_copy.append((src_file, dst_subpath)) + + if os.path.exists(FRONTEND_DIST_ROOT): + for src_file, subpath in find_files_in_subdir(FRONTEND_DIST_ROOT): + dst_subpath = os.path.join(DST_DIST_DIR, subpath) + filepaths_to_copy.append((src_file, dst_subpath)) + + pyproject_toml = os.path.join(CLIENT_ROOT, "pyproject.toml") + if os.path.exists(pyproject_toml): + filepaths_to_copy.append( + (pyproject_toml, "private/pyproject.toml") + ) + + return filepaths_to_copy + + +def copy_client_code(output_dir: str, log: logging.Logger): + """Copies server side folders to 'addon_package_dir' + + Args: + output_dir (str): Output directory path. + log (logging.Logger) + + """ + log.info(f"Copying client for {ADDON_NAME}-{ADDON_VERSION}") + + full_output_path = os.path.join( + output_dir, f"{ADDON_NAME}_{ADDON_VERSION}" + ) + if os.path.exists(full_output_path): + shutil.rmtree(full_output_path) + os.makedirs(full_output_path, exist_ok=True) + + for src_path, dst_subpath in get_client_files_mapping(): + dst_path = os.path.join(full_output_path, dst_subpath) + safe_copy_file(src_path, dst_path) + + log.info("Client copy finished") + + +def copy_addon_package( + output_dir: str, + files_mapping: List[FileMapping], + log: logging.Logger +): + """Copy client code to output directory. + + Args: + output_dir (str): Directory path to output client code. + files_mapping (List[FileMapping]): List of tuples with source file + and destination subpath. + log (logging.Logger): Logger object. 
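
Putting these mappings together for this particular test addon (where `client_dir` is None, so no client zip is produced), the output should end up roughly like this -- a sketch assuming the default output directory and the files present under `tests/resources/addon`:

    package/
        tests/                      # written by copy_addon_package (--skip-zip)
            1.0.0/
                package.py
                server/__init__.py
                private/ayon-symbol.png
        tests-1.0.0.zip             # written by create_addon_package (default run)
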
+ + """ + log.info(f"Copying package for {ADDON_NAME}-{ADDON_VERSION}") + + # Add addon name and version to output directory + addon_output_dir: str = os.path.join( + output_dir, ADDON_NAME, ADDON_VERSION + ) + if os.path.isdir(addon_output_dir): + log.info(f"Purging {addon_output_dir}") + shutil.rmtree(addon_output_dir) + + os.makedirs(addon_output_dir, exist_ok=True) + + # Copy server content + for src_file, dst_subpath in files_mapping: + dst_path: str = os.path.join(addon_output_dir, dst_subpath) + dst_dir: str = os.path.dirname(dst_path) + os.makedirs(dst_dir, exist_ok=True) + if isinstance(src_file, io.BytesIO): + with open(dst_path, "wb") as stream: + stream.write(src_file.getvalue()) + else: + safe_copy_file(src_file, dst_path) + + log.info("Package copy finished") + + +def create_addon_package( + output_dir: str, + files_mapping: List[FileMapping], + log: logging.Logger +): + log.info(f"Creating package for {ADDON_NAME}-{ADDON_VERSION}") + + os.makedirs(output_dir, exist_ok=True) + output_path = os.path.join( + output_dir, f"{ADDON_NAME}-{ADDON_VERSION}.zip" + ) + + with ZipFileLongPaths(output_path, "w", zipfile.ZIP_DEFLATED) as zipf: + # Copy server content + for src_file, dst_subpath in files_mapping: + if isinstance(src_file, io.BytesIO): + zipf.writestr(dst_subpath, src_file.getvalue()) + else: + zipf.write(src_file, dst_subpath) + + log.info("Package created") + + +def main( + output_dir: Optional[str] = None, + skip_zip: Optional[bool] = False, + only_client: Optional[bool] = False +): + log: logging.Logger = logging.getLogger("create_package") + log.info("Package creation started") + + if not output_dir: + output_dir = os.path.join(CURRENT_ROOT, "package") + + has_client_code = bool(ADDON_CLIENT_DIR) + if has_client_code: + client_dir: str = os.path.join(CLIENT_ROOT, ADDON_CLIENT_DIR) + if not os.path.exists(client_dir): + raise RuntimeError( + f"Client directory was not found '{client_dir}'." + " Please check 'client_dir' in 'package.py'." + ) + update_client_version(log) + + if only_client: + if not has_client_code: + raise RuntimeError("Client code is not available. Skipping") + + copy_client_code(output_dir, log) + return + + log.info(f"Preparing package for {ADDON_NAME}-{ADDON_VERSION}") + + if os.path.exists(FRONTEND_ROOT): + build_frontend() + + files_mapping: List[FileMapping] = [] + files_mapping.extend(get_base_files_mapping()) + + if has_client_code: + files_mapping.append( + (get_client_zip_content(log), "private/client.zip") + ) + + # Skip server zipping + if skip_zip: + copy_addon_package(output_dir, files_mapping, log) + else: + create_addon_package(output_dir, files_mapping, log) + + log.info("Package creation finished") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument( + "--skip-zip", + dest="skip_zip", + action="store_true", + help=( + "Skip zipping server package and create only" + " server folder structure." + ) + ) + parser.add_argument( + "-o", "--output", + dest="output_dir", + default=None, + help=( + "Directory path where package will be created" + " (Will be purged if already exists!)" + ) + ) + parser.add_argument( + "--only-client", + dest="only_client", + action="store_true", + help=( + "Extract only client code. This is useful for development." + " Requires '-o', '--output' argument to be filled." + ) + ) + parser.add_argument( + "--debug", + dest="debug", + action="store_true", + help="Debug log messages." 
+ ) + + args = parser.parse_args(sys.argv[1:]) + level = logging.INFO + if args.debug: + level = logging.DEBUG + logging.basicConfig(level=level) + main(args.output_dir, args.skip_zip, args.only_client) diff --git a/tests/resources/addon/package.py b/tests/resources/addon/package.py new file mode 100644 index 000000000..649526135 --- /dev/null +++ b/tests/resources/addon/package.py @@ -0,0 +1,9 @@ +name = "tests" +title = "Tests" +version = "1.0.0" + +client_dir = None +# ayon_launcher_version = ">=1.0.2" + +ayon_required_addons = {} +ayon_compatible_addons = {} \ No newline at end of file diff --git a/tests/resources/addon/package/tests-1.0.0.zip b/tests/resources/addon/package/tests-1.0.0.zip new file mode 100644 index 0000000000000000000000000000000000000000..facd7496aa36896bc7838d30cfada6a2b73736be GIT binary patch literal 1646 zcmZ`(dpOg382`=O&T?l%xkQr7%xID>k}NSTY1l6`qge~f?bKX4QWi$Y@YBNSeFJTz< zL$@=dCB^I=%XFiG88MJ(GoLY^6oc()ZyqUi$Z2k(CX_b^yylM%FG^HZ=t?SQ-rrO+ z)KHg?eGu$!zIAoTt86x(IOlreiAC3B1YmfZwQ*_Vri}5~bGa71L6yA)4<0HoP-VGX zbmClQ;licgiYJd#X032n^okn$_hk)nYWekXI*xLF;a?PSYqrMkaR_WA#`lBbfrC4W zOb~&mJR)L~cIoMB^wI9Ax9Hj&i(MBRbLHXh(c+Ma!&bJ_s4eb`(;qeaw77yN)`l2PY)6v!qU>xm6a9wzX~HKI!#PWpc^)0<~ang7%$?1?!rTb z(5%2+++22&Sc7k#1u&mtm4J~$MzR$r82eR(64?N2HQ2jGnX-i)t<^WuXx$%7y>{q1 zIo>z|OT_AOYLjj!2s5;)zYxf+8eC!Wdh~xnzNMv}lc6b70K71W8*5)8^+$ULIm{us znwC7eM+TW00`3-oG@Kzfi@dvv+Q6fUh0T3ELCQYZbRJLmIj5K>e3nDIAoS4KMs}o` zqtJQP=vAnz=m$wi50oZ+z>01zkM065!bF0*EG{Lud=y>ZQQZa#J02B*ErJdl?^i$i zAM!PYdIrN&wDVn@sKvR2<^9+2YM`S}JH{^eeVA(JTmg&UXPhdaGfsXwe6o_{kMkkF z!1j#cvhC_q=6p#<)p013Vk9!ppz~rhujUA7QuWz@pd8$=ovYu$A?6Dv80~m!`{P|b z#z<{8a-S+=>-suHgk?VPxGjZ;mkD)%nY!B9q{|LOHEb@iPwz8U6vQ;J5-aPEsF8iK zjaMqyk*T3gJVNN|$ofq_Pg+uXXZ#|?zU*Z|FM)aMP{F*q;C;Z#M??YoW2ajJHX^LsRqy-YCV zQcw@?5k@O>4g`Rh3-fC)eL020uLZ>t8NZ%-GfX)LS~wQ&8~9`UOGOG zt%J;bJQiWEe7H6#VDC0(q7Hlx4( zAP7{mv7?9ArL=9YxRDx!lB`D@rB&r3mCkx!ZujDLR*{0aZB=2xs1folD@z?ce(g;J zXpf%m%atQ@ncWC^k;VNWUC}sLVI&#_>x@>-qKC3+v*^c}NHamXuIK>So!q)KFn_{|4BiapcD&BqRU;0EKDq-T(jq0d!JMQ^}%UqH+KL14KzgK~#9!?c7~%+b|3O z;9emFs&{Eo4zTP5T`v#;@w)=3_dmk2wTWfP_9jw496&y`XaRpP8H%)lynL{X7hwrY zSi%yPu!JQnVF^oE!V;FSge5Ft39BnB96%o?Zma<29(v-%`c&0(Rbn~w6_yh#m}g9k zSZ-!nMyy$7G#{3aSVpm7xtL|yu)x&LVhvU>iIRPd|H8G0>)^fQxrZxp;rCw`dSp zS1`MwuOnDtYrU9DYZ`I4HdfcchtKa@aoJae{;bbSZn|Ed^bxG}UH|zZ?9X~!ad+7d z-C3`XE3ZA@dYCW0S>9~BW)=G5h-JCC1B9*TA{Tt4$Fd;mOdBs(JYkAJvO;=W($(qq>r7YcrWs0K*%a^lM2bL>m z*$ylyXX*B=U=T4iXKl8J!#%3Ch+-D(B?@X~v6K}IhE<%!QkKWDRAZLIv20@&;8?mX zD;ONBw73dcH!SPalm%FpZpjJ;%PP!b8O!5Ysv*naS+*ey@GRYsW$-N1juj}ZsTs>t zSV~yJ`tM4fDT`^};@ahzo{3B+2&VJaP8R>dDqMXESsu$$O<7#`M5fFBhX(|!RE2o# z77puiy~oC%idFK(3&IkXu!JQnVF^oE!V;FSge5Ft2}@YQ`bQSe_ysZQG=w&+yYm15 N002ovPDHLkV1kKhs4xHk literal 0 HcmV?d00001 diff --git a/tests/resources/addon/server/__init__.py b/tests/resources/addon/server/__init__.py new file mode 100644 index 000000000..0f794cbcf --- /dev/null +++ b/tests/resources/addon/server/__init__.py @@ -0,0 +1,19 @@ +from ayon_server.addons import BaseServerAddon +from ayon_server.api.dependencies import CurrentUser + + +class TestsAddon(BaseServerAddon): + def initialize(self): + self.add_endpoint( + "test-get", + self.get_test, + method="GET", + ) + + async def get_test( + self, user: CurrentUser, + ): + """Return a random folder from the database""" + return { + "success": True, + } From 985523b82e3a616ad13e51172e2076b0120f014e Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Thu, 14 Nov 2024 12:02:54 +0100 Subject: [PATCH 044/135] Docs: new docstrings added to all tests: --- tests/test_server.py | 171 
+++++++++++++++++++++++++++++++++++++++---- 1 file changed, 158 insertions(+), 13 deletions(-) diff --git a/tests/test_server.py b/tests/test_server.py index 39ab027ad..5815139c9 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -771,8 +771,28 @@ def new_events(): @pytest.mark.parametrize("sequential", test_sequential) def test_enroll_event_job(sequential, new_events): - # clean_up() # "close" all pending jobs + """Tests the `enroll_event_job` function for proper event job enrollment and sequential behavior. + Verifies: + - `enroll_event_job` correctly creates and returns a job with specified parameters + (`source_topic`, `target_topic`, `sender`, and `sequential`). + - When `sequential` is set to `True`, only one job can be enrolled at a time, + preventing new enrollments until the first job is closed or updated. + - When `sequential` is `False` or `None`, multiple jobs can be enrolled + concurrently without conflicts. + - The `update_event` function successfully updates the `status` of a job + as expected, allowing for sequential job processing. + + Parameters: + new_events: Fixture or setup to initialize new events for the test case. + + Notes: + - `clean_up()` is called at the start to close any pending jobs, which + could interfere with the test setup and expected outcomes. + - `update_event` is used to set `job_1`'s status to "failed" to test + re-enrollment behavior. + + """ job_1 = enroll_event_job( source_topic=TEST_SOURCE_TOPIC, target_topic=TEST_TARGET_TOPIC, @@ -811,6 +831,26 @@ def test_enroll_event_job(sequential, new_events): @pytest.mark.parametrize("sequential", test_sequential) def test_enroll_event_job_failed(sequential): + """Tests `enroll_event_job` behavior when the initial job fails and sequential processing is enabled. + + Verifies: + - `enroll_event_job` creates a job (`job_1`) with specified parameters + (`source_topic`, `target_topic`, `sender`, and `sequential`). + - After `job_1` fails (status set to "failed"), a new job (`job_2`) can be + enrolled with the same parameters. + - When `sequential` is `True`, the test verifies that `job_1` and `job_2` + are identical, as a failed sequential job should not allow a new job + to be enrolled separately. + - When `sequential` is `False`, `job_1` and `job_2` are allowed to differ, + as concurrent processing is permitted. + + Notes: + - `clean_up()` is called at the start to close any pending jobs, which + could interfere with the test setup and expected outcomes. + - `update_event` is used to set `job_1`'s status to "failed" to test + re-enrollment behavior. + + """ clean_up() job_1 = enroll_event_job( @@ -833,17 +873,26 @@ def test_enroll_event_job_failed(sequential): # TODO - delete events - if possible - # src_event = get_event(job_1["dependsOn"]) - # print(src_event) - - # print(job) - # print(job_2) - - # update_event(job["id"], status="failed") - @pytest.mark.parametrize("sequential", test_sequential) def test_enroll_event_job_same_sender(sequential): + """Tests `enroll_event_job` behavior when multiple jobs are enrolled by the same sender. + + Verifies: + - `enroll_event_job` creates a job (`job_1`) with specified parameters + (`source_topic`, `target_topic`, `sender`, and `sequential`). + - When a second job (`job_2`) is enrolled by the same sender with + identical parameters, the function should return the same job as `job_1` + (indicating idempotent behavior for the same sender and parameters). 
+ - The test checks that `job_1` and `job_2` are identical, ensuring that + no duplicate jobs are created for the same sender when `sequential` + behavior does not permit additional jobs. + + Notes: + - `clean_up()` is used at the beginning to close any pending jobs, ensuring + they do not interfere with the test setup or outcomes. + + """ clean_up() job_1 = enroll_event_job( @@ -864,13 +913,28 @@ def test_enroll_event_job_same_sender(sequential): # TODO - delete events - if possible + test_invalid_topics = [ - (("invalid_source_topic", "invalid_target_topic")) + (("invalid_source_topic", "invalid_target_topic")), + (("nonexisting_source_topic", "nonexisting_target_topic")), ] @pytest.mark.parametrize("topics", test_invalid_topics) @pytest.mark.parametrize("sequential", test_sequential) def test_enroll_event_job_invalid_topics(topics, sequential): + """Tests `enroll_event_job` behavior when provided with invalid topics. + + Verifies: + - `enroll_event_job` returns `None` when given invalid `source_topic` + or `target_topic`, indicating that the function properly rejects + invalid topic values. + - The function correctly handles both sequential and non-sequential + job processing modes when invalid topics are used. + + Notes: + - `clean_up()` is called at the beginning to close any pending jobs that + may interfere with the test setup or outcomes. + """ clean_up() source_topic, target_topic = topics @@ -885,10 +949,24 @@ def test_enroll_event_job_invalid_topics(topics, sequential): assert job is None -def test_enroll_event_job_sequential_false(): - clean_up() # "close" all pending jobs - new_events() +def test_enroll_event_job_sequential_false(new_events): + """Tests `enroll_event_job` behavior when `sequential` is set to `False`. + + Verifies: + - `enroll_event_job` creates a unique job for each sender even when + `sequential` is set to `False`, allowing concurrent job processing. + - Each job has a unique `dependsOn` identifier, ensuring that no two + jobs are linked in dependency, as expected for non-sequential enrollment. + Parameters: + new_events: Fixture or setup to initialize new events for the test case. + + Notes: + - The `depends_on_ids` set is used to track `dependsOn` identifiers and + verify that each job has a unique dependency state, as required for + concurrent processing. + + """ depends_on_ids = set() for sender in ["test_1", "test_2", "test_3"]: @@ -917,6 +995,23 @@ def test_thumbnail_operations( project_code=TEST_PROJECT_CODE, thumbnail_path=AYON_THUMBNAIL_PATH ): + """Tests thumbnail operations for a project, including creation, association, retrieval, and verification. + + Verifies: + - A project is created with a specified name and code, and any existing + project with the same name is deleted before setup to ensure a clean state. + - A thumbnail is created for the project and associated with a folder. + - The thumbnail associated with the folder is correctly retrieved, with + attributes matching the project name and thumbnail ID. + - The content of the retrieved thumbnail matches the expected image bytes + read from the specified `thumbnail_path`. + + Notes: + - `delete_project` is called initially to remove any pre-existing project + with the same name, ensuring no conflicts during testing. + - At the end of the test, the project is deleted to clean up resources. 
+ + """ if get_project(project_name): delete_project(TEST_PROJECT_NAME) @@ -939,6 +1034,24 @@ def test_thumbnail_operations( def test_addon_methods(): + """Tests addon methods, including upload, verification, download, and cleanup of addon resources. + + Verifies: + - An addon with the specified name and version does not exist at the start. + - Uploads an addon package `.zip` file and triggers a server restart. + - Ensures the server restart completes, and verifies the uploaded addon is + available in the list of addons after the restart. + - Downloads a private file associated with the addon, verifying its + existence and correct download location. + - Cleans up downloaded files and directories after the test to maintain a + clean state. + + Notes: + - `time.sleep(0.1)` is used to allow for a brief pause for the server restart. + - The `finally` block removes downloaded files and the directory to prevent + residual test artifacts. + + """ addon_name = "tests" addon_version = "1.0.0" download_path = "tests/resources/tmp_downloads" @@ -981,8 +1094,27 @@ def test_addon_methods(): os.rmdir(download_path) + @pytest.fixture def api_artist_user(): + """Fixture that sets up an API connection for a non-admin artist user. + + Workflow: + - Checks if the project exists; if not, it creates one with specified + `TEST_PROJECT_NAME` and `TEST_PROJECT_CODE`. + - Establishes a server API connection and retrieves the list of available + access groups. + - Configures a new user with limited permissions (`isAdmin` and `isManager` + set to `False`) and assigns all available access groups as default and + project-specific groups. + - Creates a new API connection using the artist user's credentials + (`username` and `password`) and logs in with it. + + Returns: + new_api: A `ServerAPI` instance authenticated with the artist user's + credentials, ready to use in tests. + + """ project = get_project(TEST_PROJECT_NAME) if project is None: project = create_project(TEST_PROJECT_NAME, TEST_PROJECT_CODE) @@ -1015,6 +1147,19 @@ def api_artist_user(): def test_server_restart_as_user(api_artist_user): + """Tests that a non-admin artist user is not permitted to trigger a server restart. + + Verifies: + - An attempt to call `trigger_server_restart` as a non-admin artist user + raises an exception, ensuring that only users with the appropriate + permissions (e.g., admins) can perform server restart operations. + + Notes: + - The test checks the access control around the `trigger_server_restart` + method to confirm that only authorized users can perform critical actions + like server restarts. + + """ with pytest.raises(Exception): api_artist_user.trigger_server_restart() From f4515857d6bd6e04ef58007cd8f990a0e94e1be6 Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Thu, 14 Nov 2024 13:01:47 +0100 Subject: [PATCH 045/135] Small code improements - use of fixtures instead of calling clean_up func at the beginning of each events related tests --- tests/test_server.py | 87 ++++++++++++++++++++++++-------------------- 1 file changed, 48 insertions(+), 39 deletions(-) diff --git a/tests/test_server.py b/tests/test_server.py index 5815139c9..2d86f7ee8 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -450,6 +450,7 @@ def test_get_events_project_name_topic_user(project_names, topics, users): and a user in `users`. - The item count matches the expected number when filtered by combinations of project names, topics, and users. 
+ """ res = list(get_events( topics=topics, @@ -744,6 +745,7 @@ def test_update_event_invalid_progress(event_id, progress): TEST_SOURCE_TOPIC = "test.source.topic" TEST_TARGET_TOPIC = "test.target.topic" +DEFAULT_NUMBER_OF_EVENTS = 3 test_sequential = [ (True), @@ -751,7 +753,13 @@ def test_update_event_invalid_progress(event_id, progress): (None) ] -def clean_up(topics=[TEST_SOURCE_TOPIC, TEST_TARGET_TOPIC]): +@pytest.fixture +def clean_up_events(topics=[TEST_SOURCE_TOPIC, TEST_TARGET_TOPIC]): + """Called at the beginning to close any pending events that may interfere with + the test setup or outcomes by marking them as 'finished'. + + """ + print("clean_up FIXTURE", datetime.now()) events = list(get_events(topics=topics)) for event in events: if event["status"] not in ["finished", "failed"]: @@ -759,18 +767,26 @@ def clean_up(topics=[TEST_SOURCE_TOPIC, TEST_TARGET_TOPIC]): @pytest.fixture -def new_events(): - clean_up() +def create_test_events(num_of_events=DEFAULT_NUMBER_OF_EVENTS): + """Fixture to create a specified number of test events and return their IDs. - num_of_events = 3 + This fixture dispatches events to the `TEST_SOURCE_TOPIC` and returns the + list of event IDs for the created events. + + """ + print("new_tests FIXTURE", datetime.now()) return [ dispatch_event(topic=TEST_SOURCE_TOPIC, sender="tester", description=f"New test event n. {num}")["id"] for num in range(num_of_events) ] +# clean_up_events should be below create_test_events to ensure it is called first +# pytest probably does not guarantee the order of execution +@pytest.mark.usefixtures("create_test_events") +@pytest.mark.usefixtures("clean_up_events") @pytest.mark.parametrize("sequential", test_sequential) -def test_enroll_event_job(sequential, new_events): +def test_enroll_event_job(sequential): """Tests the `enroll_event_job` function for proper event job enrollment and sequential behavior. Verifies: @@ -787,12 +803,17 @@ def test_enroll_event_job(sequential, new_events): new_events: Fixture or setup to initialize new events for the test case. Notes: - - `clean_up()` is called at the start to close any pending jobs, which - could interfere with the test setup and expected outcomes. - `update_event` is used to set `job_1`'s status to "failed" to test re-enrollment behavior. + - TODO - delete events after test if possible """ + events = list(get_events( + newer_than=(datetime.now(timezone.utc) - timedelta(minutes=1)).isoformat() + )) + + print([event["updatedAt"] for event in events]) + job_1 = enroll_event_job( source_topic=TEST_SOURCE_TOPIC, target_topic=TEST_TARGET_TOPIC, @@ -823,12 +844,8 @@ def test_enroll_event_job(sequential, new_events): assert job_2 is not None \ and job_1 != job_2 - # TODO - delete events - if possible - - # src_event = get_event(job["dependsOn"]) - # update_event(job["id"], status="failed") - +@pytest.mark.usefixtures("clean_up_events") @pytest.mark.parametrize("sequential", test_sequential) def test_enroll_event_job_failed(sequential): """Tests `enroll_event_job` behavior when the initial job fails and sequential processing is enabled. @@ -845,14 +862,11 @@ def test_enroll_event_job_failed(sequential): as concurrent processing is permitted. Notes: - - `clean_up()` is called at the start to close any pending jobs, which - could interfere with the test setup and expected outcomes. - `update_event` is used to set `job_1`'s status to "failed" to test re-enrollment behavior. 
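
The pattern these tests exercise is the usual enroll-and-process loop a service would run. A condensed sketch using the same calls and the test topics defined below; the processing step itself is hypothetical:

    from ayon_api import enroll_event_job, get_event, update_event

    job = enroll_event_job(
        source_topic="test.source.topic",
        target_topic="test.target.topic",
        sender="tester",
        sequential=True,
    )
    if job is not None:
        source_event = get_event(job["dependsOn"])
        try:
            ...  # process 'source_event' here
            update_event(job["id"], status="finished")
        except Exception:
            update_event(job["id"], status="failed")
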
- - """ - clean_up() + - TODO - delete events after test if possible + """ job_1 = enroll_event_job( source_topic=TEST_SOURCE_TOPIC, target_topic=TEST_TARGET_TOPIC, @@ -871,9 +885,8 @@ def test_enroll_event_job_failed(sequential): assert sequential is not True or job_1 == job_2 - # TODO - delete events - if possible - +@pytest.mark.usefixtures("clean_up_events") @pytest.mark.parametrize("sequential", test_sequential) def test_enroll_event_job_same_sender(sequential): """Tests `enroll_event_job` behavior when multiple jobs are enrolled by the same sender. @@ -889,12 +902,9 @@ def test_enroll_event_job_same_sender(sequential): behavior does not permit additional jobs. Notes: - - `clean_up()` is used at the beginning to close any pending jobs, ensuring - they do not interfere with the test setup or outcomes. - - """ - clean_up() + - TODO - delete events after test if possible + """ job_1 = enroll_event_job( source_topic=TEST_SOURCE_TOPIC, target_topic=TEST_TARGET_TOPIC, @@ -911,14 +921,13 @@ def test_enroll_event_job_same_sender(sequential): assert job_1 == job_2 - # TODO - delete events - if possible - test_invalid_topics = [ (("invalid_source_topic", "invalid_target_topic")), (("nonexisting_source_topic", "nonexisting_target_topic")), ] +@pytest.mark.usefixtures("clean_up_events") @pytest.mark.parametrize("topics", test_invalid_topics) @pytest.mark.parametrize("sequential", test_sequential) def test_enroll_event_job_invalid_topics(topics, sequential): @@ -932,11 +941,10 @@ def test_enroll_event_job_invalid_topics(topics, sequential): job processing modes when invalid topics are used. Notes: - - `clean_up()` is called at the beginning to close any pending jobs that + - `clean_up_events()` is called at the beginning to close any pending jobs that may interfere with the test setup or outcomes. + """ - clean_up() - source_topic, target_topic = topics job = enroll_event_job( @@ -949,7 +957,11 @@ def test_enroll_event_job_invalid_topics(topics, sequential): assert job is None -def test_enroll_event_job_sequential_false(new_events): +# clean_up_events should be below create_test_events to ensure it is called first +# pytest probably does not guarantee the order of execution +@pytest.mark.usefixtures("create_test_events") +@pytest.mark.usefixtures("clean_up_events") +def test_enroll_event_job_sequential_false(): """Tests `enroll_event_job` behavior when `sequential` is set to `False`. Verifies: @@ -965,7 +977,7 @@ def test_enroll_event_job_sequential_false(new_events): - The `depends_on_ids` set is used to track `dependsOn` identifiers and verify that each job has a unique dependency state, as required for concurrent processing. - + - TODO - delete events after test if possible """ depends_on_ids = set() @@ -981,8 +993,6 @@ def test_enroll_event_job_sequential_false(new_events): and job["dependsOn"] not in depends_on_ids depends_on_ids.add(job["dependsOn"]) - - # TODO - delete events if possible TEST_PROJECT_NAME = "test_API_project" @@ -1047,10 +1057,10 @@ def test_addon_methods(): clean state. Notes: - - `time.sleep(0.1)` is used to allow for a brief pause for the server restart. + - `time.sleep()` is used to allow for a brief pause for the server restart. - The `finally` block removes downloaded files and the directory to prevent residual test artifacts. - + """ addon_name = "tests" addon_version = "1.0.0" @@ -1065,8 +1075,8 @@ def test_addon_methods(): trigger_server_restart() - # need to wait at least 0.1 sec. to restart server - time.sleep(0.1) + # need to wait at least 0.1 sec. 
to restart server + time.sleep(0.5) while True: try: addons = get_addons_info()["addons"] @@ -1094,7 +1104,6 @@ def test_addon_methods(): os.rmdir(download_path) - @pytest.fixture def api_artist_user(): """Fixture that sets up an API connection for a non-admin artist user. @@ -1158,7 +1167,7 @@ def test_server_restart_as_user(api_artist_user): - The test checks the access control around the `trigger_server_restart` method to confirm that only authorized users can perform critical actions like server restarts. - + """ with pytest.raises(Exception): api_artist_user.trigger_server_restart() From 077d9d1e4094e7e77c8129bec7c5e6e9bd064262 Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Thu, 14 Nov 2024 13:10:45 +0100 Subject: [PATCH 046/135] Debug prints deleted --- tests/test_server.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/tests/test_server.py b/tests/test_server.py index 2d86f7ee8..297a98ea7 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -759,7 +759,6 @@ def clean_up_events(topics=[TEST_SOURCE_TOPIC, TEST_TARGET_TOPIC]): the test setup or outcomes by marking them as 'finished'. """ - print("clean_up FIXTURE", datetime.now()) events = list(get_events(topics=topics)) for event in events: if event["status"] not in ["finished", "failed"]: @@ -774,7 +773,6 @@ def create_test_events(num_of_events=DEFAULT_NUMBER_OF_EVENTS): list of event IDs for the created events. """ - print("new_tests FIXTURE", datetime.now()) return [ dispatch_event(topic=TEST_SOURCE_TOPIC, sender="tester", description=f"New test event n. {num}")["id"] for num in range(num_of_events) @@ -808,12 +806,6 @@ def test_enroll_event_job(sequential): - TODO - delete events after test if possible """ - events = list(get_events( - newer_than=(datetime.now(timezone.utc) - timedelta(minutes=1)).isoformat() - )) - - print([event["updatedAt"] for event in events]) - job_1 = enroll_event_job( source_topic=TEST_SOURCE_TOPIC, target_topic=TEST_TARGET_TOPIC, From ae5db571d2cad844cc7659161eb0f3fe8df7aeab Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Thu, 14 Nov 2024 13:16:52 +0100 Subject: [PATCH 047/135] fix thumbnail mime type calculation --- ayon_api/server_api.py | 27 ++------------------------- 1 file changed, 2 insertions(+), 25 deletions(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 2e151ffc9..17535d8d7 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -7365,29 +7365,6 @@ def get_workfile_thumbnail( project_name, "workfile", workfile_id, thumbnail_id ) - def _get_thumbnail_mime_type(self, thumbnail_path): - """Get thumbnail mime type on thumbnail creation based on source path. - - Args: - thumbnail_path (str): Path to thumbnail source fie. - - Returns: - str: Mime type used for thumbnail creation. - - Raises: - ValueError: Mime type cannot be determined. - - """ - ext = os.path.splitext(thumbnail_path)[-1].lower() - if ext == ".png": - return "image/png" - - elif ext in (".jpeg", ".jpg"): - return "image/jpeg" - - raise ValueError( - "Thumbnail source file has unknown extensions {}".format(ext)) - def create_thumbnail(self, project_name, src_filepath, thumbnail_id=None): """Create new thumbnail on server from passed path. 
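
The removed helper only recognized `.png` and `.jpg`/`.jpeg` sources, while the `get_media_mime_type` call that replaces it in the hunks below is a generic utility. The thumbnail cases are assumed to behave the same, e.g. (assumption, not verified here):

    get_media_mime_type("thumbnail.png")   # -> "image/png" (assumed)
    get_media_mime_type("thumbnail.jpg")   # -> "image/jpeg" (assumed)
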
@@ -7415,7 +7392,7 @@ def create_thumbnail(self, project_name, src_filepath, thumbnail_id=None): ) return thumbnail_id - mime_type = self._get_thumbnail_mime_type(src_filepath) + mime_type = get_media_mime_type(src_filepath) response = self.upload_file( "projects/{}/thumbnails".format(project_name), src_filepath, @@ -7443,7 +7420,7 @@ def update_thumbnail(self, project_name, thumbnail_id, src_filepath): if not os.path.exists(src_filepath): raise ValueError("Entered filepath does not exist.") - mime_type = self._get_thumbnail_mime_type(src_filepath) + mime_type = get_media_mime_type(src_filepath) response = self.upload_file( "projects/{}/thumbnails/{}".format(project_name, thumbnail_id), src_filepath, From c297fcb780291cc29240a0ad7e43caee40ca36d7 Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Thu, 14 Nov 2024 14:46:23 +0100 Subject: [PATCH 048/135] .gitignore: New gitignor for test addon directory --- tests/resources/addon/.gitignore | 1 + tests/resources/addon/package/tests-1.0.0.zip | Bin 1646 -> 0 bytes 2 files changed, 1 insertion(+) create mode 100644 tests/resources/addon/.gitignore delete mode 100644 tests/resources/addon/package/tests-1.0.0.zip diff --git a/tests/resources/addon/.gitignore b/tests/resources/addon/.gitignore new file mode 100644 index 000000000..f2fd75d64 --- /dev/null +++ b/tests/resources/addon/.gitignore @@ -0,0 +1 @@ +/package/ \ No newline at end of file diff --git a/tests/resources/addon/package/tests-1.0.0.zip b/tests/resources/addon/package/tests-1.0.0.zip deleted file mode 100644 index facd7496aa36896bc7838d30cfada6a2b73736be..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1646 zcmZ`(dpOg382`=O&T?l%xkQr7%xID>k}NSTY1l6`qge~f?bKX4QWi$Y@YBNSeFJTz< zL$@=dCB^I=%XFiG88MJ(GoLY^6oc()ZyqUi$Z2k(CX_b^yylM%FG^HZ=t?SQ-rrO+ z)KHg?eGu$!zIAoTt86x(IOlreiAC3B1YmfZwQ*_Vri}5~bGa71L6yA)4<0HoP-VGX zbmClQ;licgiYJd#X032n^okn$_hk)nYWekXI*xLF;a?PSYqrMkaR_WA#`lBbfrC4W zOb~&mJR)L~cIoMB^wI9Ax9Hj&i(MBRbLHXh(c+Ma!&bJ_s4eb`(;qeaw77yN)`l2PY)6v!qU>xm6a9wzX~HKI!#PWpc^)0<~ang7%$?1?!rTb z(5%2+++22&Sc7k#1u&mtm4J~$MzR$r82eR(64?N2HQ2jGnX-i)t<^WuXx$%7y>{q1 zIo>z|OT_AOYLjj!2s5;)zYxf+8eC!Wdh~xnzNMv}lc6b70K71W8*5)8^+$ULIm{us znwC7eM+TW00`3-oG@Kzfi@dvv+Q6fUh0T3ELCQYZbRJLmIj5K>e3nDIAoS4KMs}o` zqtJQP=vAnz=m$wi50oZ+z>01zkM065!bF0*EG{Lud=y>ZQQZa#J02B*ErJdl?^i$i zAM!PYdIrN&wDVn@sKvR2<^9+2YM`S}JH{^eeVA(JTmg&UXPhdaGfsXwe6o_{kMkkF z!1j#cvhC_q=6p#<)p013Vk9!ppz~rhujUA7QuWz@pd8$=ovYu$A?6Dv80~m!`{P|b z#z<{8a-S+=>-suHgk?VPxGjZ;mkD)%nY!B9q{|LOHEb@iPwz8U6vQ;J5-aPEsF8iK zjaMqyk*T3gJVNN|$ofq_Pg+uXXZ#|?zU*Z|FM)aMP{F*q;C;Z#M??YoW2ajJHX^LsRqy-YCV zQcw@?5k@O>4g`Rh3-fC)eL020uLZ>t8NZ%-GfX)LS~wQ&8~9`UOGOG zt%J;bJQiWEe7H6#VDC0(q7Hlx4( zAP7{mv7?9ArL=9YxRDx!lB`D@rB&r3mCkx!ZujDLR*{0aZB=2xs1folD@z?ce(g;J zXpf%m%atQ@ncWC^k;VNWUC}sLVI&#_>x@>-qKC3+v*^c}NHamXuIK>So!q)K Date: Thu, 14 Nov 2024 14:50:23 +0100 Subject: [PATCH 049/135] .gitignore: Edit of gitignore --- tests/resources/addon/.gitignore | 3 ++- .../addon/__pycache__/package.cpython-311.pyc | Bin 328 -> 0 bytes 2 files changed, 2 insertions(+), 1 deletion(-) delete mode 100644 tests/resources/addon/__pycache__/package.cpython-311.pyc diff --git a/tests/resources/addon/.gitignore b/tests/resources/addon/.gitignore index f2fd75d64..4ac096f1e 100644 --- a/tests/resources/addon/.gitignore +++ b/tests/resources/addon/.gitignore @@ -1 +1,2 @@ -/package/ \ No newline at end of file +/package/ +/__pycache__/ \ No newline at end of file diff --git a/tests/resources/addon/__pycache__/package.cpython-311.pyc 
b/tests/resources/addon/__pycache__/package.cpython-311.pyc deleted file mode 100644 index 7e8719b1aa3bea6008ebc4f9e30fa7f73775db32..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 328 zcmXv}Jx{|h5OtcQse%HkR8@RP>>U~xu_465&H_`G$i%1CB8iRdAi|V?!N!XCHytBS zNc@3H-8$ifa=N>BA9_!BpHUPdYgezA%MXh`UGs0wzSy4I;vG?xAnGIz9V7$lCN4vX zXTzuNJ(_vO*PygA{zC_E{zbfq7k9Iv@k&yF-7=kn30^=d#!C}sIfGKClu0uoH7`>M z&lQtEQ$i^(m6`U_j2D7v_L#$d{)M*PS-V@j;ssz$C=i9~QdAmujJc@w$^sD#_ZMLa zidQlNg?nWzNcKdQ5Q};@x-K}aa=4L^)$IONbDK7<<2Y?_*m&*esPWry+yvjzxCu8y TbbPu-llJ6nji!Gt4=m#s1D9hW From e011f785cf4cb8ef3a155397e7a0c3aa69438038 Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Thu, 14 Nov 2024 14:54:31 +0100 Subject: [PATCH 050/135] Thumbnail: example thumbnail for testing --- tests/resources/ayon-symbol.png | Bin 0 -> 939 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 tests/resources/ayon-symbol.png diff --git a/tests/resources/ayon-symbol.png b/tests/resources/ayon-symbol.png new file mode 100644 index 0000000000000000000000000000000000000000..30afee0e89726f4e7da7107b79310d5828e5c11a GIT binary patch literal 939 zcmV;c162HpP)Fn_{|4BiapcD&BqRU;0EKDq-T(jq0d!JMQ^}%UqH+KL14KzgK~#9!?c7~%+b|3O z;9emFs&{Eo4zTP5T`v#;@w)=3_dmk2wTWfP_9jw496&y`XaRpP8H%)lynL{X7hwrY zSi%yPu!JQnVF^oE!V;FSge5Ft39BnB96%o?Zma<29(v-%`c&0(Rbn~w6_yh#m}g9k zSZ-!nMyy$7G#{3aSVpm7xtL|yu)x&LVhvU>iIRPd|H8G0>)^fQxrZxp;rCw`dSp zS1`MwuOnDtYrU9DYZ`I4HdfcchtKa@aoJae{;bbSZn|Ed^bxG}UH|zZ?9X~!ad+7d z-C3`XE3ZA@dYCW0S>9~BW)=G5h-JCC1B9*TA{Tt4$Fd;mOdBs(JYkAJvO;=W($(qq>r7YcrWs0K*%a^lM2bL>m z*$ylyXX*B=U=T4iXKl8J!#%3Ch+-D(B?@X~v6K}IhE<%!QkKWDRAZLIv20@&;8?mX zD;ONBw73dcH!SPalm%FpZpjJ;%PP!b8O!5Ysv*naS+*ey@GRYsW$-N1juj}ZsTs>t zSV~yJ`tM4fDT`^};@ahzo{3B+2&VJaP8R>dDqMXESsu$$O<7#`M5fFBhX(|!RE2o# z77puiy~oC%idFK(3&IkXu!JQnVF^oE!V;FSge5Ft2}@YQ`bQSe_ysZQG=w&+yYm15 N002ovPDHLkV1kKhs4xHk literal 0 HcmV?d00001 From 3b20133f2e8180fd07339e90ef7c4ec6cf37fd90 Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Thu, 14 Nov 2024 16:39:22 +0100 Subject: [PATCH 051/135] Code adjust for pass the linting check --- tests/test_server.py | 654 +++++++++++++++++++++++-------------------- 1 file changed, 353 insertions(+), 301 deletions(-) diff --git a/tests/test_server.py b/tests/test_server.py index 297a98ea7..64d1187f8 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -2,6 +2,7 @@ To run use: pytest --envfile {environment path}. Make sure you have set AYON_TOKEN in your environment. + """ from datetime import datetime, timedelta, timezone @@ -18,21 +19,9 @@ delete_project, dispatch_event, download_addon_private_file, - download_file_to_stream, - download_file, enroll_event_job, get, - get_addon_project_settings, - get_addon_settings, - get_addon_settings_schema, - get_addon_site_settings_schema, - get_addon_site_settings, - get_addon_endpoint, - get_addon_url, get_addons_info, - get_addons_project_settings, - get_addons_settings, - get_addons_studio_settings, get_default_fields_for_type, get_event, get_events, @@ -43,8 +32,6 @@ get_server_api_connection, get_base_url, get_rest_url, - get_thumbnail, - get_thumbnail_by_id, get_timeout, is_connection_created, set_timeout, @@ -60,16 +47,16 @@ def test_close_connection(): - """Tests the functionality of opening and closing the server API + """Tests the functionality of opening and closing the server API connection. Verifies: - - Confirms that the connection is successfully created when + - Confirms that the connection is successfully created when `get_server_api_connection()` is called. 
- Ensures that the connection is closed correctly when - `close_connection()` is invoked, and that the connection - state is appropriately updated. - + `close_connection()` is invoked, and that the connection state + is appropriately updated. + """ _ = get_server_api_connection() assert is_connection_created() is True @@ -83,7 +70,7 @@ def test_get_base_url(): Verifies: - Confirms that `get_base_url()` returns a string. - Ensures that the returned URL matches the expected `AYON_BASE_URL`. - + """ res = get_base_url() assert isinstance(res, str) @@ -96,7 +83,7 @@ def test_get_rest_url(): Verifies: - Confirms that `get_rest_url()` returns a string. - Ensures that the returned URL matches the expected `AYON_REST_URL`. - + """ res = get_rest_url() assert isinstance(res, str) @@ -107,10 +94,10 @@ def test_get(): """Tests the `get` method for making API requests. Verifies: - - Ensures that a successful GET request to the endpoint 'info' + - Ensures that a successful GET request to the endpoint 'info' returns a status code of 200. - Confirms that the response data is in the form of a dictionary. - + """ res = get("info") assert res.status_code == 200 @@ -134,16 +121,25 @@ def test_get(): (["entity.task.created", "entity.project.created"]), (["settings.changed", "entity.version.status_changed"]), (["entity.task.status_changed", "entity.folder.deleted"]), - (["entity.project.changed", "entity.task.tags_changed", "entity.product.created"]) + ([ + "entity.project.changed", + "entity.task.tags_changed", + "entity.product.created" + ]) ] test_users = [ (None), ([]), - (["admin"]), - (["mkolar", "tadeas.8964"]), + (["admin"]), + (["mkolar", "tadeas.8964"]), (["roy", "luke.inderwick", "ynbot"]), - (["entity.folder.attrib_changed", "entity.project.created", "entity.task.created", "settings.changed"]), + ([ + "entity.folder.attrib_changed", + "entity.project.created", + "entity.task.created", + "settings.changed" + ]), ] # states is incorrect name for statuses @@ -169,23 +165,24 @@ def test_get(): (False), ] +now = datetime.now(timezone.utc) + test_newer_than = [ (None), - ((datetime.now(timezone.utc) - timedelta(days=2)).isoformat()), - ((datetime.now(timezone.utc) - timedelta(days=5)).isoformat()), - ((datetime.now(timezone.utc) - timedelta(days=10)).isoformat()), - ((datetime.now(timezone.utc) - timedelta(days=20)).isoformat()), - ((datetime.now(timezone.utc) - timedelta(days=30)).isoformat()), + ((now - timedelta(days=2)).isoformat()), + ((now - timedelta(days=5)).isoformat()), + ((now - timedelta(days=10)).isoformat()), + ((now - timedelta(days=20)).isoformat()), + ((now - timedelta(days=30)).isoformat()), ] test_older_than = [ (None), - ((datetime.now(timezone.utc) - timedelta(days=0)).isoformat()), - ((datetime.now(timezone.utc) - timedelta(days=0)).isoformat()), - ((datetime.now(timezone.utc) - timedelta(days=5)).isoformat()), - ((datetime.now(timezone.utc) - timedelta(days=10)).isoformat()), - ((datetime.now(timezone.utc) - timedelta(days=20)).isoformat()), - ((datetime.now(timezone.utc) - timedelta(days=30)).isoformat()), + ((now - timedelta(days=0)).isoformat()), + ((now - timedelta(days=5)).isoformat()), + ((now - timedelta(days=10)).isoformat()), + ((now - timedelta(days=20)).isoformat()), + ((now - timedelta(days=30)).isoformat()), ] test_fields = [ @@ -210,7 +207,7 @@ def event_ids(request): # takes max 3 items in a list to reduce the number of combinations @pytest.mark.parametrize("topics", test_topics[-3:]) @pytest.mark.parametrize( - "event_ids", + "event_ids", [None] + 
[pytest.param(None, marks=pytest.mark.usefixtures("event_ids"))] ) @pytest.mark.parametrize("project_names", test_project_names[-3:]) @@ -238,19 +235,19 @@ def test_get_events_all_filter_combinations( Verifies: - Calls `get_events` with the provided filter parameters. - Ensures each event in the result set matches the specified filters. - - Checks that the number of returned events matches the expected count + - Checks that the number of returned events matches the expected count based on the filters applied. - - Confirms that each event contains only the specified fields, with + - Confirms that each event contains only the specified fields, with no extra keys. Note: - - Adjusts the timeout setting if necessary to handle a large number + - Adjusts the timeout setting if necessary to handle a large number of tests and avoid timeout errors. - - Some combinations of filter parameters may lead to a server timeout + - Some combinations of filter parameters may lead to a server timeout error. When this occurs, the test will skip instead of failing. - - Currently, a ServerError due to timeout may occur when `has_children` + - Currently, a ServerError due to timeout may occur when `has_children` is set to False. - + """ if get_timeout() < 5: set_timeout(None) # default timeout @@ -269,76 +266,81 @@ def test_get_events_all_filter_combinations( fields=fields )) except exceptions.ServerError as exc: - assert has_children == False, f"{exc} even if has_children is {has_children}." + assert has_children is False, ( + f"{exc} even if has_children is {has_children}." + ) print("Warning: ServerError encountered, test skipped due to timeout.") pytest.skip("Skipping test due to server timeout.") for item in res: - assert item.get("topic") in topics, ( - f"Expected 'project' one of values: {topics}, but got '{item.get('topic')}'" - ) - assert item.get("project") in project_names, ( - f"Expected 'project' one of values: {project_names}, but got '{item.get('project')}'" - ) - assert item.get("user") in users, ( - f"Expected 'user' one of values: {users}, but got '{item.get('user')}'" - ) - assert item.get("status") in states, ( - f"Expected 'state' to be one of {states}, but got '{item.get('state')}'" - ) + assert item.get("topic") in topics + assert item.get("project") in project_names + assert item.get("user") in users + assert item.get("status") in states + assert (newer_than is None) or ( - datetime.fromisoformat(item.get("createdAt")) > datetime.fromisoformat(newer_than) + datetime.fromisoformat(item.get("createdAt")) + > datetime.fromisoformat(newer_than) ) assert (older_than is None) or ( - datetime.fromisoformat(item.get("createdAt")) < datetime.fromisoformat(older_than) + datetime.fromisoformat(item.get("createdAt")) + < datetime.fromisoformat(older_than) ) - assert topics is None or len(res) == sum(len(list(get_events( - topics=[topic], - project_names=project_names, - states=states, - users=users, - include_logs=include_logs, - has_children=has_children, - newer_than=newer_than, - older_than=older_than, - fields=fields) + assert topics is None or len(res) == sum(len(list( + get_events( + topics=[topic], + project_names=project_names, + states=states, + users=users, + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=fields + ) )) for topic in topics) - assert project_names is None or len(res) == sum(len(list(get_events( - topics=topics, - project_names=[project_name], - states=states, - users=users, - include_logs=include_logs, - 
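
For orientation, one concrete combination of the filters parametrized above looks like this -- a sketch mirroring the call made in the test body, with representative filter values; the field names are assumptions based on the keys the assertions read:

    from datetime import datetime, timedelta, timezone
    from ayon_api import get_events

    events = list(get_events(
        topics=["entity.task.created", "entity.project.created"],
        states=["finished", "failed"],
        users=["admin"],
        include_logs=True,
        has_children=True,
        newer_than=(datetime.now(timezone.utc) - timedelta(days=5)).isoformat(),
        fields=["topic", "project", "user", "status", "createdAt"],
    ))
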
has_children=has_children, - newer_than=newer_than, - older_than=older_than, - fields=fields) + assert project_names is None or len(res) == sum(len(list( + get_events( + topics=topics, + project_names=[project_name], + states=states, + users=users, + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=fields + ) )) for project_name in project_names) - - assert states is None or len(res) == sum(len(list(get_events( - topics=topics, - project_names=project_names, - states=[state], - users=users, - include_logs=include_logs, - has_children=has_children, - newer_than=newer_than, - older_than=older_than, - fields=fields) + + assert states is None or len(res) == sum(len(list( + get_events( + topics=topics, + project_names=project_names, + states=[state], + users=users, + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=fields + ) )) for state in states) - - assert users is None or len(res) == sum(len(list(get_events( - topics=topics, - project_names=project_names, - states=states, - users=[user], - include_logs=include_logs, - has_children=has_children, - newer_than=newer_than, - older_than=older_than, - fields=fields) + + assert users is None or len(res) == sum(len(list( + get_events( + topics=topics, + project_names=project_names, + states=states, + users=[user], + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=fields + ) )) for user in users) if fields == []: @@ -356,23 +358,26 @@ def test_get_events_timeout_has_children(has_children): """Test `get_events` function with the `has_children` filter. Verifies: - - The `get_events` function handles requests correctly and does - not time out when using the `has_children` filter with events - created within the last 5 days. + - The `get_events` function handles requests correctly and does not + time out when using the `has_children` filter with events created + within the last 5 days. - If a `ServerError` (likely due to a timeout) is raised: - Logs a warning message and skips the test to avoid failure. - - Asserts that the `ServerError` should occur only when - `has_children` is set to False. - + - Asserts that the `ServerError` should occur only when + `has_children` is set to False. + """ try: _ = list(get_events( has_children=has_children, - newer_than=(datetime.now(timezone.utc) - timedelta(days=5)).isoformat() + newer_than=( + datetime.now(timezone.utc) - timedelta(days=5) + ).isoformat() )) except exceptions.ServerError as exc: - has_children = True - assert has_children == False, f"{exc} even if has_children is {has_children}." + assert has_children is False, ( + f"{exc} even if has_children is {has_children}." + ) print("Warning: ServerError encountered, test skipped due to timeout.") pytest.skip("Skipping test due to server timeout.") @@ -382,16 +387,20 @@ def test_get_events_event_ids(event_ids): Verifies: - Each item returned has an ID in the `event_ids` list. - - The number of items returned matches the expected count - when filtered by each individual event ID. - + - The number of items returned matches the expected count when filtered + by each individual event ID. 
+ """ res = list(get_events(event_ids=event_ids)) for item in res: assert item.get("id") in event_ids - - assert len(res) == sum(len(list(get_events(event_ids=[event_id]))) for event_id in event_ids) + + assert len(res) == sum(len(list( + get_events( + event_ids=[event_id] + ) + )) for event_id in event_ids) @pytest.mark.parametrize("project_names", test_project_names) @@ -400,17 +409,21 @@ def test_get_events_project_name(project_names): Verifies: - Each item returned has a project in the `project_names` list. - - The count of items matches the expected number when filtered + - The count of items matches the expected number when filtered by each individual project name. - + """ res = list(get_events(project_names=project_names)) - + for item in res: - assert item.get("project") in project_names, f"Expected 'project' value '{project_names}', but got '{item.get('project')}'" + assert item.get("project") in project_names # test if the legths are equal - assert len(res) == sum(len(list(get_events(project_names=[project_name]))) for project_name in project_names) + assert len(res) == sum(len(list( + get_events( + project_names=[project_name] + ) + )) for project_name in project_names) @pytest.mark.parametrize("project_names", test_project_names) @@ -419,11 +432,11 @@ def test_get_events_project_name_topic(project_names, topics): """Test `get_events` function using both project names and topics. Verifies: - - Each item returned has a project in `project_names` and a topic + - Each item returned has a project in `project_names` and a topic in `topics`. - - The item count matches the expected number when filtered by - each project name and topic combination. - + - The item count matches the expected number when filtered by each + project name and topic combination. + """ res = list(get_events( topics=topics, @@ -432,11 +445,22 @@ def test_get_events_project_name_topic(project_names, topics): for item in res: assert item.get("topic") in topics - assert item.get("project") in project_names, f"Expected 'project' value '{project_names}', but got '{item.get('project')}'" - + assert item.get("project") in project_names + # test if the legths are equal - assert len(res) == sum(len(list(get_events(project_names=[project_name], topics=topics))) for project_name in project_names) - assert len(res) == sum(len(list(get_events(project_names=project_names, topics=[topic]))) for topic in topics) + assert len(res) == sum(len(list( + get_events( + project_names=[project_name], + topics=topics + ) + )) for project_name in project_names) + + assert len(res) == sum(len(list( + get_events( + project_names=project_names, + topics=[topic] + ) + )) for topic in topics) @pytest.mark.parametrize("project_names", test_project_names) @@ -446,11 +470,11 @@ def test_get_events_project_name_topic_user(project_names, topics, users): """Test `get_events` function using project names, topics, and users. Verifies: - - Each item has a project in `project_names`, a topic in `topics`, + - Each item has a project in `project_names`, a topic in `topics`, and a user in `users`. - - The item count matches the expected number when filtered by + - The item count matches the expected number when filtered by combinations of project names, topics, and users. 
- + """ res = list(get_events( topics=topics, @@ -459,25 +483,43 @@ def test_get_events_project_name_topic_user(project_names, topics, users): )) for item in res: - assert item.get("topic") in topics, f"Expected 'project' one of values: {topics}, but got '{item.get('topic')}'" - assert item.get("project") in project_names, f"Expected 'project' one of values: {project_names}, but got '{item.get('project')}'" - assert item.get("user") in project_names, f"Expected 'project' one of values: {users}, but got '{item.get('user')}'" + assert item.get("topic") in topics + assert item.get("project") in project_names + assert item.get("user") in project_names # test if the legths are equal - assert len(res) == sum(len(list(get_events(project_names=[project_name], topics=topics))) for project_name in project_names) - assert len(res) == sum(len(list(get_events(project_names=project_names, topics=[topic]))) for topic in topics) - assert len(res) == sum(len(list(get_events(project_names=project_names, topics=[topic]))) for topic in topics) + assert len(res) == sum(len(list( + get_events( + project_names=[project_name], + topics=topics + ) + )) for project_name in project_names) + + assert len(res) == sum(len(list( + get_events( + project_names=project_names, + topics=[topic] + ) + )) for topic in topics) + + assert len(res) == sum(len(list( + get_events( + project_names=project_names, + topics=[topic] + ) + )) for topic in topics) @pytest.mark.parametrize("newer_than", test_newer_than) @pytest.mark.parametrize("older_than", test_older_than) def test_get_events_timestamps(newer_than, older_than): - """Test `get_events` function using date filters `newer_than` and `older_than`. + """Test `get_events` function using date filters `newer_than` and + `older_than`. Verifies: - - Each item's creation date falls within the specified date + - Each item's creation date falls within the specified date range between `newer_than` and `older_than`. - + """ res = list(get_events( newer_than=newer_than, @@ -486,10 +528,12 @@ def test_get_events_timestamps(newer_than, older_than): for item in res: assert (newer_than is None) or ( - datetime.fromisoformat(item.get("createdAt") > datetime.fromisoformat(newer_than)) + datetime.fromisoformat(item.get("createdAt") + > datetime.fromisoformat(newer_than)) ) assert (older_than is None) or ( - datetime.fromisoformat(item.get("createdAt") < datetime.fromisoformat(older_than)) + datetime.fromisoformat(item.get("createdAt") + < datetime.fromisoformat(older_than)) ) @@ -510,7 +554,7 @@ def test_get_events_timestamps(newer_than, older_than): test_invalid_states = [ (None), (["pending_invalid"]), - (["in_progress_invalid"]), + (["in_progress_invalid"]), (["finished_invalid", "failed_invalid"]), ] @@ -535,7 +579,7 @@ def test_get_events_timestamps(newer_than, older_than): @pytest.mark.parametrize("users", test_invalid_users) @pytest.mark.parametrize("newer_than", test_invalid_newer_than) def test_get_events_invalid_data( - topics, + topics, project_names, states, users, @@ -545,25 +589,26 @@ def test_get_events_invalid_data( of invalid input and prevent errors or unexpected results. Verifies: - - Confirms that the result is either empty or aligns with expected valid - entries: + - Confirms that the result is either empty or aligns with expected + valid entries: - `topics`: Result is empty or topics is set to `None`. - - `project_names`: Result is empty or project names exist in the + - `project_names`: Result is empty or project names exist in the list of valid project names. 
- `states`: Result is empty or states is set to `None`. - `users`: Result is empty or each user exists as a valid user. - - `newer_than`: Result is empty or `newer_than` date is in the past. + - `newer_than`: Result is empty or `newer_than` date is in the + past. Note: - - Adjusts the timeout setting if necessary to handle a large number + - Adjusts the timeout setting if necessary to handle a large number of tests and avoid timeout errors. - + """ if get_timeout() < 5: set_timeout(None) # default timeout value res = list(get_events( - topics=topics, + topics=topics, project_names=project_names, states=states, users=users, @@ -573,10 +618,13 @@ def test_get_events_invalid_data( valid_project_names = get_project_names() assert res == [] \ - or topics is None + or topics is None assert res == [] \ or project_names is None \ - or any(project_name in valid_project_names for project_name in project_names) + or any( + project_name in valid_project_names + for project_name in project_names + ) assert res == [] \ or states is None assert res == [] \ @@ -589,19 +637,19 @@ def test_get_events_invalid_data( @pytest.fixture def event_id(): - """Fixture that retrieves the ID of a recent event created within + """Fixture that retrieves the ID of a recent event created within the last 5 days. Returns: - - The event ID of the most recent event within the last 5 days + - The event ID of the most recent event within the last 5 days if available. - `None` if no recent events are found within this time frame. - + """ - recent_event = list(get_events( + recent_events = list(get_events( newer_than=(datetime.now(timezone.utc) - timedelta(days=5)).isoformat() )) - return recent_event[0]["id"] if recent_event else None + return recent_events[0]["id"] if recent_events else None test_update_sender = [ ("test.server.api"), @@ -621,7 +669,7 @@ def event_id(): ] test_update_description = [ - ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce viverra."), + ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce vivera."), ("Updated description test...") ] @@ -648,24 +696,26 @@ def test_update_event( payload=None, progress=None, ): - """Verifies that the `update_event` function correctly updates event fields. + """Verifies that the `update_event` function correctly updates event + fields. Verifies: - The function updates the specified event fields based on the provided - parameters (`sender`, `username`, `status`, `description`, `retries`, - etc.). - - Only the fields specified in `kwargs` are updated, and other fields + parameters (`sender`, `username`, `status`, `description`, + `retries`, etc.). + - Only the fields specified in `kwargs` are updated, and other fields remain unchanged. - - The `updatedAt` field is updated and the change occurs within a - reasonable time frame (within one minute). - - The event's state before and after the update matches the expected + - The `updatedAt` field is updated and the change occurs within + a reasonable time frame (within one minute). + - The event's state before and after the update matches the expected values for the updated fields. - + Notes: - - Parameters like `event_id`, `sender`, `username`, `status`, - `description`, `retries`, etc., are passed dynamically to the function. + - Parameters like `event_id`, `sender`, `username`, `status`, + `description`, `retries`, etc., are passed dynamically to + the function. - If any parameter is `None`, it is excluded from the update request. 
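Outside of the parametrized setup, an update is a single call that carries only the fields to change; a sketch with a placeholder event id:

from ayon_api import update_event

update_event(
    "c6a0e2f0b1d4...",                  # placeholder event id
    status="finished",
    description="Processed by worker",
)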
- + """ kwargs = { key: value @@ -693,7 +743,9 @@ def test_update_event( or key in kwargs.keys() and value == kwargs.get(key) \ or ( key == "updatedAt" and ( - datetime.fromisoformat(value) - datetime.now(timezone.utc) < timedelta(minutes=1) + (datetime.fromisoformat(value) - datetime.now(timezone.utc)) + < + timedelta(minutes=1) ) ) @@ -708,13 +760,14 @@ def test_update_event( @pytest.mark.parametrize("status", test_update_invalid_status) def test_update_event_invalid_status(status): - """Tests `update_event` with invalid status values to ensure correct + """Tests `update_event` with invalid status values to ensure correct error handling for unsupported status inputs. Verifies: - - Confirms that an `HTTPRequestError` is raised for invalid status values - when attempting to update an event with an unsupported status. - + - Confirms that an `HTTPRequestError` is raised for invalid status + values when attempting to update an event with an unsupported + status. + """ with pytest.raises(exceptions.HTTPRequestError): update_event(event_id, status=status) @@ -730,19 +783,19 @@ def test_update_event_invalid_status(status): @pytest.mark.parametrize("progress", test_update_invalid_progress) def test_update_event_invalid_progress(event_id, progress): - """Tests `update_event` with invalid progress values to ensure correct + """Tests `update_event` with invalid progress values to ensure correct error handling for unsupported progress inputs. Verifies: - - Confirms that an `HTTPRequestError` is raised for invalid progress values - when attempting to update an event with unsupported progress. - + - Confirms that an `HTTPRequestError` is raised for invalid progress + values when attempting to update an event with unsupported + progress. + """ with pytest.raises(exceptions.HTTPRequestError): update_event(event_id, progress=progress) - TEST_SOURCE_TOPIC = "test.source.topic" TEST_TARGET_TOPIC = "test.target.topic" DEFAULT_NUMBER_OF_EVENTS = 3 @@ -755,9 +808,9 @@ def test_update_event_invalid_progress(event_id, progress): @pytest.fixture def clean_up_events(topics=[TEST_SOURCE_TOPIC, TEST_TARGET_TOPIC]): - """Called at the beginning to close any pending events that may interfere with - the test setup or outcomes by marking them as 'finished'. - + """Used before running marked testt to close any pending events that may + interfere with the test setup or outcomes by marking them as 'finished'. + """ events = list(get_events(topics=topics)) for event in events: @@ -767,41 +820,40 @@ def clean_up_events(topics=[TEST_SOURCE_TOPIC, TEST_TARGET_TOPIC]): @pytest.fixture def create_test_events(num_of_events=DEFAULT_NUMBER_OF_EVENTS): - """Fixture to create a specified number of test events and return their IDs. - - This fixture dispatches events to the `TEST_SOURCE_TOPIC` and returns the - list of event IDs for the created events. + """This fixture dispatches events to the `TEST_SOURCE_TOPIC` and returns + the list of event IDs for the created events. """ return [ - dispatch_event(topic=TEST_SOURCE_TOPIC, sender="tester", description=f"New test event n. {num}")["id"] + dispatch_event( + topic=TEST_SOURCE_TOPIC, + sender="tester", + description=f"New test event n. 
{num}" + )["id"] for num in range(num_of_events) ] -# clean_up_events should be below create_test_events to ensure it is called first -# pytest probably does not guarantee the order of execution +# clean_up should be below create_test to ensure it is called first +# pytest probably does not guarantee the order of execution @pytest.mark.usefixtures("create_test_events") @pytest.mark.usefixtures("clean_up_events") @pytest.mark.parametrize("sequential", test_sequential) def test_enroll_event_job(sequential): - """Tests the `enroll_event_job` function for proper event job enrollment and sequential behavior. + """Tests the `enroll_event_job` function for proper event job enrollment + based on sequential argument. Verifies: - - `enroll_event_job` correctly creates and returns a job with specified parameters - (`source_topic`, `target_topic`, `sender`, and `sequential`). - - When `sequential` is set to `True`, only one job can be enrolled at a time, - preventing new enrollments until the first job is closed or updated. - - When `sequential` is `False` or `None`, multiple jobs can be enrolled - concurrently without conflicts. - - The `update_event` function successfully updates the `status` of a job - as expected, allowing for sequential job processing. - - Parameters: - new_events: Fixture or setup to initialize new events for the test case. + - When `sequential` is set to `True`, only one job can be enrolled at + a time, preventing new enrollments until the first job is closed or + updated. + - When `sequential` is `False` or `None`, multiple jobs can be + enrolled concurrently without conflicts. + - The `update_event` function updates the `status` of a job to allowing + next sequential job processing. Notes: - - `update_event` is used to set `job_1`'s status to "failed" to test + - `update_event` is used to set `job_1`'s status to "failed" to test re-enrollment behavior. - TODO - delete events after test if possible @@ -840,22 +892,23 @@ def test_enroll_event_job(sequential): @pytest.mark.usefixtures("clean_up_events") @pytest.mark.parametrize("sequential", test_sequential) def test_enroll_event_job_failed(sequential): - """Tests `enroll_event_job` behavior when the initial job fails and sequential processing is enabled. + """Tests `enroll_event_job` behavior when the initial job fails and + sequential processing is enabled. Verifies: - - `enroll_event_job` creates a job (`job_1`) with specified parameters - (`source_topic`, `target_topic`, `sender`, and `sequential`). - - After `job_1` fails (status set to "failed"), a new job (`job_2`) can be - enrolled with the same parameters. - - When `sequential` is `True`, the test verifies that `job_1` and `job_2` - are identical, as a failed sequential job should not allow a new job - to be enrolled separately. - - When `sequential` is `False`, `job_1` and `job_2` are allowed to differ, - as concurrent processing is permitted. + - `enroll_event_job` creates a job (`job_1`) with specified parameters + `(`source_topic`, `target_topic`, `sender`, and `sequential`). + - After `job_1` fails (status set to "failed"), a new job (`job_2`) can + be enrolled with the same parameters. + - When `sequential` is `True`, the test verifies that `job_1` and + `job_2` are identical, as a failed sequential job should not allow + a new job to be enrolled separately. + - When `sequential` is `False`, `job_1` and `job_2` are allowed to + differ, as concurrent processing is permitted. 
Notes: - - `update_event` is used to set `job_1`'s status to "failed" to test - re-enrollment behavior. + - `update_event` is used to set `job_1`'s status to "failed" to test + re-enrollment behavior. - TODO - delete events after test if possible """ @@ -881,17 +934,15 @@ def test_enroll_event_job_failed(sequential): @pytest.mark.usefixtures("clean_up_events") @pytest.mark.parametrize("sequential", test_sequential) def test_enroll_event_job_same_sender(sequential): - """Tests `enroll_event_job` behavior when multiple jobs are enrolled by the same sender. + """Tests `enroll_event_job` behavior when multiple jobs are enrolled + by the same sender. Verifies: - - `enroll_event_job` creates a job (`job_1`) with specified parameters - (`source_topic`, `target_topic`, `sender`, and `sequential`). - - When a second job (`job_2`) is enrolled by the same sender with - identical parameters, the function should return the same job as `job_1` - (indicating idempotent behavior for the same sender and parameters). - - The test checks that `job_1` and `job_2` are identical, ensuring that - no duplicate jobs are created for the same sender when `sequential` - behavior does not permit additional jobs. + - `enroll_event_job` creates a `job_1` and `job_2` with the same + parameters (`source_topic`, `target_topic`, `sender`, and + `sequential`). + - The test checks that `job_1` and `job_2` are identical, ensuring that + no duplicate jobs are created for the same sender. Notes: - TODO - delete events after test if possible @@ -914,34 +965,32 @@ def test_enroll_event_job_same_sender(sequential): assert job_1 == job_2 -test_invalid_topics = [ - (("invalid_source_topic", "invalid_target_topic")), - (("nonexisting_source_topic", "nonexisting_target_topic")), +test_invalid_topic = [ + ("invalid_source_topic"), + ("nonexisting_source_topic"), ] @pytest.mark.usefixtures("clean_up_events") -@pytest.mark.parametrize("topics", test_invalid_topics) +@pytest.mark.parametrize("topic", test_invalid_topics) @pytest.mark.parametrize("sequential", test_sequential) -def test_enroll_event_job_invalid_topics(topics, sequential): +def test_enroll_event_job_invalid_topic(topic, sequential): """Tests `enroll_event_job` behavior when provided with invalid topics. Verifies: - - `enroll_event_job` returns `None` when given invalid `source_topic` - or `target_topic`, indicating that the function properly rejects - invalid topic values. - - The function correctly handles both sequential and non-sequential - job processing modes when invalid topics are used. + - `enroll_event_job` returns `None` when given invalid `source_topic` + or `target_topic`, indicating that the function properly rejects + invalid topic values. + - The function correctly handles both sequential and non-sequential + job processing modes when invalid topics are used. Notes: - - `clean_up_events()` is called at the beginning to close any pending jobs that - may interfere with the test setup or outcomes. - + - `clean_up_events()` is called at the beginning to close any pending + jobs that may interfere with the test setup or outcomes. 
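The behaviour verified by these enrollment tests boils down to a small worker loop; a sketch, with the sender name as a placeholder and the topic constants being the module-level values defined above:

from ayon_api import enroll_event_job, update_event

job = enroll_event_job(
    source_topic=TEST_SOURCE_TOPIC,
    target_topic=TEST_TARGET_TOPIC,
    sender="worker_1",          # placeholder sender
    sequential=True,            # one unfinished job at a time
)
if job is not None:
    try:
        # ... process the source event here ...
        update_event(job["id"], status="finished")
    except Exception:
        update_event(job["id"], status="failed")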
+ """ - source_topic, target_topic = topics - job = enroll_event_job( - source_topic=source_topic, - target_topic=target_topic, + source_topic=topic, + target_topic=TEST_TARGET_TOPIC, sender="test_sender", sequential=sequential ) @@ -949,31 +998,28 @@ def test_enroll_event_job_invalid_topics(topics, sequential): assert job is None -# clean_up_events should be below create_test_events to ensure it is called first -# pytest probably does not guarantee the order of execution +# clean_up should be below create_test to ensure it is called first +# pytest probably does not guarantee the order of execution @pytest.mark.usefixtures("create_test_events") @pytest.mark.usefixtures("clean_up_events") def test_enroll_event_job_sequential_false(): """Tests `enroll_event_job` behavior when `sequential` is set to `False`. Verifies: - - `enroll_event_job` creates a unique job for each sender even when - `sequential` is set to `False`, allowing concurrent job processing. - - Each job has a unique `dependsOn` identifier, ensuring that no two - jobs are linked in dependency, as expected for non-sequential enrollment. - - Parameters: - new_events: Fixture or setup to initialize new events for the test case. - + - `enroll_event_job` creates a unique job for each sender even when + `sequential` is set to `False`, allowing concurrent job processing. + - Each job has a unique `dependsOn` identifier + Notes: - - The `depends_on_ids` set is used to track `dependsOn` identifiers and - verify that each job has a unique dependency state, as required for + - The `depends_on_ids` set is used to track `dependsOn` identifiers and + verify that each job has a unique dependency state, as required for concurrent processing. - TODO - delete events after test if possible + """ depends_on_ids = set() - for sender in ["test_1", "test_2", "test_3"]: + for sender in ["tester_1", "tester_2", "tester_3"]: job = enroll_event_job( source_topic=TEST_SOURCE_TOPIC, target_topic=TEST_TARGET_TOPIC, @@ -997,31 +1043,33 @@ def test_thumbnail_operations( project_code=TEST_PROJECT_CODE, thumbnail_path=AYON_THUMBNAIL_PATH ): - """Tests thumbnail operations for a project, including creation, association, retrieval, and verification. + """Tests thumbnail operations for a project. Verifies: - - A project is created with a specified name and code, and any existing - project with the same name is deleted before setup to ensure a clean state. - A thumbnail is created for the project and associated with a folder. - - The thumbnail associated with the folder is correctly retrieved, with - attributes matching the project name and thumbnail ID. - - The content of the retrieved thumbnail matches the expected image bytes - read from the specified `thumbnail_path`. + - The thumbnail associated with the folder is correctly retrieved, with + attributes matching the project name and thumbnail ID. + - The content of the retrieved thumbnail matches the expected image + bytes read from the specified `thumbnail_path`. Notes: - - `delete_project` is called initially to remove any pre-existing project - with the same name, ensuring no conflicts during testing. + - `delete_project` is called initially to remove any pre-existing + project with the same name, ensuring no conflicts during testing. - At the end of the test, the project is deleted to clean up resources. 
- + """ if get_project(project_name): delete_project(TEST_PROJECT_NAME) project = create_project(project_name, project_code) - + thumbnail_id = create_thumbnail(project_name, thumbnail_path) - folder_id = create_folder(project_name, "my_test_folder", thumbnail_id=thumbnail_id) + folder_id = create_folder( + project_name, + "my_test_folder", + thumbnail_id=thumbnail_id + ) thumbnail = get_folder_thumbnail(project_name, folder_id, thumbnail_id) assert thumbnail.project_name == project_name @@ -1036,37 +1084,41 @@ def test_thumbnail_operations( def test_addon_methods(): - """Tests addon methods, including upload, verification, download, and cleanup of addon resources. + """Tests addon methods, including upload and download of private file. Verifies: - - An addon with the specified name and version does not exist at the start. + - An addon with the specified name and version does not exist at the + start. - Uploads an addon package `.zip` file and triggers a server restart. - - Ensures the server restart completes, and verifies the uploaded addon is - available in the list of addons after the restart. - - Downloads a private file associated with the addon, verifying its - existence and correct download location. - - Cleans up downloaded files and directories after the test to maintain a - clean state. + - Ensures the server restart completes, and verifies the uploaded addon + is available in the list of addons after the restart. + - Downloads a private file associated with the addon, verifying its + existence and correct download location. + - Cleans up downloaded files and directories after the test to maintain + a clean state. Notes: - - `time.sleep()` is used to allow for a brief pause for the server restart. - - The `finally` block removes downloaded files and the directory to prevent - residual test artifacts. + - `time.sleep()` is used to allow for a brief pause for the server + restart. + - The `finally` block removes downloaded files and the directory to + prevent residual test artifacts. """ addon_name = "tests" addon_version = "1.0.0" download_path = "tests/resources/tmp_downloads" private_file_path = os.path.join(download_path, "ayon-symbol.png") - + delete(f"/addons/{addon_name}/{addon_version}") - assert all(addon_name != addon["name"] for addon in get_addons_info()["addons"]) + assert all( + addon_name != addon["name"] for addon in get_addons_info()["addons"] + ) try: _ = upload_addon_zip("tests/resources/addon/package/tests-1.0.0.zip") - + trigger_server_restart() - + # need to wait at least 0.1 sec. to restart server time.sleep(0.5) while True: @@ -1101,18 +1153,18 @@ def api_artist_user(): """Fixture that sets up an API connection for a non-admin artist user. Workflow: - - Checks if the project exists; if not, it creates one with specified - `TEST_PROJECT_NAME` and `TEST_PROJECT_CODE`. - - Establishes a server API connection and retrieves the list of available - access groups. - - Configures a new user with limited permissions (`isAdmin` and `isManager` - set to `False`) and assigns all available access groups as default and - project-specific groups. - - Creates a new API connection using the artist user's credentials - (`username` and `password`) and logs in with it. + - Checks if the project exists; if not, it creates one with specified + `TEST_PROJECT_NAME` and `TEST_PROJECT_CODE`. + - Establishes a server API connection and retrieves the list + of available access groups. 
+ - Configures a new user with limited permissions (`isAdmin` and + `isManager` set to `False`) and assigns all available access groups + as default and project-specific groups. + - Creates a new API connection using the artist user's credentials + (`username` and `password`) and logs in with it. Returns: - new_api: A `ServerAPI` instance authenticated with the artist user's + new_api: A `ServerAPI` instance authenticated with the artist user's credentials, ready to use in tests. """ @@ -1121,7 +1173,7 @@ def api_artist_user(): project = create_project(TEST_PROJECT_NAME, TEST_PROJECT_CODE) api = get_server_api_connection() - + username = "testUser" password = "testUserPassword" response = api.get("accessGroups/_") @@ -1148,19 +1200,19 @@ def api_artist_user(): def test_server_restart_as_user(api_artist_user): - """Tests that a non-admin artist user is not permitted to trigger a server restart. + """Tests that a non-admin artist user is not permitted to trigger a server + restart. Verifies: - - An attempt to call `trigger_server_restart` as a non-admin artist user - raises an exception, ensuring that only users with the appropriate - permissions (e.g., admins) can perform server restart operations. + - An attempt to call `trigger_server_restart` as a non-admin artist + user raises an exception, ensuring that only users with the + appropriate permissions (e.g., admins) can perform server restart + operations. Notes: - - The test checks the access control around the `trigger_server_restart` - method to confirm that only authorized users can perform critical actions - like server restarts. + - The exception is not specified as there is a todo to raise more + specific exception. """ with pytest.raises(Exception): api_artist_user.trigger_server_restart() - From 8f19047a6e738530b5b210cd2dd301e1cead2cd8 Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Thu, 14 Nov 2024 16:42:21 +0100 Subject: [PATCH 052/135] Code adjust for pass the linting check --- tests/resources/addon/package.py | 2 +- tests/resources/addon/server/__init__.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/resources/addon/package.py b/tests/resources/addon/package.py index 649526135..2143cebcd 100644 --- a/tests/resources/addon/package.py +++ b/tests/resources/addon/package.py @@ -6,4 +6,4 @@ # ayon_launcher_version = ">=1.0.2" ayon_required_addons = {} -ayon_compatible_addons = {} \ No newline at end of file +ayon_compatible_addons = {} diff --git a/tests/resources/addon/server/__init__.py b/tests/resources/addon/server/__init__.py index 0f794cbcf..e330d93a2 100644 --- a/tests/resources/addon/server/__init__.py +++ b/tests/resources/addon/server/__init__.py @@ -9,11 +9,11 @@ def initialize(self): self.get_test, method="GET", ) - + async def get_test( self, user: CurrentUser, ): """Return a random folder from the database""" return { "success": True, - } + } From 865303cb960b72e4e1fcf33907531cad144b0585 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 15 Nov 2024 16:04:22 +0100 Subject: [PATCH 053/135] added more targeted functions to read file content --- ayon_api/utils.py | 40 ++++++++++++++++++++++++++++++++-------- 1 file changed, 32 insertions(+), 8 deletions(-) diff --git a/ayon_api/utils.py b/ayon_api/utils.py index 0346fb4fa..d418ef7e7 100644 --- a/ayon_api/utils.py +++ b/ayon_api/utils.py @@ -767,7 +767,11 @@ def _get_media_mime_type_from_ftyp(content): return None -def get_media_mime_type_for_content(content: bytes) -> Optional[str]: 
+def _get_media_mime_type_for_content_base(content: bytes) -> Optional[str]: + """Determine Mime-Type of a file. + + Use header of the file to determine mime type (needs 12 bytes). + """ content_len = len(content) # Pre-validation (largest definition check) # - hopefully there cannot be media defined in less than 12 bytes @@ -790,10 +794,6 @@ def get_media_mime_type_for_content(content: bytes) -> Optional[str]: if content[0:4] == b"\211PNG": return "image/png" - # SVG - if b'xmlns="http://www.w3.org/2000/svg"' in content: - return "image/svg+xml" - # JPEG, JFIF or Exif if ( content[0:4] == b"\xff\xd8\xff\xdb" @@ -820,6 +820,32 @@ def get_media_mime_type_for_content(content: bytes) -> Optional[str]: return None +def _get_svg_mime_type(content: bytes) -> Optional[str]: + # SVG + if b'xmlns="http://www.w3.org/2000/svg"' in content: + return "image/svg+xml" + return None + + +def get_media_mime_type_for_content(content: bytes) -> Optional[str]: + mime_type = _get_media_mime_type_for_content_base(content) + if mime_type is not None: + return mime_type + return _get_svg_mime_type(content) + + +def get_media_mime_type_for_stream(stream) -> Optional[str]: + # Read only 12 bytes to determine mime type + content = stream.read(12) + if len(content) < 12: + return None + mime_type = _get_media_mime_type_for_content_base(content) + if mime_type is None: + content += stream.read() + mime_type = _get_svg_mime_type(content) + return mime_type + + def get_media_mime_type(filepath: str) -> Optional[str]: """Determine Mime-Type of a file. @@ -834,9 +860,7 @@ def get_media_mime_type(filepath: str) -> Optional[str]: return None with open(filepath, "rb") as stream: - content = stream.read() - - return get_media_mime_type_for_content(content) + return get_media_mime_type_for_stream(stream) def take_web_action_event( From 8b611b1ab205661b1cb08fbf987268e9cd4aaa40 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 15 Nov 2024 16:06:22 +0100 Subject: [PATCH 054/135] implement delete event method --- ayon_api/server_api.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 2e151ffc9..30da35ac2 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -1620,6 +1620,23 @@ def dispatch_event( response.raise_for_status() return response + def delete_event(self, event_id: str): + """Delete event by id. + + Supported since AYON server 1.6.0. + + Args: + event_id (str): Event id. + + Returns: + RestApiResponse: Response from server. 
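A call sketch for the new method; the event id is a placeholder and, as noted above, the server must be 1.6.0 or newer:

import ayon_api

con = ayon_api.get_server_api_connection()
con.delete_event("0d1b2c3d4e5f...")     # placeholder event id
# 'delete_event' already calls 'raise_for_status', so an unknown id
# or an older server raises instead of failing silently.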
+ + """ + response = self.delete(f"events/{event_id}") + response.raise_for_status() + return response + + def enroll_event_job( self, source_topic, From 7370311842bc95c2d8ca63b352e64362dfd325f1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 15 Nov 2024 16:08:19 +0100 Subject: [PATCH 055/135] added delete_event to public methods --- ayon_api/__init__.py | 2 ++ ayon_api/_api.py | 16 ++++++++++++++++ 2 files changed, 18 insertions(+) diff --git a/ayon_api/__init__.py b/ayon_api/__init__.py index 2ddbe9e03..9eab4ee4a 100644 --- a/ayon_api/__init__.py +++ b/ayon_api/__init__.py @@ -68,6 +68,7 @@ get_events, update_event, dispatch_event, + delete_event, enroll_event_job, download_file_to_stream, download_file, @@ -300,6 +301,7 @@ "get_events", "update_event", "dispatch_event", + "delete_event", "enroll_event_job", "download_file_to_stream", "download_file", diff --git a/ayon_api/_api.py b/ayon_api/_api.py index 0ece9b708..898f8b634 100644 --- a/ayon_api/_api.py +++ b/ayon_api/_api.py @@ -803,6 +803,22 @@ def dispatch_event(*args, **kwargs): return con.dispatch_event(*args, **kwargs) +def delete_event(*args, **kwargs): + """Delete event by id. + + Supported since AYON server 1.6.0. + + Args: + event_id (str): Event id. + + Returns: + RestApiResponse: Response from server. + + """ + con = get_server_api_connection() + return con.delete_event(*args, **kwargs) + + def enroll_event_job(*args, **kwargs): """Enroll job based on events. From 20eae8af322b147590c56738dc70d24397e8b1f5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 15 Nov 2024 16:49:59 +0100 Subject: [PATCH 056/135] base implementation for activities graphql --- ayon_api/constants.py | 10 +++++ ayon_api/graphql_queries.py | 44 +++++++++++++++++++++ ayon_api/server_api.py | 76 ++++++++++++++++++++++++++++++++++++- 3 files changed, 129 insertions(+), 1 deletion(-) diff --git a/ayon_api/constants.py b/ayon_api/constants.py index 594155706..99a6247f2 100644 --- a/ayon_api/constants.py +++ b/ayon_api/constants.py @@ -184,3 +184,13 @@ "description", "author", } + +DEFAULT_ACTIVITY_FIELDS = { + "activityId", + "activityType", + "activityData", + "body", + "entityId", + "entityType", + "author.name", +} diff --git a/ayon_api/graphql_queries.py b/ayon_api/graphql_queries.py index 8363d1ece..bc10dc6d8 100644 --- a/ayon_api/graphql_queries.py +++ b/ayon_api/graphql_queries.py @@ -636,3 +636,47 @@ def users_graphql_query(fields): for k, v in value.items(): query_queue.append((k, v, field)) return query + + +def activities_graphql_query(fields): + query = GraphQlQuery("Activities") + project_name_var = query.add_variable("projectName", "String!") + activity_ids_var = query.add_variable("activityIds", "[String]") + activity_types_var = query.add_variable("activityTypes", "[String]") + entity_ids_var = query.add_variable("entityIds", "[String]") + entity_names_var = query.add_variable("entityNames", "[String]") + entity_type_var = query.add_variable("entityType", "String!") + changed_after_var = query.add_variable("changedAfter", "String!") + changed_before_var = query.add_variable("changedBefore", "String!") + reference_types_var = query.add_variable("referenceTypes", "String!") + + project_field = query.add_field_with_edges("project") + project_field.set_filter("name", project_name_var) + + activities_field = project_field.add_field_with_edges("activities") + activities_field.set_filter("activityIds", activity_ids_var) + 
activities_field.set_filter("activityTypes", activity_types_var) + activities_field.set_filter("entityIds", entity_ids_var) + activities_field.set_filter("entityNames", entity_names_var) + activities_field.set_filter("entityType", entity_type_var) + activities_field.set_filter("changedAfter", changed_after_var) + activities_field.set_filter("changedBefore", changed_before_var) + activities_field.set_filter("referenceTypes", reference_types_var) + + nested_fields = fields_to_dict(set(fields)) + + query_queue = collections.deque() + for key, value in nested_fields.items(): + query_queue.append((key, value, activities_field)) + + while query_queue: + item = query_queue.popleft() + key, value, parent = item + field = parent.add_field(key) + if value is FIELD_VALUE: + continue + + for k, v in value.items(): + query_queue.append((k, v, field)) + + return query diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 30da35ac2..77222f227 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -16,7 +16,7 @@ import warnings import itertools from contextlib import contextmanager -from typing import Optional +from typing import Optional, Iterable try: from http import HTTPStatus @@ -49,6 +49,7 @@ REPRESENTATION_FILES_FIELDS, DEFAULT_WORKFILE_INFO_FIELDS, DEFAULT_EVENT_FIELDS, + DEFAULT_ACTIVITY_FIELDS, DEFAULT_USER_FIELDS, DEFAULT_LINK_FIELDS, ) @@ -68,6 +69,7 @@ workfiles_info_graphql_query, events_graphql_query, users_graphql_query, + activities_graphql_query, ) from .exceptions import ( FailedOperations, @@ -1729,6 +1731,75 @@ def enroll_event_job( return response.data + def get_activities( + self, + project_name: str, + activity_ids: Optional[Iterable[str]] = None, + activity_types: Optional[Iterable[str]] = None, + entity_ids: Optional[Iterable[str]] = None, + entity_names: Optional[Iterable[str]] = None, + entity_type: Optional[str] = None, + changed_after: Optional[str] = None, + changed_before: Optional[str] = None, + reference_types: Optional[Iterable[str]] = None, + fields: Optional[Iterable[str]] = None, + ): + """Get activities from server with filtering options. + + Args: + project_name (str): Project on which event happened. + activity_ids (Optional[Iterable[str]]): Activity ids. + activity_types (Optional[Iterable[str]]): Activity types. + entity_ids (Optional[Iterable[str]]): Entity ids. + entity_names (Optional[Iterable[str]]): Entity names. + entity_type (Optional[str]): Entity type. + changed_after (Optional[str]): Return only activities changed + after given iso datetime string. + changed_before (Optional[str]): Return only activities changed + before given iso datetime string. + reference_types (Optional[Iterable[str]]): Reference types. + fields (Optional[Iterable[str]]): Fields that should be received + for each activity. + + Returns: + Generator[dict[str, Any]]: Available activities matching filters. 
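Roughly how the generator is meant to be consumed; the project name and entity id below are placeholders:

import ayon_api

con = ayon_api.get_server_api_connection()
for activity in con.get_activities(
    "demo_Commercial",                  # placeholder project
    activity_types={"comment"},
    entity_ids={"1a2b3c4d..."},         # placeholder entity id
    reference_types={"origin"},
):
    print(activity["activityType"], activity.get("body"))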
+ + """ + if not project_name: + return + filters = { + "projectName": project_name, + } + + if not _prepare_list_filters( + filters, + ("activityIds", activity_ids), + ("activityTypes", activity_types), + ("entityIds", entity_ids), + ("entityNames", entity_names), + ("referenceTypes", reference_types), + ): + return + + for filter_key, filter_value in ( + ("entityType", entity_type), + ("changedAfter", changed_after), + ("changedBefore", changed_before), + ): + if filter_value is not None: + filters[filter_key] = filter_value + + if not fields: + fields = self.get_default_fields_for_type("activity") + + query = activities_graphql_query(set(fields)) + for attr, filter_value in filters.items(): + query.set_variable_value(attr, filter_value) + + for parsed_data in query.continuous_query(self): + for event in parsed_data["activities"]: + yield event + def _endpoint_to_url( self, endpoint: str, @@ -2306,6 +2377,9 @@ def get_default_fields_for_type(self, entity_type): if entity_type == "event": return set(DEFAULT_EVENT_FIELDS) + if entity_type == "activity": + return set(DEFAULT_ACTIVITY_FIELDS) + if entity_type == "project": entity_type_defaults = set(DEFAULT_PROJECT_FIELDS) if not self.graphql_allows_data_in_query: From db3680d4b82041e0e8f1b74bd53a25e4a18d3efd Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 15 Nov 2024 17:02:05 +0100 Subject: [PATCH 057/135] small fixes --- ayon_api/graphql_queries.py | 2 +- ayon_api/server_api.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ayon_api/graphql_queries.py b/ayon_api/graphql_queries.py index bc10dc6d8..e44cd07bc 100644 --- a/ayon_api/graphql_queries.py +++ b/ayon_api/graphql_queries.py @@ -650,7 +650,7 @@ def activities_graphql_query(fields): changed_before_var = query.add_variable("changedBefore", "String!") reference_types_var = query.add_variable("referenceTypes", "String!") - project_field = query.add_field_with_edges("project") + project_field = query.add_field("project") project_field.set_filter("name", project_name_var) activities_field = project_field.add_field_with_edges("activities") diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 77222f227..14023265e 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -1797,7 +1797,7 @@ def get_activities( query.set_variable_value(attr, filter_value) for parsed_data in query.continuous_query(self): - for event in parsed_data["activities"]: + for event in parsed_data["project"]["activities"]: yield event def _endpoint_to_url( From bab024e679e3df93fd93946c0b4230e5a8385fb6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 15 Nov 2024 17:44:26 +0100 Subject: [PATCH 058/135] added some typehints --- ayon_api/server_api.py | 28 +++++++++++++++++++++------- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 14023265e..b41524da4 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -16,7 +16,8 @@ import warnings import itertools from contextlib import contextmanager -from typing import Optional, Iterable +import typing +from typing import Optional, Iterable, Generator, Dict, List, Any try: from http import HTTPStatus @@ -97,6 +98,19 @@ get_media_mime_type, ) +if typing.TYPE_CHECKING: + from typing import Literal + + ActivityType = Literal[ + "comment", + "watch", + "reviewable", + "status.change", + "assignee.add", + "assignee.remove", + "version.publish" + ] + PatternType = 
type(re.compile("")) JSONDecodeError = getattr(json, "JSONDecodeError", ValueError) # This should be collected from server schema @@ -1735,7 +1749,7 @@ def get_activities( self, project_name: str, activity_ids: Optional[Iterable[str]] = None, - activity_types: Optional[Iterable[str]] = None, + activity_types: Optional[Iterable["ActivityType"]] = None, entity_ids: Optional[Iterable[str]] = None, entity_names: Optional[Iterable[str]] = None, entity_type: Optional[str] = None, @@ -1743,13 +1757,13 @@ def get_activities( changed_before: Optional[str] = None, reference_types: Optional[Iterable[str]] = None, fields: Optional[Iterable[str]] = None, - ): + ) -> Generator[Dict[str, Any], None, None]: """Get activities from server with filtering options. Args: - project_name (str): Project on which event happened. + project_name (str): Project on which activities happened. activity_ids (Optional[Iterable[str]]): Activity ids. - activity_types (Optional[Iterable[str]]): Activity types. + activity_types (Optional[Iterable[ActivityType]]): Activity types. entity_ids (Optional[Iterable[str]]): Entity ids. entity_names (Optional[Iterable[str]]): Entity names. entity_type (Optional[str]): Entity type. @@ -1797,8 +1811,8 @@ def get_activities( query.set_variable_value(attr, filter_value) for parsed_data in query.continuous_query(self): - for event in parsed_data["project"]["activities"]: - yield event + for activity in parsed_data["project"]["activities"]: + yield activity def _endpoint_to_url( self, From aff80627ea971f8224da3386a633ec00f89746f1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 15 Nov 2024 17:45:05 +0100 Subject: [PATCH 059/135] added method to get single activity --- ayon_api/server_api.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index b41524da4..2e2da0ccc 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -1814,6 +1814,33 @@ def get_activities( for activity in parsed_data["project"]["activities"]: yield activity + def get_activity_by_id( + self, + project_name: str, + activity_id: str, + fields: Optional[Iterable[str]] = None, + ) -> Optional[Dict[str, Any]]: + """Get activity by id. + + Args: + project_name (str): Project on which activity happened. + activity_id (str): Activity id. + fields (Optional[Iterable[str]]): Fields that should be received + for each activity. + + Returns: + Optional[Dict[str, Any]]: Activity data or None if activity is not + found. 
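A lookup sketch for the convenience wrapper; the project and activity id are placeholders:

import ayon_api

con = ayon_api.get_server_api_connection()
activity = con.get_activity_by_id(
    "demo_Commercial",                  # placeholder project
    "9f8e7d6c5b4a...",                  # placeholder activity id
    fields={"activityId", "activityType", "body", "author.name"},
)
print(activity)                         # None when the id does not exist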
+ + """ + for activity in self.get_activities( + project_name=project_name, + activity_ids={activity_id}, + fields=fields, + ): + return activity + return None + def _endpoint_to_url( self, endpoint: str, From 6e4e978dc6719f906b9c96d613b4b0cc9217eae5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 15 Nov 2024 17:45:15 +0100 Subject: [PATCH 060/135] added method to create activity --- ayon_api/server_api.py | 50 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 2e2da0ccc..47f6b8d4d 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -1841,6 +1841,56 @@ def get_activity_by_id( return activity return None + def create_activity( + self, + project_name: str, + entity_id: str, + entity_type: str, + activity_type: "ActivityType", + activity_id: Optional[str] = None, + body: Optional[str] = None, + file_ids: Optional[List[str]] = None, + timestamp: Optional[str] = None, + data: Optional[Dict[str, Any]] = None, + ): + """Create activity on a project. + + Args: + project_name (str): Project on which activity happened. + entity_id (str): Entity id. + entity_type (str): Entity type. + activity_type (ActivityType): Activity type. + activity_id (Optional[str]): Activity id. + body (Optional[str]): Activity body. + file_ids (Optional[List[str]]): List of file ids attached + to activity. + timestamp (Optional[str]): Activity timestamp. + data (Optional[Dict[str, Any]]): Additional data. + + Returns: + Dict[str, str]: Data with activity id. + + """ + post_data = { + "activityType": activity_type, + } + for key, value in ( + ("id", activity_id), + ("body", body), + ("files", file_ids), + ("timestamp", timestamp), + ("data", data), + ): + if value is not None: + post_data[key] = value + + response = self.post( + f"projects/{project_name}/{entity_type}/{entity_id}/activities", + **post_data + ) + response.raise_for_status() + return response.data + def _endpoint_to_url( self, endpoint: str, From e3497cf89d5710cd2179498dd199c501080d3c5f Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 15 Nov 2024 18:05:38 +0100 Subject: [PATCH 061/135] added update and delete activity methods --- ayon_api/server_api.py | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 47f6b8d4d..0b898283f 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -1891,6 +1891,47 @@ def create_activity( response.raise_for_status() return response.data + def update_activity( + self, + project_name: str, + activity_id: str, + body: str, + file_ids: Optional[List[str]] = None, + ): + """Update activity by id. + + Args: + project_name (str): Project on which activity happened. + activity_id (str): Activity id. + body (str): Activity body. + file_ids (Optional[List[str]]): List of file ids attached + to activity. + + """ + data = { + "body": body, + } + if file_ids is not None: + data["files"] = file_ids + response = self.delete( + f"projects/{project_name}/activities/{activity_id}", + **data + ) + response.raise_for_status() + + def delete_activity(self, project_name: str, activity_id: str): + """Delete activity by id. + + Args: + project_name (str): Project on which activity happened. + activity_id (str): Activity id to remove. 
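Taken together, these methods cover a comment's lifecycle. A sketch with placeholder ids; the response key holding the new activity id is assumed to be "id", and the singular entity type name follows the convention used elsewhere in ayon_api:

import ayon_api

con = ayon_api.get_server_api_connection()
project = "demo_Commercial"             # placeholder project
folder_id = "1a2b3c4d..."               # placeholder folder id

created = con.create_activity(
    project, folder_id, "folder",
    activity_type="comment",
    body="First pass looks good.",
)
activity_id = created["id"]             # assumed response key

con.update_activity(project, activity_id, body="Edited: looks good.")
con.delete_activity(project, activity_id)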
+ + """ + response = self.delete( + f"projects/{project_name}/activities/{activity_id}" + ) + response.raise_for_status() + def _endpoint_to_url( self, endpoint: str, From 44cb134b71d8772f9652de5a1b8816b70471d435 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 15 Nov 2024 18:13:50 +0100 Subject: [PATCH 062/135] added activity methods to public api --- ayon_api/__init__.py | 10 +++++ ayon_api/_api.py | 94 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 104 insertions(+) diff --git a/ayon_api/__init__.py b/ayon_api/__init__.py index 9eab4ee4a..c3e008886 100644 --- a/ayon_api/__init__.py +++ b/ayon_api/__init__.py @@ -70,6 +70,11 @@ dispatch_event, delete_event, enroll_event_job, + get_activities, + get_activity_by_id, + create_activity, + update_activity, + delete_activity, download_file_to_stream, download_file, upload_file_from_stream, @@ -303,6 +308,11 @@ "dispatch_event", "delete_event", "enroll_event_job", + "get_activities", + "get_activity_by_id", + "create_activity", + "update_activity", + "delete_activity", "download_file_to_stream", "download_file", "upload_file_from_stream", diff --git a/ayon_api/_api.py b/ayon_api/_api.py index 898f8b634..63bb0c705 100644 --- a/ayon_api/_api.py +++ b/ayon_api/_api.py @@ -877,6 +877,100 @@ def enroll_event_job(*args, **kwargs): return con.enroll_event_job(*args, **kwargs) +def get_activities(*args, **kwargs): + """Get activities from server with filtering options. + + Args: + project_name (str): Project on which activities happened. + activity_ids (Optional[Iterable[str]]): Activity ids. + activity_types (Optional[Iterable[ActivityType]]): Activity types. + entity_ids (Optional[Iterable[str]]): Entity ids. + entity_names (Optional[Iterable[str]]): Entity names. + entity_type (Optional[str]): Entity type. + changed_after (Optional[str]): Return only activities changed + after given iso datetime string. + changed_before (Optional[str]): Return only activities changed + before given iso datetime string. + reference_types (Optional[Iterable[str]]): Reference types. + fields (Optional[Iterable[str]]): Fields that should be received + for each activity. + + Returns: + Generator[dict[str, Any]]: Available activities matching filters. + + """ + con = get_server_api_connection() + return con.get_activities(*args, **kwargs) + + +def get_activity_by_id(*args, **kwargs): + """Get activity by id. + + Args: + project_name (str): Project on which activity happened. + activity_id (str): Activity id. + fields (Optional[Iterable[str]]): Fields that should be received + for each activity. + + Returns: + Optional[Dict[str, Any]]: Activity data or None if activity is not + found. + + """ + con = get_server_api_connection() + return con.get_activity_by_id(*args, **kwargs) + + +def create_activity(*args, **kwargs): + """Create activity on a project. + + Args: + project_name (str): Project on which activity happened. + entity_id (str): Entity id. + entity_type (str): Entity type. + activity_type (ActivityType): Activity type. + activity_id (Optional[str]): Activity id. + body (Optional[str]): Activity body. + file_ids (Optional[List[str]]): List of file ids attached + to activity. + timestamp (Optional[str]): Activity timestamp. + data (Optional[Dict[str, Any]]): Additional data. + + Returns: + Dict[str, str]: Data with activity id. + + """ + con = get_server_api_connection() + return con.create_activity(*args, **kwargs) + + +def update_activity(*args, **kwargs): + """Update activity by id. 
+ + Args: + project_name (str): Project on which activity happened. + activity_id (str): Activity id. + body (str): Activity body. + file_ids (Optional[List[str]]): List of file ids attached + to activity. + + """ + con = get_server_api_connection() + return con.update_activity(*args, **kwargs) + + +def delete_activity(*args, **kwargs): + """Delete activity by id. + + Args: + project_name (str): Project on which activity happened. + activity_id (str): Activity id to remove. + + """ + con = get_server_api_connection() + return con.delete_activity(*args, **kwargs) + + def download_file_to_stream(*args, **kwargs): """Download file from AYON server to IOStream. From 2f3b79c418b7c02fb89d4335a0f84021ed79865c Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 15 Nov 2024 18:37:42 +0100 Subject: [PATCH 063/135] change order of functions --- ayon_api/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ayon_api/__init__.py b/ayon_api/__init__.py index 9eab4ee4a..9a099d4cc 100644 --- a/ayon_api/__init__.py +++ b/ayon_api/__init__.py @@ -29,6 +29,7 @@ change_token, set_environments, get_server_api_connection, + get_default_settings_variant, get_base_url, get_rest_url, get_ssl_verify, @@ -44,7 +45,6 @@ set_site_id, get_client_version, set_client_version, - get_default_settings_variant, set_default_settings_variant, get_sender, set_sender, @@ -262,6 +262,7 @@ "change_token", "set_environments", "get_server_api_connection", + "get_default_settings_variant", "get_base_url", "get_rest_url", "get_ssl_verify", @@ -277,7 +278,6 @@ "set_site_id", "get_client_version", "set_client_version", - "get_default_settings_variant", "set_default_settings_variant", "get_sender", "set_sender", From 30941f07041006b8fa3a1a5ddbb940728af82f10 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 15 Nov 2024 18:51:54 +0100 Subject: [PATCH 064/135] implemented methods to delete addons --- ayon_api/server_api.py | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 30da35ac2..50465658f 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -2849,6 +2849,47 @@ def upload_dependency_package( route = self._get_dependency_package_route(dst_filename) self.upload_file(route, src_filepath, progress=progress) + def delete_addon(self, addon_name: str, purge: Optional[bool] = None): + """Delete addon from server. + + Delete all versions of addon from server. + + Args: + addon_name (str): Addon name. + purge (Optional[bool]): Purge all data related to the addon. + + """ + query_data = {} + if purge is not None: + query_data["purge"] = "true" if purge else "false" + query = prepare_query_string(query_data) + + response = self.delete(f"addons/{addon_name}{query}") + response.raise_for_status() + + def delete_addon_version( + self, + addon_name: str, + addon_version: str, + purge: Optional[bool] = None, + ): + """Delete addon version from server. + + Delete all versions of addon from server. + + Args: + addon_name (str): Addon name. + addon_version (str): Addon version. + purge (Optional[bool]): Purge all data related to the addon. 
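A removal sketch for the two new helpers; the addon name and version reuse the values from the addon test earlier in this series, and 'purge' also wipes data related to the addon as the docstrings state:

import ayon_api

con = ayon_api.get_server_api_connection()
# Remove a single version, keeping related data on the server.
con.delete_addon_version("tests", "1.0.0", purge=False)
# Remove every version of the addon and purge its data.
con.delete_addon("tests", purge=True)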
+ + """ + query_data = {} + if purge is not None: + query_data["purge"] = "true" if purge else "false" + query = prepare_query_string(query_data) + response = self.delete(f"addons/{addon_name}/{addon_version}{query}") + response.raise_for_status() + def upload_addon_zip(self, src_filepath, progress=None): """Upload addon zip file to server. From 4204217e319d15be08c5bf1183ed8f833a6b3d26 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 15 Nov 2024 18:52:18 +0100 Subject: [PATCH 065/135] added new method to public api --- ayon_api/__init__.py | 4 ++++ ayon_api/_api.py | 29 +++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+) diff --git a/ayon_api/__init__.py b/ayon_api/__init__.py index 9a099d4cc..7657f2b5c 100644 --- a/ayon_api/__init__.py +++ b/ayon_api/__init__.py @@ -103,6 +103,8 @@ delete_dependency_package, download_dependency_package, upload_dependency_package, + delete_addon, + delete_addon_version, upload_addon_zip, get_bundles, create_bundle, @@ -336,6 +338,8 @@ "delete_dependency_package", "download_dependency_package", "upload_dependency_package", + "delete_addon", + "delete_addon_version", "upload_addon_zip", "get_bundles", "create_bundle", diff --git a/ayon_api/_api.py b/ayon_api/_api.py index 898f8b634..8e855a082 100644 --- a/ayon_api/_api.py +++ b/ayon_api/_api.py @@ -1484,6 +1484,35 @@ def upload_dependency_package(*args, **kwargs): return con.upload_dependency_package(*args, **kwargs) +def delete_addon(*args, **kwargs): + """Delete addon from server. + + Delete all versions of addon from server. + + Args: + addon_name (str): Addon name. + purge (Optional[bool]): Purge all data related to the addon. + + """ + con = get_server_api_connection() + return con.delete_addon(*args, **kwargs) + + +def delete_addon_version(*args, **kwargs): + """Delete addon version from server. + + Delete all versions of addon from server. + + Args: + addon_name (str): Addon name. + addon_version (str): Addon version. + purge (Optional[bool]): Purge all data related to the addon. + + """ + con = get_server_api_connection() + return con.delete_addon_version(*args, **kwargs) + + def upload_addon_zip(*args, **kwargs): """Upload addon zip file to server. From 05427f0a22a22eb397f57aa722036ec6639a54ba Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 18 Nov 2024 15:29:05 +0100 Subject: [PATCH 066/135] update activities patch with newer changes --- ayon_api/server_api.py | 27 ++++++++++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 0b898283f..a5cfb0c71 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -1897,6 +1897,8 @@ def update_activity( activity_id: str, body: str, file_ids: Optional[List[str]] = None, + append_file_ids: Optional[bool] = False, + data: Optional[Dict[str, Any]] = None, ): """Update activity by id. @@ -1906,16 +1908,35 @@ def update_activity( body (str): Activity body. file_ids (Optional[List[str]]): List of file ids attached to activity. + append_file_ids (Optional[bool]): Append file ids to existing + list of file ids. + data (Optional[Dict[str, Any]]): Update data in activity. 
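With the extended signature, attachments can be appended rather than replaced; per the version check in the body below, 'append_file_ids' needs a server newer than 1.5.6. The ids in this sketch are placeholders:

import ayon_api

con = ayon_api.get_server_api_connection()
con.update_activity(
    "demo_Commercial",                  # placeholder project
    "9f8e7d6c5b4a...",                  # placeholder activity id
    body="Edited comment text",
    file_ids=["f0e1d2c3..."],           # placeholder file id
    append_file_ids=True,               # keep already attached files
)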
""" - data = { + update_data = { "body": body, } + major, minor, patch, _, _ = self.server_version_tuple + new_patch_model = (major, minor, patch) > (1, 5, 6) if file_ids is not None: - data["files"] = file_ids + update_data["files"] = file_ids + if new_patch_model: + update_data["appendFiles"] = append_file_ids + elif append_file_ids: + raise ValueError( + "Append file ids is supported after server version 1.5.6." + ) + + if data is not None: + if not new_patch_model: + raise ValueError( + "Update of data is supported after server version 1.5.6." + ) + update_data["data"] = data + response = self.delete( f"projects/{project_name}/activities/{activity_id}", - **data + **update_data ) response.raise_for_status() From a53441676dd256093a456eb98190840d737629af Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 18 Nov 2024 15:30:47 +0100 Subject: [PATCH 067/135] fix graphql filter types --- ayon_api/graphql_queries.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ayon_api/graphql_queries.py b/ayon_api/graphql_queries.py index e44cd07bc..765ed96a4 100644 --- a/ayon_api/graphql_queries.py +++ b/ayon_api/graphql_queries.py @@ -641,10 +641,10 @@ def users_graphql_query(fields): def activities_graphql_query(fields): query = GraphQlQuery("Activities") project_name_var = query.add_variable("projectName", "String!") - activity_ids_var = query.add_variable("activityIds", "[String]") - activity_types_var = query.add_variable("activityTypes", "[String]") - entity_ids_var = query.add_variable("entityIds", "[String]") - entity_names_var = query.add_variable("entityNames", "[String]") + activity_ids_var = query.add_variable("activityIds", "[String!]") + activity_types_var = query.add_variable("activityTypes", "[String!]") + entity_ids_var = query.add_variable("entityIds", "[String!]") + entity_names_var = query.add_variable("entityNames", "[String!]") entity_type_var = query.add_variable("entityType", "String!") changed_after_var = query.add_variable("changedAfter", "String!") changed_before_var = query.add_variable("changedBefore", "String!") From 5f22a12d71fc61cf25ce019a467291d973f8173b Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 18 Nov 2024 16:11:17 +0100 Subject: [PATCH 068/135] allow to not update 'body' --- ayon_api/server_api.py | 31 +++++++++++++++++++++++++------ 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index a5cfb0c71..927925117 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -110,6 +110,13 @@ "assignee.remove", "version.publish" ] + ActivityReferenceType = Literal[ + "origin", + "mention", + "author", + "relation", + "watching", + ] PatternType = type(re.compile("")) JSONDecodeError = getattr(json, "JSONDecodeError", ValueError) @@ -1755,7 +1762,7 @@ def get_activities( entity_type: Optional[str] = None, changed_after: Optional[str] = None, changed_before: Optional[str] = None, - reference_types: Optional[Iterable[str]] = None, + reference_types: Optional[Iterable["ActivityReferenceType"]] = None, fields: Optional[Iterable[str]] = None, ) -> Generator[Dict[str, Any], None, None]: """Get activities from server with filtering options. @@ -1771,7 +1778,8 @@ def get_activities( after given iso datetime string. changed_before (Optional[str]): Return only activities changed before given iso datetime string. - reference_types (Optional[Iterable[str]]): Reference types. 
+ reference_types (Optional[Iterable[ActivityReferenceType]]): + Reference types filter. Defaults to `['origin']`. fields (Optional[Iterable[str]]): Fields that should be received for each activity. @@ -1784,6 +1792,8 @@ def get_activities( filters = { "projectName": project_name, } + if reference_types is None: + reference_types = {"origin"} if not _prepare_list_filters( filters, @@ -1818,6 +1828,7 @@ def get_activity_by_id( self, project_name: str, activity_id: str, + reference_types: Optional[Iterable["ActivityReferenceType"]] = None, fields: Optional[Iterable[str]] = None, ) -> Optional[Dict[str, Any]]: """Get activity by id. @@ -1836,6 +1847,7 @@ def get_activity_by_id( for activity in self.get_activities( project_name=project_name, activity_ids={activity_id}, + reference_types=reference_types, fields=fields, ): return activity @@ -1895,7 +1907,7 @@ def update_activity( self, project_name: str, activity_id: str, - body: str, + body: Optional[str] = None, file_ids: Optional[List[str]] = None, append_file_ids: Optional[bool] = False, data: Optional[Dict[str, Any]] = None, @@ -1913,11 +1925,18 @@ def update_activity( data (Optional[Dict[str, Any]]): Update data in activity. """ - update_data = { - "body": body, - } + update_data = {} major, minor, patch, _, _ = self.server_version_tuple new_patch_model = (major, minor, patch) > (1, 5, 6) + if body is None and not new_patch_model: + raise ValueError( + "Update without 'body' is supported" + " after server version 1.5.6." + ) + + if body is not None: + update_data["body"] = body + if file_ids is not None: update_data["files"] = file_ids if new_patch_model: From 63a771bf1ec7d39b80d2b53662658216e2b14343 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 18 Nov 2024 16:37:13 +0100 Subject: [PATCH 069/135] fix type of reference types --- ayon_api/graphql_queries.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ayon_api/graphql_queries.py b/ayon_api/graphql_queries.py index 765ed96a4..999c9e5c2 100644 --- a/ayon_api/graphql_queries.py +++ b/ayon_api/graphql_queries.py @@ -648,7 +648,7 @@ def activities_graphql_query(fields): entity_type_var = query.add_variable("entityType", "String!") changed_after_var = query.add_variable("changedAfter", "String!") changed_before_var = query.add_variable("changedBefore", "String!") - reference_types_var = query.add_variable("referenceTypes", "String!") + reference_types_var = query.add_variable("referenceTypes", "[String!]") project_field = query.add_field("project") project_field.set_filter("name", project_name_var) From b1cde94f1546234b8393ac424097855ced7f9594 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 18 Nov 2024 16:42:36 +0100 Subject: [PATCH 070/135] use patch instead of delete --- ayon_api/server_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 927925117..bc240b38e 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -1953,7 +1953,7 @@ def update_activity( ) update_data["data"] = data - response = self.delete( + response = self.patch( f"projects/{project_name}/activities/{activity_id}", **update_data ) From 31e48d38c2e8f25263002a506b885b49948a3263 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 18 Nov 2024 17:09:52 +0100 Subject: [PATCH 071/135] convert 'activityData' to dictionary --- ayon_api/server_api.py | 3 +++ 1 file changed, 3 
insertions(+) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index bc240b38e..c25bf7850 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -1822,6 +1822,9 @@ def get_activities( for parsed_data in query.continuous_query(self): for activity in parsed_data["project"]["activities"]: + activity_data = activity.get("activityData") + if isinstance(activity_data, str): + activity["activityData"] = json.loads(activity_data) yield activity def get_activity_by_id( From 449f0ea701296558c94eee3b9c9c767b5d5c26ca Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 18 Nov 2024 17:36:24 +0100 Subject: [PATCH 072/135] implemented batch send of activities operations --- ayon_api/server_api.py | 55 +++++++++++++++++++++++++++++++++++++++++- 1 file changed, 54 insertions(+), 1 deletion(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index c25bf7850..4ac5198be 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -8544,6 +8544,59 @@ def send_batch_operations( list[dict[str, Any]]: Operations result with process details. """ + return self._send_batch_operations( + f"projects/{project_name}/operations", + operations, + can_fail, + raise_on_fail, + ) + + def send_activities_batch_operations( + self, + project_name, + operations, + can_fail=False, + raise_on_fail=True + ): + """Post multiple CRUD activities operations to server. + + When multiple changes should be made on server side this is the best + way to go. It is possible to pass multiple operations to process on a + server side and do the changes in a transaction. + + Args: + project_name (str): On which project should be operations + processed. + operations (list[dict[str, Any]]): Operations to be processed. + can_fail (Optional[bool]): Server will try to process all + operations even if one of them fails. + raise_on_fail (Optional[bool]): Raise exception if an operation + fails. You can handle failed operations on your own + when set to 'False'. + + Raises: + ValueError: Operations can't be converted to json string. + FailedOperations: When output does not contain server operations + or 'raise_on_fail' is enabled and any operation fails. + + Returns: + list[dict[str, Any]]: Operations result with process details. 
+ + """ + return self._send_batch_operations( + f"projects/{project_name}/operations/activities", + operations, + can_fail, + raise_on_fail, + ) + + def _send_batch_operations( + self, + uri: str, + operations: List[Dict[str, Any]], + can_fail: bool, + raise_on_fail: bool + ): if not operations: return [] @@ -8576,7 +8629,7 @@ def send_batch_operations( return [] result = self.post( - "projects/{}/operations".format(project_name), + uri, operations=operations_body, canFail=can_fail ) From 6d9dc4f8713e1e5b6cb65af94058353543416c6c Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 18 Nov 2024 17:36:55 +0100 Subject: [PATCH 073/135] updated public api --- ayon_api/__init__.py | 2 ++ ayon_api/_api.py | 36 +++++++++++++++++++++++++++++++++++- 2 files changed, 37 insertions(+), 1 deletion(-) diff --git a/ayon_api/__init__.py b/ayon_api/__init__.py index c3e008886..618201cbf 100644 --- a/ayon_api/__init__.py +++ b/ayon_api/__init__.py @@ -235,6 +235,7 @@ get_representations_links, get_representation_links, send_batch_operations, + send_activities_batch_operations, ) @@ -473,4 +474,5 @@ "get_representations_links", "get_representation_links", "send_batch_operations", + "send_activities_batch_operations", ) diff --git a/ayon_api/_api.py b/ayon_api/_api.py index 63bb0c705..67ee7ff04 100644 --- a/ayon_api/_api.py +++ b/ayon_api/_api.py @@ -891,7 +891,8 @@ def get_activities(*args, **kwargs): after given iso datetime string. changed_before (Optional[str]): Return only activities changed before given iso datetime string. - reference_types (Optional[Iterable[str]]): Reference types. + reference_types (Optional[Iterable[ActivityReferenceType]]): + Reference types filter. Defaults to `['origin']`. fields (Optional[Iterable[str]]): Fields that should be received for each activity. @@ -953,6 +954,9 @@ def update_activity(*args, **kwargs): body (str): Activity body. file_ids (Optional[List[str]]): List of file ids attached to activity. + append_file_ids (Optional[bool]): Append file ids to existing + list of file ids. + data (Optional[Dict[str, Any]]): Update data in activity. """ con = get_server_api_connection() @@ -4509,3 +4513,33 @@ def send_batch_operations(*args, **kwargs): """ con = get_server_api_connection() return con.send_batch_operations(*args, **kwargs) + + +def send_activities_batch_operations(*args, **kwargs): + """Post multiple CRUD activities operations to server. + + When multiple changes should be made on server side this is the best + way to go. It is possible to pass multiple operations to process on a + server side and do the changes in a transaction. + + Args: + project_name (str): On which project should be operations + processed. + operations (list[dict[str, Any]]): Operations to be processed. + can_fail (Optional[bool]): Server will try to process all + operations even if one of them fails. + raise_on_fail (Optional[bool]): Raise exception if an operation + fails. You can handle failed operations on your own + when set to 'False'. + + Raises: + ValueError: Operations can't be converted to json string. + FailedOperations: When output does not contain server operations + or 'raise_on_fail' is enabled and any operation fails. + + Returns: + list[dict[str, Any]]: Operations result with process details. 
+ + """ + con = get_server_api_connection() + return con.send_activities_batch_operations(*args, **kwargs) From 91c96f5c86d981f75484be7fe62453f00dad6233 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 19 Nov 2024 16:27:03 +0100 Subject: [PATCH 074/135] automated api does fill arguments with typehints in '_api.py' --- automated_api.py | 199 ++++++++++++++++++++++++++++++++++------------- 1 file changed, 144 insertions(+), 55 deletions(-) diff --git a/automated_api.py b/automated_api.py index 528702ca3..ee5e661d6 100644 --- a/automated_api.py +++ b/automated_api.py @@ -17,13 +17,15 @@ import sys import re import inspect +import typing # Fake modules to avoid import errors for module_name in ("requests", "unidecode"): sys.modules[module_name] = object() import ayon_api # noqa: E402 -from ayon_api import ServerAPI # noqa: E402 +from ayon_api.server_api import ServerAPI, _PLACEHOLDER # noqa: E402 +from ayon_api.utils import NOT_SET # noqa: E402 EXCLUDED_METHODS = { "get_default_service_username", @@ -101,18 +103,6 @@ def indent_lines(src_str, indent=1): return "\n".join(new_lines) -def split_sig_str(sig_str): - args_str = sig_str[1:-1] - args = [f" {arg.strip()}" for arg in args_str.split(",")] - joined_args = ",\n".join(args) - - return f"(\n{joined_args}\n)" - - -def prepare_func_def_line(attr_name, sig_str): - return f"def {attr_name}{sig_str}:\n" - - def prepare_docstring(func): docstring = inspect.getdoc(func) if not docstring: @@ -124,39 +114,122 @@ def prepare_docstring(func): return f'"""{docstring}{line_char}\n"""' -def prapre_body_sig_str(sig_str): - if "=" not in sig_str: - return sig_str - - args_str = sig_str[1:-1] - args = [] - for arg in args_str.split(","): - arg = arg.strip() - if "=" in arg: - parts = arg.split("=") - parts[1] = parts[0] - arg = "=".join(parts) - args.append(arg) - joined_args = ", ".join(args) - return f"({joined_args})" - - -def prepare_body_parts(attr_name, sig_str): - output = [ - "con = get_server_api_connection()", - ] - body_sig_str = prapre_body_sig_str(sig_str) - return_str = f"return con.{attr_name}{body_sig_str}" - if len(return_str) + 4 <= 79: - output.append(return_str) - return output - - return_str = f"return con.{attr_name}{split_sig_str(body_sig_str)}" - output.append(return_str) - return output - - -def prepare_api_functions(): +def _get_typehint(param, api_globals): + if param.annotation is inspect.Parameter.empty: + return None + + an = param.annotation + if inspect.isclass(an): + return an.__name__ + + typehint = str(an).replace("typing.", "") + try: + # Test if typehint is valid for known '_api' content + exec(f"_: {typehint} = None", api_globals) + except NameError: + typehint = f'"{typehint}"' + return typehint + + +def _add_typehint(param_name, param, api_globals): + typehint = _get_typehint(param, api_globals) + if not typehint: + return param_name + return f"{param_name}: {typehint}" + + +def _kw_default_to_str(param_name, param, api_globals): + if param.default is inspect.Parameter.empty: + return _add_typehint(param_name, param, api_globals) + + default = param.default + if default is _PLACEHOLDER: + default = "_PLACEHOLDER" + elif default is NOT_SET: + default = "NOT_SET" + elif ( + default is not None + and not isinstance(default, (str, bool, int, float)) + ): + raise TypeError("Unknown default value type") + else: + default = repr(default) + typehint = _get_typehint(param, api_globals) + if typehint: + return f"{param_name}: {typehint} = {default}" + return 
f"{param_name}={default}" + + +def sig_params_to_str(sig, param_names, api_globals, indent=0): + pos_only = [] + pos_or_kw = [] + var_positional = None + kw_only = [] + var_keyword = None + for param_name in param_names: + param = sig.parameters[param_name] + if param.kind == inspect.Parameter.POSITIONAL_ONLY: + pos_only.append((param_name, param)) + elif param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD: + pos_or_kw.append((param_name, param)) + elif param.kind == inspect.Parameter.VAR_POSITIONAL: + var_positional = param_name + elif param.kind == inspect.Parameter.KEYWORD_ONLY: + kw_only.append((param_name, param)) + elif param.kind == inspect.Parameter.VAR_KEYWORD: + var_keyword = param_name + + func_params = [] + body_params = [] + for param_name, param in pos_only: + body_params.append(param_name) + func_params.append(_add_typehint(param_name, param, api_globals)) + + if pos_only: + func_params.append("/") + + for param_name, param in pos_or_kw: + body_params.append(f"{param_name}={param_name}") + func_params.append(_kw_default_to_str(param_name, param, api_globals)) + + if var_positional: + body_params.append(f"*{var_positional}") + func_params.append(f"*{var_positional}") + + for param_name, param in kw_only: + body_params.append(f"{param_name}={param_name}") + func_params.append(_kw_default_to_str(param_name, param, api_globals)) + + if var_keyword is not None: + body_params.append(f"**{var_keyword}") + func_params.append(f"**{var_keyword}") + + base_indent_str = " " * indent + param_indent_str = " " * (indent + 4) + + func_params_str = "()" + if func_params: + lines_str = "\n".join([ + f"{param_indent_str}{line}," + for line in func_params + ]) + func_params_str = f"(\n{lines_str}\n{base_indent_str})" + + if sig.return_annotation is not inspect.Signature.empty: + func_params_str += f" -> {sig.return_annotation}" + + body_params_str = "()" + if body_params: + lines_str = "\n".join([ + f"{param_indent_str}{line}," + for line in body_params + ]) + body_params_str = f"(\n{lines_str}\n{base_indent_str})" + + return func_params_str, body_params_str + + +def prepare_api_functions(api_globals): functions = [] for attr_name, attr in ServerAPI.__dict__.items(): if ( @@ -167,21 +240,25 @@ def prepare_api_functions(): continue sig = inspect.signature(attr) - base_sig_str = str(sig) - if base_sig_str == "(self)": - sig_str = "()" - else: - # TODO copy signature from method so IDEs can use it - sig_str = "(*args, **kwargs)" + param_names = list(sig.parameters) + if inspect.isfunction(attr): + param_names.pop(0) + + func_def_params, func_body_params = sig_params_to_str( + sig, param_names, api_globals + ) - func_def = prepare_func_def_line(attr_name, sig_str) + func_def = f"def {attr_name}{func_def_params}:\n" func_body_parts = [] docstring = prepare_docstring(attr) if docstring: func_body_parts.append(docstring) - func_body_parts.extend(prepare_body_parts(attr_name, sig_str)) + func_body_parts.extend([ + "con = get_server_api_connection()", + f"return con.{attr_name}{func_body_params}", + ]) func_body = indent_lines("\n".join(func_body_parts)) full_def = func_def + func_body @@ -216,8 +293,20 @@ def main(): print("(2/5) Parsing current '__init__.py' content") formatting_init_content = prepare_init_without_api(init_filepath) + # Read content of first part of `_api.py` to get global variables + # - disable type checking so imports done only during typechecking are + # not executed + old_value = typing.TYPE_CHECKING + typing.TYPE_CHECKING = False + api_globals = {"__name__": "ayon_api._api"} + 
exec(parts[0], api_globals) + for attr_name in dir(__builtins__): + api_globals[attr_name] = getattr(__builtins__, attr_name) + typing.TYPE_CHECKING = old_value + + # print(api_globals) print("(3/5) Preparing functions body based on 'ServerAPI' class") - result = prepare_api_functions() + result = prepare_api_functions(api_globals) print("(4/5) Store new functions body to '_api.py'") new_content = f"{parts[0]}{AUTOMATED_COMMENT}\n{result}" From 119d5069b5797867573b5945ffa338d35b3e7367 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 19 Nov 2024 16:28:59 +0100 Subject: [PATCH 075/135] apply changes done by automated api --- ayon_api/_api.py | 2646 +++++++++++++++++++++++++++++++++++++++------- 1 file changed, 2285 insertions(+), 361 deletions(-) diff --git a/ayon_api/_api.py b/ayon_api/_api.py index 8de6b1b74..ac3c2d0b5 100644 --- a/ayon_api/_api.py +++ b/ayon_api/_api.py @@ -11,15 +11,17 @@ import os import socket +from typing import Optional from .constants import ( SERVER_URL_ENV_KEY, SERVER_API_ENV_KEY, ) -from .server_api import ServerAPI +from .server_api import ServerAPI, _PLACEHOLDER from .exceptions import FailedServiceInit from .utils import ( - get_default_settings_variant as _get_default_settings_variant + NOT_SET, + get_default_settings_variant as _get_default_settings_variant, ) @@ -369,7 +371,9 @@ def get_ssl_verify(): return con.get_ssl_verify() -def set_ssl_verify(*args, **kwargs): +def set_ssl_verify( + ssl_verify, +): """Change ssl verification state. Args: @@ -378,7 +382,9 @@ def set_ssl_verify(*args, **kwargs): """ con = get_server_api_connection() - return con.set_ssl_verify(*args, **kwargs) + return con.set_ssl_verify( + ssl_verify=ssl_verify, + ) def get_cert(): @@ -392,7 +398,9 @@ def get_cert(): return con.get_cert() -def set_cert(*args, **kwargs): +def set_cert( + cert, +): """Change cert file used for connection to server. Args: @@ -400,7 +408,9 @@ def set_cert(*args, **kwargs): """ con = get_server_api_connection() - return con.set_cert(*args, **kwargs) + return con.set_cert( + cert=cert, + ) def get_timeout(): @@ -414,7 +424,9 @@ def get_timeout(): return con.get_timeout() -def set_timeout(*args, **kwargs): +def set_timeout( + timeout, +): """Change timeout value for requests. Args: @@ -422,7 +434,9 @@ def set_timeout(*args, **kwargs): """ con = get_server_api_connection() - return con.set_timeout(*args, **kwargs) + return con.set_timeout( + timeout=timeout, + ) def get_max_retries(): @@ -436,7 +450,9 @@ def get_max_retries(): return con.get_max_retries() -def set_max_retries(*args, **kwargs): +def set_max_retries( + max_retries, +): """Change max retries value for requests. Args: @@ -444,7 +460,9 @@ def set_max_retries(*args, **kwargs): """ con = get_server_api_connection() - return con.set_max_retries(*args, **kwargs) + return con.set_max_retries( + max_retries=max_retries, + ) def is_service_user(): @@ -472,7 +490,9 @@ def get_site_id(): return con.get_site_id() -def set_site_id(*args, **kwargs): +def set_site_id( + site_id, +): """Change site id of connection. Behave as specific site for server. 
It affects default behavior of @@ -483,7 +503,9 @@ def set_site_id(*args, **kwargs): """ con = get_server_api_connection() - return con.set_site_id(*args, **kwargs) + return con.set_site_id( + site_id=site_id, + ) def get_client_version(): @@ -499,7 +521,9 @@ def get_client_version(): return con.get_client_version() -def set_client_version(*args, **kwargs): +def set_client_version( + client_version, +): """Set version of client used to connect to server. Client version is AYON client build desktop application. @@ -509,10 +533,14 @@ def set_client_version(*args, **kwargs): """ con = get_server_api_connection() - return con.set_client_version(*args, **kwargs) + return con.set_client_version( + client_version=client_version, + ) -def set_default_settings_variant(*args, **kwargs): +def set_default_settings_variant( + variant, +): """Change default variant for addon settings. Note: @@ -525,7 +553,9 @@ def set_default_settings_variant(*args, **kwargs): """ con = get_server_api_connection() - return con.set_default_settings_variant(*args, **kwargs) + return con.set_default_settings_variant( + variant=variant, + ) def get_sender(): @@ -539,7 +569,9 @@ def get_sender(): return con.get_sender() -def set_sender(*args, **kwargs): +def set_sender( + sender, +): """Change sender used for requests. Args: @@ -547,7 +579,9 @@ def set_sender(*args, **kwargs): """ con = get_server_api_connection() - return con.set_sender(*args, **kwargs) + return con.set_sender( + sender=sender, + ) def get_sender_type(): @@ -563,7 +597,9 @@ def get_sender_type(): return con.get_sender_type() -def set_sender_type(*args, **kwargs): +def set_sender_type( + sender_type, +): """Change sender type used for requests. Args: @@ -571,7 +607,9 @@ def set_sender_type(*args, **kwargs): """ con = get_server_api_connection() - return con.set_sender_type(*args, **kwargs) + return con.set_sender_type( + sender_type=sender_type, + ) def get_info(): @@ -621,7 +659,11 @@ def get_server_version_tuple(): return con.get_server_version_tuple() -def get_users(*args, **kwargs): +def get_users( + project_name=None, + usernames=None, + fields=None, +): """Get Users. Only administrators and managers can fetch all users. For other users @@ -638,10 +680,18 @@ def get_users(*args, **kwargs): """ con = get_server_api_connection() - return con.get_users(*args, **kwargs) + return con.get_users( + project_name=project_name, + usernames=usernames, + fields=fields, + ) -def get_user_by_name(*args, **kwargs): +def get_user_by_name( + username, + project_name=None, + fields=None, +): """Get user by name using GraphQl. Only administrators and managers can fetch all users. For other users @@ -659,10 +709,16 @@ def get_user_by_name(*args, **kwargs): """ con = get_server_api_connection() - return con.get_user_by_name(*args, **kwargs) + return con.get_user_by_name( + username=username, + project_name=project_name, + fields=fields, + ) -def get_user(*args, **kwargs): +def get_user( + username=None, +): """Get user info using REST endpoit. 
Args: @@ -674,60 +730,124 @@ def get_user(*args, **kwargs): """ con = get_server_api_connection() - return con.get_user(*args, **kwargs) + return con.get_user( + username=username, + ) -def raw_post(*args, **kwargs): +def raw_post( + entrypoint, + **kwargs, +): con = get_server_api_connection() - return con.raw_post(*args, **kwargs) + return con.raw_post( + entrypoint=entrypoint, + **kwargs, + ) -def raw_put(*args, **kwargs): +def raw_put( + entrypoint, + **kwargs, +): con = get_server_api_connection() - return con.raw_put(*args, **kwargs) + return con.raw_put( + entrypoint=entrypoint, + **kwargs, + ) -def raw_patch(*args, **kwargs): +def raw_patch( + entrypoint, + **kwargs, +): con = get_server_api_connection() - return con.raw_patch(*args, **kwargs) + return con.raw_patch( + entrypoint=entrypoint, + **kwargs, + ) -def raw_get(*args, **kwargs): +def raw_get( + entrypoint, + **kwargs, +): con = get_server_api_connection() - return con.raw_get(*args, **kwargs) + return con.raw_get( + entrypoint=entrypoint, + **kwargs, + ) -def raw_delete(*args, **kwargs): +def raw_delete( + entrypoint, + **kwargs, +): con = get_server_api_connection() - return con.raw_delete(*args, **kwargs) + return con.raw_delete( + entrypoint=entrypoint, + **kwargs, + ) -def post(*args, **kwargs): +def post( + entrypoint, + **kwargs, +): con = get_server_api_connection() - return con.post(*args, **kwargs) + return con.post( + entrypoint=entrypoint, + **kwargs, + ) -def put(*args, **kwargs): +def put( + entrypoint, + **kwargs, +): con = get_server_api_connection() - return con.put(*args, **kwargs) + return con.put( + entrypoint=entrypoint, + **kwargs, + ) -def patch(*args, **kwargs): +def patch( + entrypoint, + **kwargs, +): con = get_server_api_connection() - return con.patch(*args, **kwargs) + return con.patch( + entrypoint=entrypoint, + **kwargs, + ) -def get(*args, **kwargs): +def get( + entrypoint, + **kwargs, +): con = get_server_api_connection() - return con.get(*args, **kwargs) + return con.get( + entrypoint=entrypoint, + **kwargs, + ) -def delete(*args, **kwargs): +def delete( + entrypoint, + **kwargs, +): con = get_server_api_connection() - return con.delete(*args, **kwargs) + return con.delete( + entrypoint=entrypoint, + **kwargs, + ) -def get_event(*args, **kwargs): +def get_event( + event_id, +): """Query full event data by id. Events received using event server do not contain full information. To @@ -741,10 +861,23 @@ def get_event(*args, **kwargs): """ con = get_server_api_connection() - return con.get_event(*args, **kwargs) + return con.get_event( + event_id=event_id, + ) -def get_events(*args, **kwargs): +def get_events( + topics=None, + event_ids=None, + project_names=None, + states=None, + users=None, + include_logs=None, + has_children=None, + newer_than=None, + older_than=None, + fields=None, +): """Get events from server with filtering options. Notes: @@ -773,10 +906,32 @@ def get_events(*args, **kwargs): """ con = get_server_api_connection() - return con.get_events(*args, **kwargs) + return con.get_events( + topics=topics, + event_ids=event_ids, + project_names=project_names, + states=states, + users=users, + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=fields, + ) -def update_event(*args, **kwargs): +def update_event( + event_id, + sender=None, + project_name=None, + username=None, + status=None, + description=None, + summary=None, + payload=None, + progress=None, + retries=None, +): """Update event data. 
Args: @@ -794,10 +949,34 @@ def update_event(*args, **kwargs): """ con = get_server_api_connection() - return con.update_event(*args, **kwargs) + return con.update_event( + event_id=event_id, + sender=sender, + project_name=project_name, + username=username, + status=status, + description=description, + summary=summary, + payload=payload, + progress=progress, + retries=retries, + ) -def dispatch_event(*args, **kwargs): +def dispatch_event( + topic, + sender=None, + event_hash=None, + project_name=None, + username=None, + depends_on=None, + description=None, + summary=None, + payload=None, + finished=True, + store=True, + dependencies=None, +): """Dispatch event to server. Args: @@ -824,10 +1003,25 @@ def dispatch_event(*args, **kwargs): """ con = get_server_api_connection() - return con.dispatch_event(*args, **kwargs) + return con.dispatch_event( + topic=topic, + sender=sender, + event_hash=event_hash, + project_name=project_name, + username=username, + depends_on=depends_on, + description=description, + summary=summary, + payload=payload, + finished=finished, + store=store, + dependencies=dependencies, + ) -def delete_event(*args, **kwargs): +def delete_event( + event_id: str, +): """Delete event by id. Supported since AYON server 1.6.0. @@ -840,10 +1034,22 @@ def delete_event(*args, **kwargs): """ con = get_server_api_connection() - return con.delete_event(*args, **kwargs) + return con.delete_event( + event_id=event_id, + ) -def enroll_event_job(*args, **kwargs): +def enroll_event_job( + source_topic, + target_topic, + sender, + description=None, + sequential=None, + events_filter=None, + max_retries=None, + ignore_older_than=None, + ignore_sender_types=None, +): """Enroll job based on events. Enroll will find first unprocessed event with 'source_topic' and will @@ -902,10 +1108,25 @@ def enroll_event_job(*args, **kwargs): """ con = get_server_api_connection() - return con.enroll_event_job(*args, **kwargs) + return con.enroll_event_job( + source_topic=source_topic, + target_topic=target_topic, + sender=sender, + description=description, + sequential=sequential, + events_filter=events_filter, + max_retries=max_retries, + ignore_older_than=ignore_older_than, + ignore_sender_types=ignore_sender_types, + ) -def download_file_to_stream(*args, **kwargs): +def download_file_to_stream( + endpoint, + stream, + chunk_size=None, + progress=None, +): """Download file from AYON server to IOStream. Endpoint can be full url (must start with 'base_url' of api object). @@ -929,10 +1150,20 @@ def download_file_to_stream(*args, **kwargs): """ con = get_server_api_connection() - return con.download_file_to_stream(*args, **kwargs) + return con.download_file_to_stream( + endpoint=endpoint, + stream=stream, + chunk_size=chunk_size, + progress=progress, + ) -def download_file(*args, **kwargs): +def download_file( + endpoint, + filepath, + chunk_size=None, + progress=None, +): """Download file from AYON server. Endpoint can be full url (must start with 'base_url' of api object). @@ -955,10 +1186,21 @@ def download_file(*args, **kwargs): """ con = get_server_api_connection() - return con.download_file(*args, **kwargs) + return con.download_file( + endpoint=endpoint, + filepath=filepath, + chunk_size=chunk_size, + progress=progress, + ) -def upload_file_from_stream(*args, **kwargs): +def upload_file_from_stream( + endpoint, + stream, + progress, + request_type, + **kwargs, +): """Upload file to server from bytes. 
Todos: @@ -980,10 +1222,22 @@ def upload_file_from_stream(*args, **kwargs): """ con = get_server_api_connection() - return con.upload_file_from_stream(*args, **kwargs) + return con.upload_file_from_stream( + endpoint=endpoint, + stream=stream, + progress=progress, + request_type=request_type, + **kwargs, + ) -def upload_file(*args, **kwargs): +def upload_file( + endpoint, + filepath, + progress=None, + request_type=None, + **kwargs, +): """Upload file to server. Todos: @@ -1005,10 +1259,26 @@ def upload_file(*args, **kwargs): """ con = get_server_api_connection() - return con.upload_file(*args, **kwargs) + return con.upload_file( + endpoint=endpoint, + filepath=filepath, + progress=progress, + request_type=request_type, + **kwargs, + ) -def upload_reviewable(*args, **kwargs): +def upload_reviewable( + project_name, + version_id, + filepath, + label=None, + content_type=None, + filename=None, + progress=None, + headers=None, + **kwargs, +): """Upload reviewable file to server. Args: @@ -1028,7 +1298,17 @@ def upload_reviewable(*args, **kwargs): """ con = get_server_api_connection() - return con.upload_reviewable(*args, **kwargs) + return con.upload_reviewable( + project_name=project_name, + version_id=version_id, + filepath=filepath, + label=label, + content_type=content_type, + filename=filename, + progress=progress, + headers=headers, + **kwargs, + ) def trigger_server_restart(): @@ -1042,7 +1322,10 @@ def trigger_server_restart(): return con.trigger_server_restart() -def query_graphql(*args, **kwargs): +def query_graphql( + query, + variables=None, +): """Execute GraphQl query. Args: @@ -1055,7 +1338,10 @@ def query_graphql(*args, **kwargs): """ con = get_server_api_connection() - return con.query_graphql(*args, **kwargs) + return con.query_graphql( + query=query, + variables=variables, + ) def get_graphql_schema(): @@ -1093,9 +1379,13 @@ def get_schemas(): return con.get_schemas() -def get_attributes_schema(*args, **kwargs): +def get_attributes_schema( + use_cache=True, +): con = get_server_api_connection() - return con.get_attributes_schema(*args, **kwargs) + return con.get_attributes_schema( + use_cache=use_cache, + ) def reset_attributes_schema(): @@ -1103,12 +1393,26 @@ def reset_attributes_schema(): return con.reset_attributes_schema() -def set_attribute_config(*args, **kwargs): - con = get_server_api_connection() - return con.set_attribute_config(*args, **kwargs) +def set_attribute_config( + attribute_name, + data, + scope, + position=None, + builtin=False, +): + con = get_server_api_connection() + return con.set_attribute_config( + attribute_name=attribute_name, + data=data, + scope=scope, + position=position, + builtin=builtin, + ) -def remove_attribute_config(*args, **kwargs): +def remove_attribute_config( + attribute_name, +): """Remove attribute from server. This can't be un-done, please use carefully. @@ -1118,10 +1422,14 @@ def remove_attribute_config(*args, **kwargs): """ con = get_server_api_connection() - return con.remove_attribute_config(*args, **kwargs) + return con.remove_attribute_config( + attribute_name=attribute_name, + ) -def get_attributes_for_type(*args, **kwargs): +def get_attributes_for_type( + entity_type, +): """Get attribute schemas available for an entity type. 
Example:: @@ -1159,10 +1467,14 @@ def get_attributes_for_type(*args, **kwargs): """ con = get_server_api_connection() - return con.get_attributes_for_type(*args, **kwargs) + return con.get_attributes_for_type( + entity_type=entity_type, + ) -def get_attributes_fields_for_type(*args, **kwargs): +def get_attributes_fields_for_type( + entity_type, +): """Prepare attribute fields for entity type. Returns: @@ -1170,10 +1482,14 @@ def get_attributes_fields_for_type(*args, **kwargs): """ con = get_server_api_connection() - return con.get_attributes_fields_for_type(*args, **kwargs) + return con.get_attributes_fields_for_type( + entity_type=entity_type, + ) -def get_default_fields_for_type(*args, **kwargs): +def get_default_fields_for_type( + entity_type, +): """Default fields for entity type. Returns most of commonly used fields from server. @@ -1186,10 +1502,14 @@ def get_default_fields_for_type(*args, **kwargs): """ con = get_server_api_connection() - return con.get_default_fields_for_type(*args, **kwargs) + return con.get_default_fields_for_type( + entity_type=entity_type, + ) -def get_addons_info(*args, **kwargs): +def get_addons_info( + details=True, +): """Get information about addons available on server. Args: @@ -1198,10 +1518,16 @@ def get_addons_info(*args, **kwargs): """ con = get_server_api_connection() - return con.get_addons_info(*args, **kwargs) + return con.get_addons_info( + details=details, + ) -def get_addon_endpoint(*args, **kwargs): +def get_addon_endpoint( + addon_name, + addon_version, + *subpaths, +): """Calculate endpoint to addon route. Examples: @@ -1222,10 +1548,19 @@ def get_addon_endpoint(*args, **kwargs): """ con = get_server_api_connection() - return con.get_addon_endpoint(*args, **kwargs) + return con.get_addon_endpoint( + addon_name=addon_name, + addon_version=addon_version, + *subpaths, + ) -def get_addon_url(*args, **kwargs): +def get_addon_url( + addon_name, + addon_version, + *subpaths, + use_rest=True, +): """Calculate url to addon route. Examples: @@ -1247,10 +1582,23 @@ def get_addon_url(*args, **kwargs): """ con = get_server_api_connection() - return con.get_addon_url(*args, **kwargs) + return con.get_addon_url( + addon_name=addon_name, + addon_version=addon_version, + *subpaths, + use_rest=use_rest, + ) -def download_addon_private_file(*args, **kwargs): +def download_addon_private_file( + addon_name, + addon_version, + filename, + destination_dir, + destination_filename=None, + chunk_size=None, + progress=None, +): """Download a file from addon private files. This method requires to have authorized token available. Private files @@ -1272,10 +1620,21 @@ def download_addon_private_file(*args, **kwargs): """ con = get_server_api_connection() - return con.download_addon_private_file(*args, **kwargs) + return con.download_addon_private_file( + addon_name=addon_name, + addon_version=addon_version, + filename=filename, + destination_dir=destination_dir, + destination_filename=destination_filename, + chunk_size=chunk_size, + progress=progress, + ) -def get_installers(*args, **kwargs): +def get_installers( + version=None, + platform_name=None, +): """Information about desktop application installers on server. 
Desktop application installers are helpers to download/update AYON @@ -1290,10 +1649,24 @@ def get_installers(*args, **kwargs): """ con = get_server_api_connection() - return con.get_installers(*args, **kwargs) + return con.get_installers( + version=version, + platform_name=platform_name, + ) -def create_installer(*args, **kwargs): +def create_installer( + filename, + version, + python_version, + platform_name, + python_modules, + runtime_python_modules, + checksum, + checksum_algorithm, + file_size, + sources=None, +): """Create new installer information on server. This step will create only metadata. Make sure to upload installer @@ -1320,10 +1693,24 @@ def create_installer(*args, **kwargs): """ con = get_server_api_connection() - return con.create_installer(*args, **kwargs) + return con.create_installer( + filename=filename, + version=version, + python_version=python_version, + platform_name=platform_name, + python_modules=python_modules, + runtime_python_modules=runtime_python_modules, + checksum=checksum, + checksum_algorithm=checksum_algorithm, + file_size=file_size, + sources=sources, + ) -def update_installer(*args, **kwargs): +def update_installer( + filename, + sources, +): """Update installer information on server. Args: @@ -1333,10 +1720,15 @@ def update_installer(*args, **kwargs): """ con = get_server_api_connection() - return con.update_installer(*args, **kwargs) + return con.update_installer( + filename=filename, + sources=sources, + ) -def delete_installer(*args, **kwargs): +def delete_installer( + filename, +): """Delete installer from server. Args: @@ -1344,10 +1736,17 @@ def delete_installer(*args, **kwargs): """ con = get_server_api_connection() - return con.delete_installer(*args, **kwargs) + return con.delete_installer( + filename=filename, + ) -def download_installer(*args, **kwargs): +def download_installer( + filename, + dst_filepath, + chunk_size=None, + progress=None, +): """Download installer file from server. Args: @@ -1359,10 +1758,19 @@ def download_installer(*args, **kwargs): """ con = get_server_api_connection() - return con.download_installer(*args, **kwargs) + return con.download_installer( + filename=filename, + dst_filepath=dst_filepath, + chunk_size=chunk_size, + progress=progress, + ) -def upload_installer(*args, **kwargs): +def upload_installer( + src_filepath, + dst_filename, + progress=None, +): """Upload installer file to server. Args: @@ -1376,7 +1784,11 @@ def upload_installer(*args, **kwargs): """ con = get_server_api_connection() - return con.upload_installer(*args, **kwargs) + return con.upload_installer( + src_filepath=src_filepath, + dst_filename=dst_filename, + progress=progress, + ) def get_dependency_packages(): @@ -1411,7 +1823,17 @@ def get_dependency_packages(): return con.get_dependency_packages() -def create_dependency_package(*args, **kwargs): +def create_dependency_package( + filename, + python_modules, + source_addons, + installer_version, + checksum, + checksum_algorithm, + file_size, + sources=None, + platform_name=None, +): """Create dependency package on server. 
The package will be created on a server, it is also required to upload @@ -1442,10 +1864,23 @@ def create_dependency_package(*args, **kwargs): """ con = get_server_api_connection() - return con.create_dependency_package(*args, **kwargs) + return con.create_dependency_package( + filename=filename, + python_modules=python_modules, + source_addons=source_addons, + installer_version=installer_version, + checksum=checksum, + checksum_algorithm=checksum_algorithm, + file_size=file_size, + sources=sources, + platform_name=platform_name, + ) -def update_dependency_package(*args, **kwargs): +def update_dependency_package( + filename, + sources, +): """Update dependency package metadata on server. Args: @@ -1456,10 +1891,16 @@ def update_dependency_package(*args, **kwargs): """ con = get_server_api_connection() - return con.update_dependency_package(*args, **kwargs) + return con.update_dependency_package( + filename=filename, + sources=sources, + ) -def delete_dependency_package(*args, **kwargs): +def delete_dependency_package( + filename, + platform_name=None, +): """Remove dependency package for specific platform. Args: @@ -1468,10 +1909,20 @@ def delete_dependency_package(*args, **kwargs): """ con = get_server_api_connection() - return con.delete_dependency_package(*args, **kwargs) + return con.delete_dependency_package( + filename=filename, + platform_name=platform_name, + ) -def download_dependency_package(*args, **kwargs): +def download_dependency_package( + src_filename, + dst_directory, + dst_filename, + platform_name=None, + chunk_size=None, + progress=None, +): """Download dependency package from server. This method requires to have authorized token available. The package @@ -1493,10 +1944,22 @@ def download_dependency_package(*args, **kwargs): """ con = get_server_api_connection() - return con.download_dependency_package(*args, **kwargs) + return con.download_dependency_package( + src_filename=src_filename, + dst_directory=dst_directory, + dst_filename=dst_filename, + platform_name=platform_name, + chunk_size=chunk_size, + progress=progress, + ) -def upload_dependency_package(*args, **kwargs): +def upload_dependency_package( + src_filepath, + dst_filename, + platform_name=None, + progress=None, +): """Upload dependency package to server. Args: @@ -1509,10 +1972,18 @@ def upload_dependency_package(*args, **kwargs): """ con = get_server_api_connection() - return con.upload_dependency_package(*args, **kwargs) + return con.upload_dependency_package( + src_filepath=src_filepath, + dst_filename=dst_filename, + platform_name=platform_name, + progress=progress, + ) -def delete_addon(*args, **kwargs): +def delete_addon( + addon_name: str, + purge: Optional[bool] = None, +): """Delete addon from server. Delete all versions of addon from server. @@ -1523,10 +1994,17 @@ def delete_addon(*args, **kwargs): """ con = get_server_api_connection() - return con.delete_addon(*args, **kwargs) + return con.delete_addon( + addon_name=addon_name, + purge=purge, + ) -def delete_addon_version(*args, **kwargs): +def delete_addon_version( + addon_name: str, + addon_version: str, + purge: Optional[bool] = None, +): """Delete addon version from server. Delete all versions of addon from server. 
@@ -1538,10 +2016,17 @@ def delete_addon_version(*args, **kwargs): """ con = get_server_api_connection() - return con.delete_addon_version(*args, **kwargs) + return con.delete_addon_version( + addon_name=addon_name, + addon_version=addon_version, + purge=purge, + ) -def upload_addon_zip(*args, **kwargs): +def upload_addon_zip( + src_filepath, + progress=None, +): """Upload addon zip file to server. File is validated on server. If it is valid, it is installed. It will @@ -1562,7 +2047,10 @@ def upload_addon_zip(*args, **kwargs): """ con = get_server_api_connection() - return con.upload_addon_zip(*args, **kwargs) + return con.upload_addon_zip( + src_filepath=src_filepath, + progress=progress, + ) def get_bundles(): @@ -1600,7 +2088,17 @@ def get_bundles(): return con.get_bundles() -def create_bundle(*args, **kwargs): +def create_bundle( + name, + addon_versions, + installer_version, + dependency_packages=None, + is_production=None, + is_staging=None, + is_dev=None, + dev_active_user=None, + dev_addons_config=None, +): """Create bundle on server. Bundle cannot be changed once is created. Only isProduction, isStaging @@ -1640,10 +2138,30 @@ def create_bundle(*args, **kwargs): """ con = get_server_api_connection() - return con.create_bundle(*args, **kwargs) + return con.create_bundle( + name=name, + addon_versions=addon_versions, + installer_version=installer_version, + dependency_packages=dependency_packages, + is_production=is_production, + is_staging=is_staging, + is_dev=is_dev, + dev_active_user=dev_active_user, + dev_addons_config=dev_addons_config, + ) -def update_bundle(*args, **kwargs): +def update_bundle( + bundle_name, + addon_versions=None, + installer_version=None, + dependency_packages=None, + is_production=None, + is_staging=None, + is_dev=None, + dev_active_user=None, + dev_addons_config=None, +): """Update bundle on server. Dependency packages can be update only for single platform. Others @@ -1669,10 +2187,30 @@ def update_bundle(*args, **kwargs): """ con = get_server_api_connection() - return con.update_bundle(*args, **kwargs) + return con.update_bundle( + bundle_name=bundle_name, + addon_versions=addon_versions, + installer_version=installer_version, + dependency_packages=dependency_packages, + is_production=is_production, + is_staging=is_staging, + is_dev=is_dev, + dev_active_user=dev_active_user, + dev_addons_config=dev_addons_config, + ) -def check_bundle_compatibility(*args, **kwargs): +def check_bundle_compatibility( + name, + addon_versions, + installer_version, + dependency_packages=None, + is_production=None, + is_staging=None, + is_dev=None, + dev_active_user=None, + dev_addons_config=None, +): """Check bundle compatibility. Can be used as per-flight validation before creating bundle. @@ -1698,10 +2236,22 @@ def check_bundle_compatibility(*args, **kwargs): """ con = get_server_api_connection() - return con.check_bundle_compatibility(*args, **kwargs) + return con.check_bundle_compatibility( + name=name, + addon_versions=addon_versions, + installer_version=installer_version, + dependency_packages=dependency_packages, + is_production=is_production, + is_staging=is_staging, + is_dev=is_dev, + dev_active_user=dev_active_user, + dev_addons_config=dev_addons_config, + ) -def delete_bundle(*args, **kwargs): +def delete_bundle( + bundle_name, +): """Delete bundle from server. 
Args: @@ -1709,7 +2259,9 @@ def delete_bundle(*args, **kwargs): """ con = get_server_api_connection() - return con.delete_bundle(*args, **kwargs) + return con.delete_bundle( + bundle_name=bundle_name, + ) def get_project_anatomy_presets(): @@ -1752,7 +2304,9 @@ def get_default_anatomy_preset_name(): return con.get_default_anatomy_preset_name() -def get_project_anatomy_preset(*args, **kwargs): +def get_project_anatomy_preset( + preset_name=None, +): """Anatomy preset values by name. Get anatomy preset values by preset name. Primary preset is returned @@ -1766,7 +2320,9 @@ def get_project_anatomy_preset(*args, **kwargs): """ con = get_server_api_connection() - return con.get_project_anatomy_preset(*args, **kwargs) + return con.get_project_anatomy_preset( + preset_name=preset_name, + ) def get_build_in_anatomy_preset(): @@ -1780,7 +2336,9 @@ def get_build_in_anatomy_preset(): return con.get_build_in_anatomy_preset() -def get_project_root_overrides(*args, **kwargs): +def get_project_root_overrides( + project_name, +): """Root overrides per site name. Method is based on logged user and can't be received for any other @@ -1796,10 +2354,14 @@ def get_project_root_overrides(*args, **kwargs): """ con = get_server_api_connection() - return con.get_project_root_overrides(*args, **kwargs) + return con.get_project_root_overrides( + project_name=project_name, + ) -def get_project_roots_by_site(*args, **kwargs): +def get_project_roots_by_site( + project_name, +): """Root overrides per site name. Method is based on logged user and can't be received for any other @@ -1819,10 +2381,15 @@ def get_project_roots_by_site(*args, **kwargs): """ con = get_server_api_connection() - return con.get_project_roots_by_site(*args, **kwargs) + return con.get_project_roots_by_site( + project_name=project_name, + ) -def get_project_root_overrides_by_site_id(*args, **kwargs): +def get_project_root_overrides_by_site_id( + project_name, + site_id=None, +): """Root overrides for site. If site id is not passed a site set in current api object is used @@ -1839,10 +2406,16 @@ def get_project_root_overrides_by_site_id(*args, **kwargs): """ con = get_server_api_connection() - return con.get_project_root_overrides_by_site_id(*args, **kwargs) + return con.get_project_root_overrides_by_site_id( + project_name=project_name, + site_id=site_id, + ) -def get_project_roots_for_site(*args, **kwargs): +def get_project_roots_for_site( + project_name, + site_id=None, +): """Root overrides for site. If site id is not passed a site set in current api object is used @@ -1862,10 +2435,16 @@ def get_project_roots_for_site(*args, **kwargs): """ con = get_server_api_connection() - return con.get_project_roots_for_site(*args, **kwargs) + return con.get_project_roots_for_site( + project_name=project_name, + site_id=site_id, + ) -def get_project_roots_by_site_id(*args, **kwargs): +def get_project_roots_by_site_id( + project_name, + site_id=None, +): """Root values for a site. If site id is not passed a site set in current api object is used @@ -1882,10 +2461,16 @@ def get_project_roots_by_site_id(*args, **kwargs): """ con = get_server_api_connection() - return con.get_project_roots_by_site_id(*args, **kwargs) + return con.get_project_roots_by_site_id( + project_name=project_name, + site_id=site_id, + ) -def get_project_roots_by_platform(*args, **kwargs): +def get_project_roots_by_platform( + project_name, + platform_name=None, +): """Root values for a site. If platform name is not passed current platform name is used instead. 
@@ -1904,10 +2489,17 @@ def get_project_roots_by_platform(*args, **kwargs): """ con = get_server_api_connection() - return con.get_project_roots_by_platform(*args, **kwargs) + return con.get_project_roots_by_platform( + project_name=project_name, + platform_name=platform_name, + ) -def get_addon_settings_schema(*args, **kwargs): +def get_addon_settings_schema( + addon_name, + addon_version, + project_name=None, +): """Sudio/Project settings schema of an addon. Project schema may look differently as some enums are based on project @@ -1924,10 +2516,17 @@ def get_addon_settings_schema(*args, **kwargs): """ con = get_server_api_connection() - return con.get_addon_settings_schema(*args, **kwargs) + return con.get_addon_settings_schema( + addon_name=addon_name, + addon_version=addon_version, + project_name=project_name, + ) -def get_addon_site_settings_schema(*args, **kwargs): +def get_addon_site_settings_schema( + addon_name, + addon_version, +): """Site settings schema of an addon. Args: @@ -1939,10 +2538,17 @@ def get_addon_site_settings_schema(*args, **kwargs): """ con = get_server_api_connection() - return con.get_addon_site_settings_schema(*args, **kwargs) + return con.get_addon_site_settings_schema( + addon_name=addon_name, + addon_version=addon_version, + ) -def get_addon_studio_settings(*args, **kwargs): +def get_addon_studio_settings( + addon_name, + addon_version, + variant=None, +): """Addon studio settings. Receive studio settings for specific version of an addon. @@ -1958,10 +2564,21 @@ def get_addon_studio_settings(*args, **kwargs): """ con = get_server_api_connection() - return con.get_addon_studio_settings(*args, **kwargs) + return con.get_addon_studio_settings( + addon_name=addon_name, + addon_version=addon_version, + variant=variant, + ) -def get_addon_project_settings(*args, **kwargs): +def get_addon_project_settings( + addon_name, + addon_version, + project_name, + variant=None, + site_id=None, + use_site=True, +): """Addon project settings. Receive project settings for specific version of an addon. The settings @@ -1989,10 +2606,24 @@ def get_addon_project_settings(*args, **kwargs): """ con = get_server_api_connection() - return con.get_addon_project_settings(*args, **kwargs) + return con.get_addon_project_settings( + addon_name=addon_name, + addon_version=addon_version, + project_name=project_name, + variant=variant, + site_id=site_id, + use_site=use_site, + ) -def get_addon_settings(*args, **kwargs): +def get_addon_settings( + addon_name, + addon_version, + project_name=None, + variant=None, + site_id=None, + use_site=True, +): """Receive addon settings. Receive addon settings based on project name value. Some arguments may @@ -2018,10 +2649,21 @@ def get_addon_settings(*args, **kwargs): """ con = get_server_api_connection() - return con.get_addon_settings(*args, **kwargs) + return con.get_addon_settings( + addon_name=addon_name, + addon_version=addon_version, + project_name=project_name, + variant=variant, + site_id=site_id, + use_site=use_site, + ) -def get_addon_site_settings(*args, **kwargs): +def get_addon_site_settings( + addon_name, + addon_version, + site_id=None, +): """Site settings of an addon. If site id is not available an empty dictionary is returned. 
@@ -2037,10 +2679,20 @@ def get_addon_site_settings(*args, **kwargs): """ con = get_server_api_connection() - return con.get_addon_site_settings(*args, **kwargs) + return con.get_addon_site_settings( + addon_name=addon_name, + addon_version=addon_version, + site_id=site_id, + ) -def get_bundle_settings(*args, **kwargs): +def get_bundle_settings( + bundle_name=None, + project_name=None, + variant=None, + site_id=None, + use_site=True, +): """Get complete set of settings for given data. If project is not passed then studio settings are returned. If variant @@ -2073,10 +2725,22 @@ def get_bundle_settings(*args, **kwargs): """ con = get_server_api_connection() - return con.get_bundle_settings(*args, **kwargs) + return con.get_bundle_settings( + bundle_name=bundle_name, + project_name=project_name, + variant=variant, + site_id=site_id, + use_site=use_site, + ) -def get_addons_studio_settings(*args, **kwargs): +def get_addons_studio_settings( + bundle_name=None, + variant=None, + site_id=None, + use_site=True, + only_values=True, +): """All addons settings in one bulk. Warnings: @@ -2102,10 +2766,23 @@ def get_addons_studio_settings(*args, **kwargs): """ con = get_server_api_connection() - return con.get_addons_studio_settings(*args, **kwargs) + return con.get_addons_studio_settings( + bundle_name=bundle_name, + variant=variant, + site_id=site_id, + use_site=use_site, + only_values=only_values, + ) -def get_addons_project_settings(*args, **kwargs): +def get_addons_project_settings( + project_name, + bundle_name=None, + variant=None, + site_id=None, + use_site=True, + only_values=True, +): """Project settings of all addons. Server returns information about used addon versions, so full output @@ -2148,10 +2825,24 @@ def get_addons_project_settings(*args, **kwargs): """ con = get_server_api_connection() - return con.get_addons_project_settings(*args, **kwargs) + return con.get_addons_project_settings( + project_name=project_name, + bundle_name=bundle_name, + variant=variant, + site_id=site_id, + use_site=use_site, + only_values=only_values, + ) -def get_addons_settings(*args, **kwargs): +def get_addons_settings( + bundle_name=None, + project_name=None, + variant=None, + site_id=None, + use_site=True, + only_values=True, +): """Universal function to receive all addon settings. Based on 'project_name' will receive studio settings or project @@ -2179,7 +2870,14 @@ def get_addons_settings(*args, **kwargs): """ con = get_server_api_connection() - return con.get_addons_settings(*args, **kwargs) + return con.get_addons_settings( + bundle_name=bundle_name, + project_name=project_name, + variant=variant, + site_id=site_id, + use_site=use_site, + only_values=only_values, + ) def get_secrets(): @@ -2206,7 +2904,9 @@ def get_secrets(): return con.get_secrets() -def get_secret(*args, **kwargs): +def get_secret( + secret_name, +): """Get secret by name. Example output:: @@ -2224,10 +2924,15 @@ def get_secret(*args, **kwargs): """ con = get_server_api_connection() - return con.get_secret(*args, **kwargs) + return con.get_secret( + secret_name=secret_name, + ) -def save_secret(*args, **kwargs): +def save_secret( + secret_name, + secret_value, +): """Save secret. This endpoint can create and update secret. 
@@ -2238,10 +2943,15 @@ def save_secret(*args, **kwargs): """ con = get_server_api_connection() - return con.save_secret(*args, **kwargs) + return con.save_secret( + secret_name=secret_name, + secret_value=secret_value, + ) -def delete_secret(*args, **kwargs): +def delete_secret( + secret_name, +): """Delete secret by name. Args: @@ -2249,10 +2959,14 @@ def delete_secret(*args, **kwargs): """ con = get_server_api_connection() - return con.delete_secret(*args, **kwargs) + return con.delete_secret( + secret_name=secret_name, + ) -def get_rest_project(*args, **kwargs): +def get_rest_project( + project_name, +): """Query project by name. This call returns project with anatomy data. @@ -2266,10 +2980,15 @@ def get_rest_project(*args, **kwargs): """ con = get_server_api_connection() - return con.get_rest_project(*args, **kwargs) + return con.get_rest_project( + project_name=project_name, + ) -def get_rest_projects(*args, **kwargs): +def get_rest_projects( + active=True, + library=None, +): """Query available project entities. User must be logged in. @@ -2285,10 +3004,17 @@ def get_rest_projects(*args, **kwargs): """ con = get_server_api_connection() - return con.get_rest_projects(*args, **kwargs) + return con.get_rest_projects( + active=active, + library=library, + ) -def get_rest_entity_by_id(*args, **kwargs): +def get_rest_entity_by_id( + project_name, + entity_type, + entity_id, +): """Get entity using REST on a project by its id. Args: @@ -2302,15 +3028,28 @@ def get_rest_entity_by_id(*args, **kwargs): """ con = get_server_api_connection() - return con.get_rest_entity_by_id(*args, **kwargs) + return con.get_rest_entity_by_id( + project_name=project_name, + entity_type=entity_type, + entity_id=entity_id, + ) -def get_rest_folder(*args, **kwargs): +def get_rest_folder( + project_name, + folder_id, +): con = get_server_api_connection() - return con.get_rest_folder(*args, **kwargs) + return con.get_rest_folder( + project_name=project_name, + folder_id=folder_id, + ) -def get_rest_folders(*args, **kwargs): +def get_rest_folders( + project_name, + include_attrib=False, +): """Get simplified flat list of all project folders. Get all project folders in single REST call. 
This can be faster than @@ -2352,30 +3091,60 @@ def get_rest_folders(*args, **kwargs): """ con = get_server_api_connection() - return con.get_rest_folders(*args, **kwargs) + return con.get_rest_folders( + project_name=project_name, + include_attrib=include_attrib, + ) -def get_rest_task(*args, **kwargs): +def get_rest_task( + project_name, + task_id, +): con = get_server_api_connection() - return con.get_rest_task(*args, **kwargs) + return con.get_rest_task( + project_name=project_name, + task_id=task_id, + ) -def get_rest_product(*args, **kwargs): +def get_rest_product( + project_name, + product_id, +): con = get_server_api_connection() - return con.get_rest_product(*args, **kwargs) + return con.get_rest_product( + project_name=project_name, + product_id=product_id, + ) -def get_rest_version(*args, **kwargs): +def get_rest_version( + project_name, + version_id, +): con = get_server_api_connection() - return con.get_rest_version(*args, **kwargs) + return con.get_rest_version( + project_name=project_name, + version_id=version_id, + ) -def get_rest_representation(*args, **kwargs): +def get_rest_representation( + project_name, + representation_id, +): con = get_server_api_connection() - return con.get_rest_representation(*args, **kwargs) + return con.get_rest_representation( + project_name=project_name, + representation_id=representation_id, + ) -def get_project_names(*args, **kwargs): +def get_project_names( + active=True, + library=None, +): """Receive available project names. User must be logged in. @@ -2391,10 +3160,18 @@ def get_project_names(*args, **kwargs): """ con = get_server_api_connection() - return con.get_project_names(*args, **kwargs) + return con.get_project_names( + active=active, + library=library, + ) -def get_projects(*args, **kwargs): +def get_projects( + active=True, + library=None, + fields=None, + own_attributes=False, +): """Get projects. Args: @@ -2412,10 +3189,19 @@ def get_projects(*args, **kwargs): """ con = get_server_api_connection() - return con.get_projects(*args, **kwargs) + return con.get_projects( + active=active, + library=library, + fields=fields, + own_attributes=own_attributes, + ) -def get_project(*args, **kwargs): +def get_project( + project_name, + fields=None, + own_attributes=False, +): """Get project. Args: @@ -2431,10 +3217,18 @@ def get_project(*args, **kwargs): """ con = get_server_api_connection() - return con.get_project(*args, **kwargs) + return con.get_project( + project_name=project_name, + fields=fields, + own_attributes=own_attributes, + ) -def get_folders_hierarchy(*args, **kwargs): +def get_folders_hierarchy( + project_name, + search_string=None, + folder_types=None, +): """Get project hierarchy. All folders in project in hierarchy data structure. @@ -2468,10 +3262,17 @@ def get_folders_hierarchy(*args, **kwargs): """ con = get_server_api_connection() - return con.get_folders_hierarchy(*args, **kwargs) + return con.get_folders_hierarchy( + project_name=project_name, + search_string=search_string, + folder_types=folder_types, + ) -def get_folders_rest(*args, **kwargs): +def get_folders_rest( + project_name, + include_attrib=False, +): """Get simplified flat list of all project folders. Get all project folders in single REST call. 
This can be faster than @@ -2519,10 +3320,31 @@ def get_folders_rest(*args, **kwargs): """ con = get_server_api_connection() - return con.get_folders_rest(*args, **kwargs) + return con.get_folders_rest( + project_name=project_name, + include_attrib=include_attrib, + ) -def get_folders(*args, **kwargs): +def get_folders( + project_name, + folder_ids=None, + folder_paths=None, + folder_names=None, + folder_types=None, + parent_ids=None, + folder_path_regex=None, + has_products=None, + has_tasks=None, + has_children=None, + statuses=None, + assignees_all=None, + tags=None, + active=True, + has_links=None, + fields=None, + own_attributes=False, +): """Query folders from server. Todos: @@ -2572,10 +3394,33 @@ def get_folders(*args, **kwargs): """ con = get_server_api_connection() - return con.get_folders(*args, **kwargs) + return con.get_folders( + project_name=project_name, + folder_ids=folder_ids, + folder_paths=folder_paths, + folder_names=folder_names, + folder_types=folder_types, + parent_ids=parent_ids, + folder_path_regex=folder_path_regex, + has_products=has_products, + has_tasks=has_tasks, + has_children=has_children, + statuses=statuses, + assignees_all=assignees_all, + tags=tags, + active=active, + has_links=has_links, + fields=fields, + own_attributes=own_attributes, + ) -def get_folder_by_id(*args, **kwargs): +def get_folder_by_id( + project_name, + folder_id, + fields=None, + own_attributes=False, +): """Query folder entity by id. Args: @@ -2592,10 +3437,20 @@ def get_folder_by_id(*args, **kwargs): """ con = get_server_api_connection() - return con.get_folder_by_id(*args, **kwargs) + return con.get_folder_by_id( + project_name=project_name, + folder_id=folder_id, + fields=fields, + own_attributes=own_attributes, + ) -def get_folder_by_path(*args, **kwargs): +def get_folder_by_path( + project_name, + folder_path, + fields=None, + own_attributes=False, +): """Query folder entity by path. Folder path is a path to folder with all parent names joined by slash. @@ -2614,10 +3469,20 @@ def get_folder_by_path(*args, **kwargs): """ con = get_server_api_connection() - return con.get_folder_by_path(*args, **kwargs) + return con.get_folder_by_path( + project_name=project_name, + folder_path=folder_path, + fields=fields, + own_attributes=own_attributes, + ) -def get_folder_by_name(*args, **kwargs): +def get_folder_by_name( + project_name, + folder_name, + fields=None, + own_attributes=False, +): """Query folder entity by path. Warnings: @@ -2638,10 +3503,18 @@ def get_folder_by_name(*args, **kwargs): """ con = get_server_api_connection() - return con.get_folder_by_name(*args, **kwargs) + return con.get_folder_by_name( + project_name=project_name, + folder_name=folder_name, + fields=fields, + own_attributes=own_attributes, + ) -def get_folder_ids_with_products(*args, **kwargs): +def get_folder_ids_with_products( + project_name, + folder_ids=None, +): """Find folders which have at least one product. 
Folders that have at least one product should be immutable, so they @@ -2659,10 +3532,26 @@ def get_folder_ids_with_products(*args, **kwargs): """ con = get_server_api_connection() - return con.get_folder_ids_with_products(*args, **kwargs) + return con.get_folder_ids_with_products( + project_name=project_name, + folder_ids=folder_ids, + ) -def create_folder(*args, **kwargs): +def create_folder( + project_name, + name, + folder_type=None, + parent_id=None, + label=None, + attrib=None, + data=None, + tags=None, + status=None, + active=None, + thumbnail_id=None, + folder_id=None, +): """Create new folder. Args: @@ -2686,10 +3575,36 @@ def create_folder(*args, **kwargs): """ con = get_server_api_connection() - return con.create_folder(*args, **kwargs) + return con.create_folder( + project_name=project_name, + name=name, + folder_type=folder_type, + parent_id=parent_id, + label=label, + attrib=attrib, + data=data, + tags=tags, + status=status, + active=active, + thumbnail_id=thumbnail_id, + folder_id=folder_id, + ) -def update_folder(*args, **kwargs): +def update_folder( + project_name, + folder_id, + name=None, + folder_type=None, + parent_id=NOT_SET, + label=NOT_SET, + attrib=None, + data=None, + tags=None, + status=None, + active=None, + thumbnail_id=NOT_SET, +): """Update folder entity on server. Do not pass ``parent_id``, ``label`` amd ``thumbnail_id`` if you don't @@ -2717,10 +3632,27 @@ def update_folder(*args, **kwargs): """ con = get_server_api_connection() - return con.update_folder(*args, **kwargs) + return con.update_folder( + project_name=project_name, + folder_id=folder_id, + name=name, + folder_type=folder_type, + parent_id=parent_id, + label=label, + attrib=attrib, + data=data, + tags=tags, + status=status, + active=active, + thumbnail_id=thumbnail_id, + ) -def delete_folder(*args, **kwargs): +def delete_folder( + project_name, + folder_id, + force=False, +): """Delete folder. Args: @@ -2731,10 +3663,27 @@ def delete_folder(*args, **kwargs): """ con = get_server_api_connection() - return con.delete_folder(*args, **kwargs) + return con.delete_folder( + project_name=project_name, + folder_id=folder_id, + force=force, + ) -def get_tasks(*args, **kwargs): +def get_tasks( + project_name, + task_ids=None, + task_names=None, + task_types=None, + folder_ids=None, + assignees=None, + assignees_all=None, + statuses=None, + tags=None, + active=True, + fields=None, + own_attributes=False, +): """Query task entities from server. Args: @@ -2767,10 +3716,29 @@ def get_tasks(*args, **kwargs): """ con = get_server_api_connection() - return con.get_tasks(*args, **kwargs) + return con.get_tasks( + project_name=project_name, + task_ids=task_ids, + task_names=task_names, + task_types=task_types, + folder_ids=folder_ids, + assignees=assignees, + assignees_all=assignees_all, + statuses=statuses, + tags=tags, + active=active, + fields=fields, + own_attributes=own_attributes, + ) -def get_task_by_name(*args, **kwargs): +def get_task_by_name( + project_name, + folder_id, + task_name, + fields=None, + own_attributes=False, +): """Query task entity by name and folder id. 
Args: @@ -2788,10 +3756,21 @@ def get_task_by_name(*args, **kwargs): """ con = get_server_api_connection() - return con.get_task_by_name(*args, **kwargs) + return con.get_task_by_name( + project_name=project_name, + folder_id=folder_id, + task_name=task_name, + fields=fields, + own_attributes=own_attributes, + ) -def get_task_by_id(*args, **kwargs): +def get_task_by_id( + project_name, + task_id, + fields=None, + own_attributes=False, +): """Query task entity by id. Args: @@ -2808,10 +3787,27 @@ def get_task_by_id(*args, **kwargs): """ con = get_server_api_connection() - return con.get_task_by_id(*args, **kwargs) + return con.get_task_by_id( + project_name=project_name, + task_id=task_id, + fields=fields, + own_attributes=own_attributes, + ) -def get_tasks_by_folder_paths(*args, **kwargs): +def get_tasks_by_folder_paths( + project_name, + folder_paths, + task_names=None, + task_types=None, + assignees=None, + assignees_all=None, + statuses=None, + tags=None, + active=True, + fields=None, + own_attributes=False, +): """Query task entities from server by folder paths. Args: @@ -2843,10 +3839,34 @@ def get_tasks_by_folder_paths(*args, **kwargs): """ con = get_server_api_connection() - return con.get_tasks_by_folder_paths(*args, **kwargs) + return con.get_tasks_by_folder_paths( + project_name=project_name, + folder_paths=folder_paths, + task_names=task_names, + task_types=task_types, + assignees=assignees, + assignees_all=assignees_all, + statuses=statuses, + tags=tags, + active=active, + fields=fields, + own_attributes=own_attributes, + ) -def get_tasks_by_folder_path(*args, **kwargs): +def get_tasks_by_folder_path( + project_name, + folder_path, + task_names=None, + task_types=None, + assignees=None, + assignees_all=None, + statuses=None, + tags=None, + active=True, + fields=None, + own_attributes=False, +): """Query task entities from server by folder path. Args: @@ -2874,10 +3894,28 @@ def get_tasks_by_folder_path(*args, **kwargs): """ con = get_server_api_connection() - return con.get_tasks_by_folder_path(*args, **kwargs) + return con.get_tasks_by_folder_path( + project_name=project_name, + folder_path=folder_path, + task_names=task_names, + task_types=task_types, + assignees=assignees, + assignees_all=assignees_all, + statuses=statuses, + tags=tags, + active=active, + fields=fields, + own_attributes=own_attributes, + ) -def get_task_by_folder_path(*args, **kwargs): +def get_task_by_folder_path( + project_name, + folder_path, + task_name, + fields=None, + own_attributes=False, +): """Query task entity by folder path and task name. Args: @@ -2895,10 +3933,30 @@ def get_task_by_folder_path(*args, **kwargs): """ con = get_server_api_connection() - return con.get_task_by_folder_path(*args, **kwargs) + return con.get_task_by_folder_path( + project_name=project_name, + folder_path=folder_path, + task_name=task_name, + fields=fields, + own_attributes=own_attributes, + ) -def create_task(*args, **kwargs): +def create_task( + project_name, + name, + task_type, + folder_id, + label=None, + assignees=None, + attrib=None, + data=None, + tags=None, + status=None, + active=None, + thumbnail_id=None, + task_id=None, +): """Create new task. 
Args: @@ -2922,10 +3980,38 @@ def create_task(*args, **kwargs): """ con = get_server_api_connection() - return con.create_task(*args, **kwargs) + return con.create_task( + project_name=project_name, + name=name, + task_type=task_type, + folder_id=folder_id, + label=label, + assignees=assignees, + attrib=attrib, + data=data, + tags=tags, + status=status, + active=active, + thumbnail_id=thumbnail_id, + task_id=task_id, + ) -def update_task(*args, **kwargs): +def update_task( + project_name, + task_id, + name=None, + task_type=None, + folder_id=None, + label=NOT_SET, + assignees=None, + attrib=None, + data=None, + tags=None, + status=None, + active=None, + thumbnail_id=NOT_SET, +): """Update task entity on server. Do not pass ``label`` amd ``thumbnail_id`` if you don't @@ -2954,10 +4040,27 @@ def update_task(*args, **kwargs): """ con = get_server_api_connection() - return con.update_task(*args, **kwargs) + return con.update_task( + project_name=project_name, + task_id=task_id, + name=name, + task_type=task_type, + folder_id=folder_id, + label=label, + assignees=assignees, + attrib=attrib, + data=data, + tags=tags, + status=status, + active=active, + thumbnail_id=thumbnail_id, + ) -def delete_task(*args, **kwargs): +def delete_task( + project_name, + task_id, +): """Delete task. Args: @@ -2966,10 +4069,27 @@ def delete_task(*args, **kwargs): """ con = get_server_api_connection() - return con.delete_task(*args, **kwargs) + return con.delete_task( + project_name=project_name, + task_id=task_id, + ) -def get_products(*args, **kwargs): +def get_products( + project_name, + product_ids=None, + product_names=None, + folder_ids=None, + product_types=None, + product_name_regex=None, + product_path_regex=None, + names_by_folder_ids=None, + statuses=None, + tags=None, + active=True, + fields=None, + own_attributes=_PLACEHOLDER, +): """Query products from server. Todos: @@ -3007,10 +4127,29 @@ def get_products(*args, **kwargs): """ con = get_server_api_connection() - return con.get_products(*args, **kwargs) + return con.get_products( + project_name=project_name, + product_ids=product_ids, + product_names=product_names, + folder_ids=folder_ids, + product_types=product_types, + product_name_regex=product_name_regex, + product_path_regex=product_path_regex, + names_by_folder_ids=names_by_folder_ids, + statuses=statuses, + tags=tags, + active=active, + fields=fields, + own_attributes=own_attributes, + ) -def get_product_by_id(*args, **kwargs): +def get_product_by_id( + project_name, + product_id, + fields=None, + own_attributes=_PLACEHOLDER, +): """Query product entity by id. Args: @@ -3027,10 +4166,21 @@ def get_product_by_id(*args, **kwargs): """ con = get_server_api_connection() - return con.get_product_by_id(*args, **kwargs) + return con.get_product_by_id( + project_name=project_name, + product_id=product_id, + fields=fields, + own_attributes=own_attributes, + ) -def get_product_by_name(*args, **kwargs): +def get_product_by_name( + project_name, + product_name, + folder_id, + fields=None, + own_attributes=_PLACEHOLDER, +): """Query product entity by name and folder id. Args: @@ -3048,10 +4198,18 @@ def get_product_by_name(*args, **kwargs): """ con = get_server_api_connection() - return con.get_product_by_name(*args, **kwargs) + return con.get_product_by_name( + project_name=project_name, + product_name=product_name, + folder_id=folder_id, + fields=fields, + own_attributes=own_attributes, + ) -def get_product_types(*args, **kwargs): +def get_product_types( + fields=None, +): """Types of products. 
This is server wide information. Product types have 'name', 'icon' and @@ -3065,10 +4223,15 @@ def get_product_types(*args, **kwargs): """ con = get_server_api_connection() - return con.get_product_types(*args, **kwargs) + return con.get_product_types( + fields=fields, + ) -def get_project_product_types(*args, **kwargs): +def get_project_product_types( + project_name, + fields=None, +): """Types of products available on a project. Filter only product types available on project. @@ -3083,10 +4246,16 @@ def get_project_product_types(*args, **kwargs): """ con = get_server_api_connection() - return con.get_project_product_types(*args, **kwargs) + return con.get_project_product_types( + project_name=project_name, + fields=fields, + ) -def get_product_type_names(*args, **kwargs): +def get_product_type_names( + project_name=None, + product_ids=None, +): """Product type names. Warnings: @@ -3104,10 +4273,24 @@ def get_product_type_names(*args, **kwargs): """ con = get_server_api_connection() - return con.get_product_type_names(*args, **kwargs) + return con.get_product_type_names( + project_name=project_name, + product_ids=product_ids, + ) -def create_product(*args, **kwargs): +def create_product( + project_name, + name, + product_type, + folder_id, + attrib=None, + data=None, + tags=None, + status=None, + active=None, + product_id=None, +): """Create new product. Args: @@ -3128,10 +4311,32 @@ def create_product(*args, **kwargs): """ con = get_server_api_connection() - return con.create_product(*args, **kwargs) + return con.create_product( + project_name=project_name, + name=name, + product_type=product_type, + folder_id=folder_id, + attrib=attrib, + data=data, + tags=tags, + status=status, + active=active, + product_id=product_id, + ) -def update_product(*args, **kwargs): +def update_product( + project_name, + product_id, + name=None, + folder_id=None, + product_type=None, + attrib=None, + data=None, + tags=None, + status=None, + active=None, +): """Update product entity on server. Update of ``data`` will override existing value on folder entity. @@ -3153,10 +4358,24 @@ def update_product(*args, **kwargs): """ con = get_server_api_connection() - return con.update_product(*args, **kwargs) + return con.update_product( + project_name=project_name, + product_id=product_id, + name=name, + folder_id=folder_id, + product_type=product_type, + attrib=attrib, + data=data, + tags=tags, + status=status, + active=active, + ) -def delete_product(*args, **kwargs): +def delete_product( + project_name, + product_id, +): """Delete product. Args: @@ -3165,10 +4384,27 @@ def delete_product(*args, **kwargs): """ con = get_server_api_connection() - return con.delete_product(*args, **kwargs) + return con.delete_product( + project_name=project_name, + product_id=product_id, + ) -def get_versions(*args, **kwargs): +def get_versions( + project_name, + version_ids=None, + product_ids=None, + task_ids=None, + versions=None, + hero=True, + standard=True, + latest=None, + statuses=None, + tags=None, + active=True, + fields=None, + own_attributes=_PLACEHOLDER, +): """Get version entities based on passed filters from server. 
Args: @@ -3203,10 +4439,29 @@ def get_versions(*args, **kwargs): """ con = get_server_api_connection() - return con.get_versions(*args, **kwargs) + return con.get_versions( + project_name=project_name, + version_ids=version_ids, + product_ids=product_ids, + task_ids=task_ids, + versions=versions, + hero=hero, + standard=standard, + latest=latest, + statuses=statuses, + tags=tags, + active=active, + fields=fields, + own_attributes=own_attributes, + ) -def get_version_by_id(*args, **kwargs): +def get_version_by_id( + project_name, + version_id, + fields=None, + own_attributes=_PLACEHOLDER, +): """Query version entity by id. Args: @@ -3223,10 +4478,21 @@ def get_version_by_id(*args, **kwargs): """ con = get_server_api_connection() - return con.get_version_by_id(*args, **kwargs) + return con.get_version_by_id( + project_name=project_name, + version_id=version_id, + fields=fields, + own_attributes=own_attributes, + ) -def get_version_by_name(*args, **kwargs): +def get_version_by_name( + project_name, + version, + product_id, + fields=None, + own_attributes=_PLACEHOLDER, +): """Query version entity by version and product id. Args: @@ -3244,10 +4510,21 @@ def get_version_by_name(*args, **kwargs): """ con = get_server_api_connection() - return con.get_version_by_name(*args, **kwargs) + return con.get_version_by_name( + project_name=project_name, + version=version, + product_id=product_id, + fields=fields, + own_attributes=own_attributes, + ) -def get_hero_version_by_id(*args, **kwargs): +def get_hero_version_by_id( + project_name, + version_id, + fields=None, + own_attributes=_PLACEHOLDER, +): """Query hero version entity by id. Args: @@ -3264,10 +4541,20 @@ def get_hero_version_by_id(*args, **kwargs): """ con = get_server_api_connection() - return con.get_hero_version_by_id(*args, **kwargs) + return con.get_hero_version_by_id( + project_name=project_name, + version_id=version_id, + fields=fields, + own_attributes=own_attributes, + ) -def get_hero_version_by_product_id(*args, **kwargs): +def get_hero_version_by_product_id( + project_name, + product_id, + fields=None, + own_attributes=_PLACEHOLDER, +): """Query hero version entity by product id. Only one hero version is available on a product. @@ -3286,10 +4573,22 @@ def get_hero_version_by_product_id(*args, **kwargs): """ con = get_server_api_connection() - return con.get_hero_version_by_product_id(*args, **kwargs) + return con.get_hero_version_by_product_id( + project_name=project_name, + product_id=product_id, + fields=fields, + own_attributes=own_attributes, + ) -def get_hero_versions(*args, **kwargs): +def get_hero_versions( + project_name, + product_ids=None, + version_ids=None, + active=True, + fields=None, + own_attributes=_PLACEHOLDER, +): """Query hero versions by multiple filters. Only one hero version is available on a product. @@ -3311,10 +4610,23 @@ def get_hero_versions(*args, **kwargs): """ con = get_server_api_connection() - return con.get_hero_versions(*args, **kwargs) + return con.get_hero_versions( + project_name=project_name, + product_ids=product_ids, + version_ids=version_ids, + active=active, + fields=fields, + own_attributes=own_attributes, + ) -def get_last_versions(*args, **kwargs): +def get_last_versions( + project_name, + product_ids, + active=True, + fields=None, + own_attributes=_PLACEHOLDER, +): """Query last version entities by product ids. 
Args: @@ -3332,10 +4644,22 @@ def get_last_versions(*args, **kwargs): """ con = get_server_api_connection() - return con.get_last_versions(*args, **kwargs) + return con.get_last_versions( + project_name=project_name, + product_ids=product_ids, + active=active, + fields=fields, + own_attributes=own_attributes, + ) -def get_last_version_by_product_id(*args, **kwargs): +def get_last_version_by_product_id( + project_name, + product_id, + active=True, + fields=None, + own_attributes=_PLACEHOLDER, +): """Query last version entity by product id. Args: @@ -3353,10 +4677,23 @@ def get_last_version_by_product_id(*args, **kwargs): """ con = get_server_api_connection() - return con.get_last_version_by_product_id(*args, **kwargs) + return con.get_last_version_by_product_id( + project_name=project_name, + product_id=product_id, + active=active, + fields=fields, + own_attributes=own_attributes, + ) -def get_last_version_by_product_name(*args, **kwargs): +def get_last_version_by_product_name( + project_name, + product_name, + folder_id, + active=True, + fields=None, + own_attributes=_PLACEHOLDER, +): """Query last version entity by product name and folder id. Args: @@ -3375,10 +4712,20 @@ def get_last_version_by_product_name(*args, **kwargs): """ con = get_server_api_connection() - return con.get_last_version_by_product_name(*args, **kwargs) + return con.get_last_version_by_product_name( + project_name=project_name, + product_name=product_name, + folder_id=folder_id, + active=active, + fields=fields, + own_attributes=own_attributes, + ) -def version_is_latest(*args, **kwargs): +def version_is_latest( + project_name, + version_id, +): """Is version latest from a product. Args: @@ -3390,10 +4737,26 @@ def version_is_latest(*args, **kwargs): """ con = get_server_api_connection() - return con.version_is_latest(*args, **kwargs) + return con.version_is_latest( + project_name=project_name, + version_id=version_id, + ) -def create_version(*args, **kwargs): +def create_version( + project_name, + version, + product_id, + task_id=None, + author=None, + attrib=None, + data=None, + tags=None, + status=None, + active=None, + thumbnail_id=None, + version_id=None, +): """Create new version. Args: @@ -3416,10 +4779,36 @@ def create_version(*args, **kwargs): """ con = get_server_api_connection() - return con.create_version(*args, **kwargs) + return con.create_version( + project_name=project_name, + version=version, + product_id=product_id, + task_id=task_id, + author=author, + attrib=attrib, + data=data, + tags=tags, + status=status, + active=active, + thumbnail_id=thumbnail_id, + version_id=version_id, + ) -def update_version(*args, **kwargs): +def update_version( + project_name, + version_id, + version=None, + product_id=None, + task_id=NOT_SET, + author=None, + attrib=None, + data=None, + tags=None, + status=None, + active=None, + thumbnail_id=NOT_SET, +): """Update version entity on server. Do not pass ``task_id`` amd ``thumbnail_id`` if you don't @@ -3447,10 +4836,26 @@ def update_version(*args, **kwargs): """ con = get_server_api_connection() - return con.update_version(*args, **kwargs) + return con.update_version( + project_name=project_name, + version_id=version_id, + version=version, + product_id=product_id, + task_id=task_id, + author=author, + attrib=attrib, + data=data, + tags=tags, + status=status, + active=active, + thumbnail_id=thumbnail_id, + ) -def delete_version(*args, **kwargs): +def delete_version( + project_name, + version_id, +): """Delete version. 
Args: @@ -3459,10 +4864,25 @@ def delete_version(*args, **kwargs): """ con = get_server_api_connection() - return con.delete_version(*args, **kwargs) + return con.delete_version( + project_name=project_name, + version_id=version_id, + ) -def get_representations(*args, **kwargs): +def get_representations( + project_name, + representation_ids=None, + representation_names=None, + version_ids=None, + names_by_version_ids=None, + statuses=None, + tags=None, + active=True, + has_links=None, + fields=None, + own_attributes=_PLACEHOLDER, +): """Get representation entities based on passed filters from server. .. todo:: @@ -3501,10 +4921,27 @@ def get_representations(*args, **kwargs): """ con = get_server_api_connection() - return con.get_representations(*args, **kwargs) + return con.get_representations( + project_name=project_name, + representation_ids=representation_ids, + representation_names=representation_names, + version_ids=version_ids, + names_by_version_ids=names_by_version_ids, + statuses=statuses, + tags=tags, + active=active, + has_links=has_links, + fields=fields, + own_attributes=own_attributes, + ) -def get_representation_by_id(*args, **kwargs): +def get_representation_by_id( + project_name, + representation_id, + fields=None, + own_attributes=_PLACEHOLDER, +): """Query representation entity from server based on id filter. Args: @@ -3520,10 +4957,21 @@ def get_representation_by_id(*args, **kwargs): """ con = get_server_api_connection() - return con.get_representation_by_id(*args, **kwargs) + return con.get_representation_by_id( + project_name=project_name, + representation_id=representation_id, + fields=fields, + own_attributes=own_attributes, + ) -def get_representation_by_name(*args, **kwargs): +def get_representation_by_name( + project_name, + representation_name, + version_id, + fields=None, + own_attributes=_PLACEHOLDER, +): """Query representation entity by name and version id. Args: @@ -3540,10 +4988,25 @@ def get_representation_by_name(*args, **kwargs): """ con = get_server_api_connection() - return con.get_representation_by_name(*args, **kwargs) + return con.get_representation_by_name( + project_name=project_name, + representation_name=representation_name, + version_id=version_id, + fields=fields, + own_attributes=own_attributes, + ) -def get_representations_hierarchy(*args, **kwargs): +def get_representations_hierarchy( + project_name, + representation_ids, + project_fields=None, + folder_fields=None, + task_fields=None, + product_fields=None, + version_fields=None, + representation_fields=None, +): """Find representation with parents by representation id. Representation entity with parent entities up to project. @@ -3569,10 +5032,28 @@ def get_representations_hierarchy(*args, **kwargs): """ con = get_server_api_connection() - return con.get_representations_hierarchy(*args, **kwargs) + return con.get_representations_hierarchy( + project_name=project_name, + representation_ids=representation_ids, + project_fields=project_fields, + folder_fields=folder_fields, + task_fields=task_fields, + product_fields=product_fields, + version_fields=version_fields, + representation_fields=representation_fields, + ) -def get_representation_hierarchy(*args, **kwargs): +def get_representation_hierarchy( + project_name, + representation_id, + project_fields=None, + folder_fields=None, + task_fields=None, + product_fields=None, + version_fields=None, + representation_fields=None, +): """Find representation parents by representation id. Representation parent entities up to project. 
@@ -3593,10 +5074,26 @@ def get_representation_hierarchy(*args, **kwargs): """ con = get_server_api_connection() - return con.get_representation_hierarchy(*args, **kwargs) + return con.get_representation_hierarchy( + project_name=project_name, + representation_id=representation_id, + project_fields=project_fields, + folder_fields=folder_fields, + task_fields=task_fields, + product_fields=product_fields, + version_fields=version_fields, + representation_fields=representation_fields, + ) -def get_representations_parents(*args, **kwargs): +def get_representations_parents( + project_name, + representation_ids, + project_fields=None, + folder_fields=None, + product_fields=None, + version_fields=None, +): """Find representations parents by representation id. Representation parent entities up to project. @@ -3615,10 +5112,24 @@ def get_representations_parents(*args, **kwargs): """ con = get_server_api_connection() - return con.get_representations_parents(*args, **kwargs) + return con.get_representations_parents( + project_name=project_name, + representation_ids=representation_ids, + project_fields=project_fields, + folder_fields=folder_fields, + product_fields=product_fields, + version_fields=version_fields, + ) -def get_representation_parents(*args, **kwargs): +def get_representation_parents( + project_name, + representation_id, + project_fields=None, + folder_fields=None, + product_fields=None, + version_fields=None, +): """Find representation parents by representation id. Representation parent entities up to project. @@ -3636,10 +5147,22 @@ def get_representation_parents(*args, **kwargs): """ con = get_server_api_connection() - return con.get_representation_parents(*args, **kwargs) + return con.get_representation_parents( + project_name=project_name, + representation_id=representation_id, + project_fields=project_fields, + folder_fields=folder_fields, + product_fields=product_fields, + version_fields=version_fields, + ) -def get_repre_ids_by_context_filters(*args, **kwargs): +def get_repre_ids_by_context_filters( + project_name, + context_filters, + representation_names=None, + version_ids=None, +): """Find representation ids which match passed context filters. Each representation has context integrated on representation entity in @@ -3680,10 +5203,26 @@ def get_repre_ids_by_context_filters(*args, **kwargs): """ con = get_server_api_connection() - return con.get_repre_ids_by_context_filters(*args, **kwargs) + return con.get_repre_ids_by_context_filters( + project_name=project_name, + context_filters=context_filters, + representation_names=representation_names, + version_ids=version_ids, + ) -def create_representation(*args, **kwargs): +def create_representation( + project_name, + name, + version_id, + files=None, + attrib=None, + data=None, + tags=None, + status=None, + active=None, + representation_id=None, +): """Create new representation. 
Args: @@ -3704,10 +5243,32 @@ def create_representation(*args, **kwargs): """ con = get_server_api_connection() - return con.create_representation(*args, **kwargs) + return con.create_representation( + project_name=project_name, + name=name, + version_id=version_id, + files=files, + attrib=attrib, + data=data, + tags=tags, + status=status, + active=active, + representation_id=representation_id, + ) -def update_representation(*args, **kwargs): +def update_representation( + project_name, + representation_id, + name=None, + version_id=None, + files=None, + attrib=None, + data=None, + tags=None, + status=None, + active=None, +): """Update representation entity on server. Update of ``data`` will override existing value on folder entity. @@ -3730,10 +5291,24 @@ def update_representation(*args, **kwargs): """ con = get_server_api_connection() - return con.update_representation(*args, **kwargs) + return con.update_representation( + project_name=project_name, + representation_id=representation_id, + name=name, + version_id=version_id, + files=files, + attrib=attrib, + data=data, + tags=tags, + status=status, + active=active, + ) -def delete_representation(*args, **kwargs): +def delete_representation( + project_name, + representation_id, +): """Delete representation. Args: @@ -3742,10 +5317,24 @@ def delete_representation(*args, **kwargs): """ con = get_server_api_connection() - return con.delete_representation(*args, **kwargs) + return con.delete_representation( + project_name=project_name, + representation_id=representation_id, + ) -def get_workfiles_info(*args, **kwargs): +def get_workfiles_info( + project_name, + workfile_ids=None, + task_ids=None, + paths=None, + path_regex=None, + statuses=None, + tags=None, + has_links=None, + fields=None, + own_attributes=_PLACEHOLDER, +): """Workfile info entities by passed filters. Args: @@ -3771,10 +5360,27 @@ def get_workfiles_info(*args, **kwargs): """ con = get_server_api_connection() - return con.get_workfiles_info(*args, **kwargs) + return con.get_workfiles_info( + project_name=project_name, + workfile_ids=workfile_ids, + task_ids=task_ids, + paths=paths, + path_regex=path_regex, + statuses=statuses, + tags=tags, + has_links=has_links, + fields=fields, + own_attributes=own_attributes, + ) -def get_workfile_info(*args, **kwargs): +def get_workfile_info( + project_name, + task_id, + path, + fields=None, + own_attributes=_PLACEHOLDER, +): """Workfile info entity by task id and workfile path. Args: @@ -3792,10 +5398,21 @@ def get_workfile_info(*args, **kwargs): """ con = get_server_api_connection() - return con.get_workfile_info(*args, **kwargs) + return con.get_workfile_info( + project_name=project_name, + task_id=task_id, + path=path, + fields=fields, + own_attributes=own_attributes, + ) -def get_workfile_info_by_id(*args, **kwargs): +def get_workfile_info_by_id( + project_name, + workfile_id, + fields=None, + own_attributes=_PLACEHOLDER, +): """Workfile info entity by id. Args: @@ -3812,10 +5429,18 @@ def get_workfile_info_by_id(*args, **kwargs): """ con = get_server_api_connection() - return con.get_workfile_info_by_id(*args, **kwargs) + return con.get_workfile_info_by_id( + project_name=project_name, + workfile_id=workfile_id, + fields=fields, + own_attributes=own_attributes, + ) -def get_thumbnail_by_id(*args, **kwargs): +def get_thumbnail_by_id( + project_name, + thumbnail_id, +): """Get thumbnail from server by id. 
Permissions of thumbnails are related to entities so thumbnails must @@ -3841,10 +5466,18 @@ def get_thumbnail_by_id(*args, **kwargs): """ con = get_server_api_connection() - return con.get_thumbnail_by_id(*args, **kwargs) + return con.get_thumbnail_by_id( + project_name=project_name, + thumbnail_id=thumbnail_id, + ) -def get_thumbnail(*args, **kwargs): +def get_thumbnail( + project_name, + entity_type, + entity_id, + thumbnail_id=None, +): """Get thumbnail from server. Permissions of thumbnails are related to entities so thumbnails must @@ -3872,10 +5505,19 @@ def get_thumbnail(*args, **kwargs): """ con = get_server_api_connection() - return con.get_thumbnail(*args, **kwargs) + return con.get_thumbnail( + project_name=project_name, + entity_type=entity_type, + entity_id=entity_id, + thumbnail_id=thumbnail_id, + ) -def get_folder_thumbnail(*args, **kwargs): +def get_folder_thumbnail( + project_name, + folder_id, + thumbnail_id=None, +): """Prepared method to receive thumbnail for folder entity. Args: @@ -3890,10 +5532,18 @@ def get_folder_thumbnail(*args, **kwargs): """ con = get_server_api_connection() - return con.get_folder_thumbnail(*args, **kwargs) + return con.get_folder_thumbnail( + project_name=project_name, + folder_id=folder_id, + thumbnail_id=thumbnail_id, + ) -def get_version_thumbnail(*args, **kwargs): +def get_version_thumbnail( + project_name, + version_id, + thumbnail_id=None, +): """Prepared method to receive thumbnail for version entity. Args: @@ -3909,10 +5559,18 @@ def get_version_thumbnail(*args, **kwargs): """ con = get_server_api_connection() - return con.get_version_thumbnail(*args, **kwargs) + return con.get_version_thumbnail( + project_name=project_name, + version_id=version_id, + thumbnail_id=thumbnail_id, + ) -def get_workfile_thumbnail(*args, **kwargs): +def get_workfile_thumbnail( + project_name, + workfile_id, + thumbnail_id=None, +): """Prepared method to receive thumbnail for workfile entity. Args: @@ -3928,10 +5586,18 @@ def get_workfile_thumbnail(*args, **kwargs): """ con = get_server_api_connection() - return con.get_workfile_thumbnail(*args, **kwargs) + return con.get_workfile_thumbnail( + project_name=project_name, + workfile_id=workfile_id, + thumbnail_id=thumbnail_id, + ) -def create_thumbnail(*args, **kwargs): +def create_thumbnail( + project_name, + src_filepath, + thumbnail_id=None, +): """Create new thumbnail on server from passed path. Args: @@ -3948,10 +5614,18 @@ def create_thumbnail(*args, **kwargs): """ con = get_server_api_connection() - return con.create_thumbnail(*args, **kwargs) + return con.create_thumbnail( + project_name=project_name, + src_filepath=src_filepath, + thumbnail_id=thumbnail_id, + ) -def update_thumbnail(*args, **kwargs): +def update_thumbnail( + project_name, + thumbnail_id, + src_filepath, +): """Change thumbnail content by id. Update can be also used to create new thumbnail. @@ -3967,10 +5641,19 @@ def update_thumbnail(*args, **kwargs): """ con = get_server_api_connection() - return con.update_thumbnail(*args, **kwargs) + return con.update_thumbnail( + project_name=project_name, + thumbnail_id=thumbnail_id, + src_filepath=src_filepath, + ) -def create_project(*args, **kwargs): +def create_project( + project_name, + project_code, + library_project=False, + preset_name=None, +): """Create project using AYON settings. 
This project creation function is not validating project entity on @@ -3998,10 +5681,29 @@ def create_project(*args, **kwargs): """ con = get_server_api_connection() - return con.create_project(*args, **kwargs) + return con.create_project( + project_name=project_name, + project_code=project_code, + library_project=library_project, + preset_name=preset_name, + ) -def update_project(*args, **kwargs): +def update_project( + project_name, + library=None, + folder_types=None, + task_types=None, + link_types=None, + statuses=None, + tags=None, + config=None, + attrib=None, + data=None, + active=None, + project_code=None, + **changes, +): """Update project entity on server. Args: @@ -4028,10 +5730,26 @@ def update_project(*args, **kwargs): """ con = get_server_api_connection() - return con.update_project(*args, **kwargs) + return con.update_project( + project_name=project_name, + library=library, + folder_types=folder_types, + task_types=task_types, + link_types=link_types, + statuses=statuses, + tags=tags, + config=config, + attrib=attrib, + data=data, + active=active, + project_code=project_code, + **changes, + ) -def delete_project(*args, **kwargs): +def delete_project( + project_name, +): """Delete project from server. This will completely remove project from server without any step back. @@ -4041,10 +5759,16 @@ def delete_project(*args, **kwargs): """ con = get_server_api_connection() - return con.delete_project(*args, **kwargs) + return con.delete_project( + project_name=project_name, + ) -def get_full_link_type_name(*args, **kwargs): +def get_full_link_type_name( + link_type_name, + input_type, + output_type, +): """Calculate full link type name used for query from server. Args: @@ -4057,10 +5781,16 @@ def get_full_link_type_name(*args, **kwargs): """ con = get_server_api_connection() - return con.get_full_link_type_name(*args, **kwargs) + return con.get_full_link_type_name( + link_type_name=link_type_name, + input_type=input_type, + output_type=output_type, + ) -def get_link_types(*args, **kwargs): +def get_link_types( + project_name, +): """All link types available on a project. Example output: @@ -4082,10 +5812,17 @@ def get_link_types(*args, **kwargs): """ con = get_server_api_connection() - return con.get_link_types(*args, **kwargs) + return con.get_link_types( + project_name=project_name, + ) -def get_link_type(*args, **kwargs): +def get_link_type( + project_name, + link_type_name, + input_type, + output_type, +): """Get link type data. There is not dedicated REST endpoint to get single link type, @@ -4111,10 +5848,21 @@ def get_link_type(*args, **kwargs): """ con = get_server_api_connection() - return con.get_link_type(*args, **kwargs) + return con.get_link_type( + project_name=project_name, + link_type_name=link_type_name, + input_type=input_type, + output_type=output_type, + ) -def create_link_type(*args, **kwargs): +def create_link_type( + project_name, + link_type_name, + input_type, + output_type, + data=None, +): """Create or update link type on server. Warning: @@ -4132,10 +5880,21 @@ def create_link_type(*args, **kwargs): """ con = get_server_api_connection() - return con.create_link_type(*args, **kwargs) + return con.create_link_type( + project_name=project_name, + link_type_name=link_type_name, + input_type=input_type, + output_type=output_type, + data=data, + ) -def delete_link_type(*args, **kwargs): +def delete_link_type( + project_name, + link_type_name, + input_type, + output_type, +): """Remove link type from project. 
Args: @@ -4149,10 +5908,21 @@ def delete_link_type(*args, **kwargs): """ con = get_server_api_connection() - return con.delete_link_type(*args, **kwargs) + return con.delete_link_type( + project_name=project_name, + link_type_name=link_type_name, + input_type=input_type, + output_type=output_type, + ) -def make_sure_link_type_exists(*args, **kwargs): +def make_sure_link_type_exists( + project_name, + link_type_name, + input_type, + output_type, + data=None, +): """Make sure link type exists on a project. Args: @@ -4164,10 +5934,24 @@ def make_sure_link_type_exists(*args, **kwargs): """ con = get_server_api_connection() - return con.make_sure_link_type_exists(*args, **kwargs) + return con.make_sure_link_type_exists( + project_name=project_name, + link_type_name=link_type_name, + input_type=input_type, + output_type=output_type, + data=data, + ) -def create_link(*args, **kwargs): +def create_link( + project_name, + link_type_name, + input_id, + input_type, + output_id, + output_type, + link_name=None, +): """Create link between 2 entities. Link has a type which must already exists on a project. @@ -4196,10 +5980,21 @@ def create_link(*args, **kwargs): """ con = get_server_api_connection() - return con.create_link(*args, **kwargs) + return con.create_link( + project_name=project_name, + link_type_name=link_type_name, + input_id=input_id, + input_type=input_type, + output_id=output_id, + output_type=output_type, + link_name=link_name, + ) -def delete_link(*args, **kwargs): +def delete_link( + project_name, + link_id, +): """Remove link by id. Args: @@ -4211,10 +6006,21 @@ def delete_link(*args, **kwargs): """ con = get_server_api_connection() - return con.delete_link(*args, **kwargs) + return con.delete_link( + project_name=project_name, + link_id=link_id, + ) -def get_entities_links(*args, **kwargs): +def get_entities_links( + project_name, + entity_type, + entity_ids=None, + link_types=None, + link_direction=None, + link_names=None, + link_name_regex=None, +): """Helper method to get links from server for entity types. .. highlight:: text @@ -4255,10 +6061,23 @@ def get_entities_links(*args, **kwargs): """ con = get_server_api_connection() - return con.get_entities_links(*args, **kwargs) + return con.get_entities_links( + project_name=project_name, + entity_type=entity_type, + entity_ids=entity_ids, + link_types=link_types, + link_direction=link_direction, + link_names=link_names, + link_name_regex=link_name_regex, + ) -def get_folders_links(*args, **kwargs): +def get_folders_links( + project_name, + folder_ids=None, + link_types=None, + link_direction=None, +): """Query folders links from server. Args: @@ -4274,10 +6093,20 @@ def get_folders_links(*args, **kwargs): """ con = get_server_api_connection() - return con.get_folders_links(*args, **kwargs) + return con.get_folders_links( + project_name=project_name, + folder_ids=folder_ids, + link_types=link_types, + link_direction=link_direction, + ) -def get_folder_links(*args, **kwargs): +def get_folder_links( + project_name, + folder_id, + link_types=None, + link_direction=None, +): """Query folder links from server. 
Args: @@ -4292,10 +6121,20 @@ def get_folder_links(*args, **kwargs): """ con = get_server_api_connection() - return con.get_folder_links(*args, **kwargs) + return con.get_folder_links( + project_name=project_name, + folder_id=folder_id, + link_types=link_types, + link_direction=link_direction, + ) -def get_tasks_links(*args, **kwargs): +def get_tasks_links( + project_name, + task_ids=None, + link_types=None, + link_direction=None, +): """Query tasks links from server. Args: @@ -4311,10 +6150,20 @@ def get_tasks_links(*args, **kwargs): """ con = get_server_api_connection() - return con.get_tasks_links(*args, **kwargs) + return con.get_tasks_links( + project_name=project_name, + task_ids=task_ids, + link_types=link_types, + link_direction=link_direction, + ) -def get_task_links(*args, **kwargs): +def get_task_links( + project_name, + task_id, + link_types=None, + link_direction=None, +): """Query task links from server. Args: @@ -4329,10 +6178,20 @@ def get_task_links(*args, **kwargs): """ con = get_server_api_connection() - return con.get_task_links(*args, **kwargs) + return con.get_task_links( + project_name=project_name, + task_id=task_id, + link_types=link_types, + link_direction=link_direction, + ) -def get_products_links(*args, **kwargs): +def get_products_links( + project_name, + product_ids=None, + link_types=None, + link_direction=None, +): """Query products links from server. Args: @@ -4348,10 +6207,20 @@ def get_products_links(*args, **kwargs): """ con = get_server_api_connection() - return con.get_products_links(*args, **kwargs) + return con.get_products_links( + project_name=project_name, + product_ids=product_ids, + link_types=link_types, + link_direction=link_direction, + ) -def get_product_links(*args, **kwargs): +def get_product_links( + project_name, + product_id, + link_types=None, + link_direction=None, +): """Query product links from server. Args: @@ -4366,10 +6235,20 @@ def get_product_links(*args, **kwargs): """ con = get_server_api_connection() - return con.get_product_links(*args, **kwargs) + return con.get_product_links( + project_name=project_name, + product_id=product_id, + link_types=link_types, + link_direction=link_direction, + ) -def get_versions_links(*args, **kwargs): +def get_versions_links( + project_name, + version_ids=None, + link_types=None, + link_direction=None, +): """Query versions links from server. Args: @@ -4385,10 +6264,20 @@ def get_versions_links(*args, **kwargs): """ con = get_server_api_connection() - return con.get_versions_links(*args, **kwargs) + return con.get_versions_links( + project_name=project_name, + version_ids=version_ids, + link_types=link_types, + link_direction=link_direction, + ) -def get_version_links(*args, **kwargs): +def get_version_links( + project_name, + version_id, + link_types=None, + link_direction=None, +): """Query version links from server. Args: @@ -4403,10 +6292,20 @@ def get_version_links(*args, **kwargs): """ con = get_server_api_connection() - return con.get_version_links(*args, **kwargs) + return con.get_version_links( + project_name=project_name, + version_id=version_id, + link_types=link_types, + link_direction=link_direction, + ) -def get_representations_links(*args, **kwargs): +def get_representations_links( + project_name, + representation_ids=None, + link_types=None, + link_direction=None, +): """Query representations links from server. 
Args: @@ -4422,10 +6321,20 @@ def get_representations_links(*args, **kwargs): """ con = get_server_api_connection() - return con.get_representations_links(*args, **kwargs) + return con.get_representations_links( + project_name=project_name, + representation_ids=representation_ids, + link_types=link_types, + link_direction=link_direction, + ) -def get_representation_links(*args, **kwargs): +def get_representation_links( + project_name, + representation_id, + link_types=None, + link_direction=None, +): """Query representation links from server. Args: @@ -4441,10 +6350,20 @@ def get_representation_links(*args, **kwargs): """ con = get_server_api_connection() - return con.get_representation_links(*args, **kwargs) + return con.get_representation_links( + project_name=project_name, + representation_id=representation_id, + link_types=link_types, + link_direction=link_direction, + ) -def send_batch_operations(*args, **kwargs): +def send_batch_operations( + project_name, + operations, + can_fail=False, + raise_on_fail=True, +): """Post multiple CRUD operations to server. When multiple changes should be made on server side this is the best @@ -4471,4 +6390,9 @@ def send_batch_operations(*args, **kwargs): """ con = get_server_api_connection() - return con.send_batch_operations(*args, **kwargs) + return con.send_batch_operations( + project_name=project_name, + operations=operations, + can_fail=can_fail, + raise_on_fail=raise_on_fail, + ) From 51c245465f017e589b22d811fc36ed2cc9b4a111 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Thu, 21 Nov 2024 11:03:54 +0100 Subject: [PATCH 076/135] implemented '_prepare_fields' for resolving of fields for each entity type --- ayon_api/server_api.py | 111 +++++++++++++++++------------------------ 1 file changed, 46 insertions(+), 65 deletions(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index ea9637838..2769955e1 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -4178,35 +4178,6 @@ def _should_use_rest_project(self, fields=None): return True return False - def _prepare_project_fields(self, fields, own_attributes): - if "attrib" in fields: - fields.remove("attrib") - fields |= self.get_attributes_fields_for_type("project") - - if "folderTypes" in fields: - fields.remove("folderTypes") - fields |= { - "folderTypes.{}".format(name) - for name in self.get_default_fields_for_type("folderType") - } - - if "taskTypes" in fields: - fields.remove("taskTypes") - fields |= { - "taskTypes.{}".format(name) - for name in self.get_default_fields_for_type("taskType") - } - - if "productTypes" in fields: - fields.remove("productTypes") - fields |= { - "productTypes.{}".format(name) - for name in self.get_default_fields_for_type("productType") - } - - if own_attributes: - fields.add("ownAttrib") - def get_projects( self, active=True, library=None, fields=None, own_attributes=False ): @@ -4237,7 +4208,7 @@ def get_projects( yield project return - self._prepare_project_fields(fields, own_attributes) + self._prepare_fields("project", fields, own_attributes) query = projects_graphql_query(fields) for parsed_data in query.continuous_query(self): @@ -4271,7 +4242,7 @@ def get_project(self, project_name, fields=None, own_attributes=False): fill_own_attribs(project) return project - self._prepare_project_fields(fields, own_attributes) + self._prepare_fields("project", fields, own_attributes) query = project_graphql_query(fields) query.set_variable_value("projectName", project_name) @@ -4521,9 +4492,7 @@ def 
get_folders( fields = self.get_default_fields_for_type("folder") else: fields = set(fields) - if "attrib" in fields: - fields.remove("attrib") - fields |= self.get_attributes_fields_for_type("folder") + self._prepare_fields("folder", fields) use_rest = False if "data" in fields and not self.graphql_allows_data_in_query: @@ -4911,9 +4880,7 @@ def get_tasks( fields = self.get_default_fields_for_type("task") else: fields = set(fields) - if "attrib" in fields: - fields.remove("attrib") - fields |= self.get_attributes_fields_for_type("task") + self._prepare_fields("task", fields, own_attributes) use_rest = False if "data" in fields and not self.graphql_allows_data_in_query: @@ -4923,9 +4890,6 @@ def get_tasks( if active is not None: fields.add("active") - if own_attributes: - fields.add("ownAttrib") - query = tasks_graphql_query(fields) for attr, filter_value in filters.items(): query.set_variable_value(attr, filter_value) @@ -5078,9 +5042,7 @@ def get_tasks_by_folder_paths( fields = self.get_default_fields_for_type("task") else: fields = set(fields) - if "attrib" in fields: - fields.remove("attrib") - fields |= self.get_attributes_fields_for_type("task") + self._prepare_fields("task", fields, own_attributes) use_rest = False if "data" in fields and not self.graphql_allows_data_in_query: @@ -5090,9 +5052,6 @@ def get_tasks_by_folder_paths( if active is not None: fields.add("active") - if own_attributes: - fields.add("ownAttrib") - query = tasks_by_folder_paths_graphql_query(fields) for attr, filter_value in filters.items(): query.set_variable_value(attr, filter_value) @@ -5455,9 +5414,7 @@ def get_products( # Convert fields and add minimum required fields if fields: fields = set(fields) | {"id"} - if "attrib" in fields: - fields.remove("attrib") - fields |= self.get_attributes_fields_for_type("product") + self._prepare_fields("product", fields) else: fields = self.get_default_fields_for_type("product") @@ -5869,9 +5826,7 @@ def get_versions( fields = self.get_default_fields_for_type("version") else: fields = set(fields) - if "attrib" in fields: - fields.remove("attrib") - fields |= self.get_attributes_fields_for_type("version") + self._prepare_fields("version", fields) # Make sure fields have minimum required fields fields |= {"id", "version"} @@ -6508,11 +6463,7 @@ def get_representations( fields = self.get_default_fields_for_type("representation") else: fields = set(fields) - if "attrib" in fields: - fields.remove("attrib") - fields |= self.get_attributes_fields_for_type( - "representation" - ) + self._prepare_fields("representation", fields) use_rest = False if "data" in fields and not self.graphql_allows_data_in_query: @@ -7219,14 +7170,9 @@ def get_workfiles_info( if not fields: fields = self.get_default_fields_for_type("workfile") - - fields = set(fields) - if "attrib" in fields: - fields.remove("attrib") - fields |= { - "attrib.{}".format(attr) - for attr in self.get_attributes_for_type("workfile") - } + else: + fields = set(fields) + self._prepare_fields("workfile", fields) if own_attributes is not _PLACEHOLDER: warnings.warn( @@ -8445,6 +8391,41 @@ def send_batch_operations( )) return op_results + def _prepare_fields(self, entity_type, fields, own_attributes=False): + if not fields: + return fields + + if "attrib" in fields: + fields.remove("attrib") + fields |= self.get_attributes_fields_for_type(entity_type) + + if own_attributes and entity_type in {"project", "folder", "task"}: + fields.add("ownAttrib") + + if entity_type == "project": + if "folderTypes" in fields: + 
fields.remove("folderTypes") + fields |= { + "folderTypes.{}".format(name) + for name in self.get_default_fields_for_type("folderType") + } + + if "taskTypes" in fields: + fields.remove("taskTypes") + fields |= { + "taskTypes.{}".format(name) + for name in self.get_default_fields_for_type("taskType") + } + + if "productTypes" in fields: + fields.remove("productTypes") + fields |= { + "productTypes.{}".format(name) + for name in self.get_default_fields_for_type( + "productType" + ) + } + def _convert_entity_data(self, entity): if not entity or "data" not in entity: return From 0bd49db1d6bb02099036cc9ce43f97d82fe95698 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Thu, 21 Nov 2024 11:04:07 +0100 Subject: [PATCH 077/135] use the function in get_representations_hierarchy --- ayon_api/server_api.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 2769955e1..8191effc2 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -6669,6 +6669,7 @@ def get_representations_hierarchy( if project_fields is not None: project_fields = set(project_fields) + self._prepare_fields("project", project_fields) project = {} if project_fields is None: @@ -6716,6 +6717,15 @@ def get_representations_hierarchy( else: representation_fields = set(representation_fields) + for (entity_type, fields) in ( + ("folder", folder_fields), + ("task", task_fields), + ("product", product_fields), + ("version", version_fields), + ("representation", representation_fields), + ): + self._prepare_fields(entity_type, fields) + representation_fields.add("id") query = representations_hierarchy_qraphql_query( From ed724dac877844e3436d9a8587772cbacd1630d3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Thu, 21 Nov 2024 12:00:37 +0100 Subject: [PATCH 078/135] remove return of value --- ayon_api/server_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 8191effc2..3d6281046 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -8403,7 +8403,7 @@ def send_batch_operations( def _prepare_fields(self, entity_type, fields, own_attributes=False): if not fields: - return fields + return if "attrib" in fields: fields.remove("attrib") From 7d4647c7be40cc554313f26049dcbc52b05a15ea Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Thu, 21 Nov 2024 17:32:16 +0100 Subject: [PATCH 079/135] fix ownAttrib for product --- ayon_api/entity_hub.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index f0a01ef9e..857ed536f 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -3534,7 +3534,7 @@ def from_entity_data(cls, product, entity_hub): product_type=product["productType"], folder_id=product["folderId"], tags=product["tags"], - attribs=product["ownAttrib"], + attribs=product["attrib"], data=product.get("data"), active=product["active"], entity_id=product["id"], From 8d4d524a0060acf8950f9b57589ad051e5318eab Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Thu, 21 Nov 2024 17:33:18 +0100 Subject: [PATCH 080/135] removed 'own_attributes' from products getter --- ayon_api/entity_hub.py | 1 - 1 file changed, 1 deletion(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 857ed536f..7f366f2aa 100644 --- 
a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -289,7 +289,6 @@ def get_or_fetch_entity_by_id( self.project_name, entity_id, fields=self._get_product_fields(), - own_attributes=True ) elif entity_type == "version": entity_data = self._connection.get_version_by_id( From 7db794957593d9ed787c20fadb5d7c6fafbdddd0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Thu, 21 Nov 2024 17:33:47 +0100 Subject: [PATCH 081/135] swap arguments order --- ayon_api/entity_hub.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 7f366f2aa..c805546c9 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -348,10 +348,8 @@ def entities(self): def add_new_folder( self, - # TODO move 'folder_type' after 'name' - # - that will break backwards compatibility - folder_type: str, name: str, + folder_type: str, parent_id: Optional[str] = UNKNOWN_VALUE, label: Optional[str] = None, path: Optional[str] = None, @@ -410,10 +408,8 @@ def add_new_folder( def add_new_task( self, - # TODO move 'folder_type' after 'name' - # - that will break backwards compatibility - task_type: str, name: str, + task_type: str, folder_id: Optional[str] = UNKNOWN_VALUE, label: Optional[str] = None, status: Optional[str] = UNKNOWN_VALUE, From f25ef18058e463216c1b1f5496ac8a867a6e705d Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 22 Nov 2024 13:51:02 +0100 Subject: [PATCH 082/135] use correct default values for created --- ayon_api/entity_hub.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index c805546c9..27f5e3ee0 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -360,7 +360,7 @@ def add_new_folder( thumbnail_id: Optional[str] = UNKNOWN_VALUE, active: bool = UNKNOWN_VALUE, entity_id: Optional[str] = None, - created: Optional[bool] = None, + created: Optional[bool] = True, ): """Create folder object and add it to entity hub. @@ -420,7 +420,7 @@ def add_new_task( thumbnail_id: Optional[str] = UNKNOWN_VALUE, active: Optional[bool] = UNKNOWN_VALUE, entity_id: Optional[str] = None, - created: Optional[bool] = None, + created: Optional[bool] = True, parent_id: Optional[str] = UNKNOWN_VALUE, ): """Create task object and add it to entity hub. From 59b1e14526151e6b5eff038d57ef8e1cb1f77a5b Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 22 Nov 2024 13:53:38 +0100 Subject: [PATCH 083/135] fix creted for product and version --- ayon_api/entity_hub.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 27f5e3ee0..e9a5197f5 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -485,7 +485,7 @@ def add_new_product( data: Optional[Dict[str, Any]] = UNKNOWN_VALUE, active: Optional[bool] = UNKNOWN_VALUE, entity_id: Optional[str] = None, - created: Optional[bool] = None, + created: Optional[bool] = True, ): """Create task object and add it to entity hub. @@ -533,7 +533,7 @@ def add_new_version( thumbnail_id: Optional[str] = UNKNOWN_VALUE, active: Optional[bool] = UNKNOWN_VALUE, entity_id: Optional[str] = None, - created: Optional[bool] = None, + created: Optional[bool] = True, ): """Create task object and add it to entity hub. 
From 55bdf37789bd1fb05ca718448a6fcde64b7f83cc Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 22 Nov 2024 13:59:58 +0100 Subject: [PATCH 084/135] add thumbnail id to task fields --- ayon_api/constants.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ayon_api/constants.py b/ayon_api/constants.py index 594155706..346620c77 100644 --- a/ayon_api/constants.py +++ b/ayon_api/constants.py @@ -84,6 +84,7 @@ "taskType", "folderId", "active", + "thumbnailId", "assignees", "data", "status", From db3398c53872f9136f65ad3d626f95aeb4e8ee88 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 22 Nov 2024 17:01:28 +0100 Subject: [PATCH 085/135] moved types to separate file --- ayon_api/_typing.py | 19 +++++++++++++++++++ ayon_api/server_api.py | 19 +------------------ 2 files changed, 20 insertions(+), 18 deletions(-) create mode 100644 ayon_api/_typing.py diff --git a/ayon_api/_typing.py b/ayon_api/_typing.py new file mode 100644 index 000000000..566f598ab --- /dev/null +++ b/ayon_api/_typing.py @@ -0,0 +1,19 @@ +from typing import Literal + +ActivityType = Literal[ + "comment", + "watch", + "reviewable", + "status.change", + "assignee.add", + "assignee.remove", + "version.publish" +] + +ActivityReferenceType = Literal[ + "origin", + "mention", + "author", + "relation", + "watching", +] diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index d8a0317a4..f320a0f43 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -99,24 +99,7 @@ ) if typing.TYPE_CHECKING: - from typing import Literal - - ActivityType = Literal[ - "comment", - "watch", - "reviewable", - "status.change", - "assignee.add", - "assignee.remove", - "version.publish" - ] - ActivityReferenceType = Literal[ - "origin", - "mention", - "author", - "relation", - "watching", - ] + from ._typing import ActivityType, ActivityReferenceType PatternType = type(re.compile("")) JSONDecodeError = getattr(json, "JSONDecodeError", ValueError) From 23c6d5e11b55690f8fe17ddb991ca90d9ec59327 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 22 Nov 2024 17:02:16 +0100 Subject: [PATCH 086/135] enhanced automated api to work correctly for return type --- automated_api.py | 38 +++++++++++++++++++++++++++----------- 1 file changed, 27 insertions(+), 11 deletions(-) diff --git a/automated_api.py b/automated_api.py index ee5e661d6..9b77af287 100644 --- a/automated_api.py +++ b/automated_api.py @@ -114,25 +114,40 @@ def prepare_docstring(func): return f'"""{docstring}{line_char}\n"""' -def _get_typehint(param, api_globals): - if param.annotation is inspect.Parameter.empty: - return None - - an = param.annotation - if inspect.isclass(an): - return an.__name__ +def _get_typehint(annotation, api_globals): + if inspect.isclass(annotation): + return annotation.__name__ + + typehint = ( + str(annotation) + .replace("typing.", "") + .replace("NoneType", "None") + ) + forwardref_regex = re.compile( + "(?PForwardRef\('(?P[a-zA-Z0-9]+)'\))" + ) + for item in forwardref_regex.finditer(str(typehint)): + groups = item.groupdict() + name = groups["name"] + typehint = typehint.replace(groups["full"], f'"{name}"') - typehint = str(an).replace("typing.", "") try: # Test if typehint is valid for known '_api' content exec(f"_: {typehint} = None", api_globals) except NameError: + print("Unknown typehint:", typehint) typehint = f'"{typehint}"' return typehint +def _get_param_typehint(param, api_globals): + if 
param.annotation is inspect.Parameter.empty: + return None + return _get_typehint(param.annotation, api_globals) + + def _add_typehint(param_name, param, api_globals): - typehint = _get_typehint(param, api_globals) + typehint = _get_param_typehint(param, api_globals) if not typehint: return param_name return f"{param_name}: {typehint}" @@ -154,7 +169,7 @@ def _kw_default_to_str(param_name, param, api_globals): raise TypeError("Unknown default value type") else: default = repr(default) - typehint = _get_typehint(param, api_globals) + typehint = _get_param_typehint(param, api_globals) if typehint: return f"{param_name}: {typehint} = {default}" return f"{param_name}={default}" @@ -216,7 +231,8 @@ def sig_params_to_str(sig, param_names, api_globals, indent=0): func_params_str = f"(\n{lines_str}\n{base_indent_str})" if sig.return_annotation is not inspect.Signature.empty: - func_params_str += f" -> {sig.return_annotation}" + return_typehint = _get_typehint(sig.return_annotation, api_globals) + func_params_str += f" -> {return_typehint}" body_params_str = "()" if body_params: From 9a9f3d63558c389d7433409a0fd31bc1bdcb52ad Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 22 Nov 2024 17:02:28 +0100 Subject: [PATCH 087/135] apply changes in _api.py --- ayon_api/_api.py | 34 +++++++++++++++++++--------------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/ayon_api/_api.py b/ayon_api/_api.py index aa97825f1..ec7b36a18 100644 --- a/ayon_api/_api.py +++ b/ayon_api/_api.py @@ -11,7 +11,8 @@ import os import socket -from typing import Optional +import typing +from typing import Optional, List, Dict, Iterable, Generator, Any from .constants import ( SERVER_URL_ENV_KEY, @@ -24,6 +25,9 @@ get_default_settings_variant as _get_default_settings_variant, ) +if typing.TYPE_CHECKING: + from ._typing import ActivityType, ActivityReferenceType + class GlobalServerAPI(ServerAPI): """Extended server api which also handles storing tokens and url. @@ -1123,16 +1127,16 @@ def enroll_event_job( def get_activities( project_name: str, - activity_ids: "Optional[Iterable[str]]" = None, - activity_types: "Optional[Iterable[ForwardRef('ActivityType')]]" = None, - entity_ids: "Optional[Iterable[str]]" = None, - entity_names: "Optional[Iterable[str]]" = None, + activity_ids: Optional[Iterable[str]] = None, + activity_types: Optional[Iterable["ActivityType"]] = None, + entity_ids: Optional[Iterable[str]] = None, + entity_names: Optional[Iterable[str]] = None, entity_type: Optional[str] = None, changed_after: Optional[str] = None, changed_before: Optional[str] = None, - reference_types: "Optional[Iterable[ForwardRef('ActivityReferenceType')]]" = None, - fields: "Optional[Iterable[str]]" = None, -) -> typing.Generator[typing.Dict[str, typing.Any], NoneType, NoneType]: + reference_types: Optional[Iterable["ActivityReferenceType"]] = None, + fields: Optional[Iterable[str]] = None, +) -> Generator[Dict[str, Any], None, None]: """Get activities from server with filtering options. Args: @@ -1173,9 +1177,9 @@ def get_activities( def get_activity_by_id( project_name: str, activity_id: str, - reference_types: "Optional[Iterable[ForwardRef('ActivityReferenceType')]]" = None, - fields: "Optional[Iterable[str]]" = None, -) -> typing.Optional[typing.Dict[str, typing.Any]]: + reference_types: Optional[Iterable["ActivityReferenceType"]] = None, + fields: Optional[Iterable[str]] = None, +) -> Optional[Dict[str, Any]]: """Get activity by id. 
Args: @@ -1205,9 +1209,9 @@ def create_activity( activity_type: "ActivityType", activity_id: Optional[str] = None, body: Optional[str] = None, - file_ids: "Optional[List[str]]" = None, + file_ids: Optional[List[str]] = None, timestamp: Optional[str] = None, - data: "Optional[Dict[str, Any]]" = None, + data: Optional[Dict[str, Any]] = None, ): """Create activity on a project. @@ -1245,9 +1249,9 @@ def update_activity( project_name: str, activity_id: str, body: Optional[str] = None, - file_ids: "Optional[List[str]]" = None, + file_ids: Optional[List[str]] = None, append_file_ids: Optional[bool] = False, - data: "Optional[Dict[str, Any]]" = None, + data: Optional[Dict[str, Any]] = None, ): """Update activity by id. From 3d7fcaaea8a5613f08e1146a82042fcdaaae6986 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 22 Nov 2024 17:04:55 +0100 Subject: [PATCH 088/135] change return from create activity --- ayon_api/_api.py | 4 ++-- ayon_api/server_api.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/ayon_api/_api.py b/ayon_api/_api.py index ec7b36a18..b1af5f6cd 100644 --- a/ayon_api/_api.py +++ b/ayon_api/_api.py @@ -1212,7 +1212,7 @@ def create_activity( file_ids: Optional[List[str]] = None, timestamp: Optional[str] = None, data: Optional[Dict[str, Any]] = None, -): +) -> str: """Create activity on a project. Args: @@ -1228,7 +1228,7 @@ def create_activity( data (Optional[Dict[str, Any]]): Additional data. Returns: - Dict[str, str]: Data with activity id. + str: Activity id. """ con = get_server_api_connection() diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index f320a0f43..e4e60c58b 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -1901,7 +1901,7 @@ def create_activity( file_ids: Optional[List[str]] = None, timestamp: Optional[str] = None, data: Optional[Dict[str, Any]] = None, - ): + ) -> str: """Create activity on a project. Args: @@ -1917,7 +1917,7 @@ def create_activity( data (Optional[Dict[str, Any]]): Additional data. Returns: - Dict[str, str]: Data with activity id. + str: Activity id. 
""" post_data = { @@ -1938,7 +1938,7 @@ def create_activity( **post_data ) response.raise_for_status() - return response.data + return response.data["id"] def update_activity( self, From a9615fc87fca3fc4b477c9f2a4ec598435aa6309 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 22 Nov 2024 19:10:54 +0100 Subject: [PATCH 089/135] excape regex with 'r' --- automated_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated_api.py b/automated_api.py index 9b77af287..a764663c2 100644 --- a/automated_api.py +++ b/automated_api.py @@ -124,7 +124,7 @@ def _get_typehint(annotation, api_globals): .replace("NoneType", "None") ) forwardref_regex = re.compile( - "(?PForwardRef\('(?P[a-zA-Z0-9]+)'\))" + r"(?PForwardRef\('(?P[a-zA-Z0-9]+)'\))" ) for item in forwardref_regex.finditer(str(typehint)): groups = item.groupdict() From 9813f059c8510666411bcc607e8f6a188f26a3e7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 25 Nov 2024 18:54:23 +0100 Subject: [PATCH 090/135] fix as username stack --- ayon_api/server_api.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index ac7f8d2a3..c0dc0054f 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -859,12 +859,12 @@ def as_username(self, username): "Can't set service username. API key is not a service token." ) - with self._as_user_stack.as_user(username) as o: - self._update_session_headers() - try: - yield o - finally: + try: + with self._as_user_stack.as_user(username) as o: self._update_session_headers() + yield o + finally: + self._update_session_headers() @property def is_server_available(self): From 2c266e0e24c4dca9053d41a6b94565718bdcc32c Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 25 Nov 2024 18:55:16 +0100 Subject: [PATCH 091/135] raise an error if ignore sender types are not supported --- ayon_api/server_api.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index ac7f8d2a3..03dd8a418 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -1761,9 +1761,13 @@ def enroll_event_job( kwargs["ignoreOlderThan"] = ignore_older_than if ( ignore_sender_types is not None - and (major, minor, patch) > (1, 5, 4) ): - kwargs["ignoreSenderTypes"] = ignore_sender_types + if (major, minor, patch) > (1, 5, 4): + raise ValueError( + "Ignore sender types are not supported for" + f" your version of server {self.server_version}." 
+ ) + kwargs["ignoreSenderTypes"] = list(ignore_sender_types) response = self.post("enroll", **kwargs) if response.status_code == 204: From b8d1536b3fa785de0f705b8bacc0c6ce7e0a1808 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 25 Nov 2024 18:57:28 +0100 Subject: [PATCH 092/135] formatting changes --- ayon_api/server_api.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 03dd8a418..098ffb6f8 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -1759,9 +1759,7 @@ def enroll_event_job( and (major, minor, patch) > (1, 5, 1) ): kwargs["ignoreOlderThan"] = ignore_older_than - if ( - ignore_sender_types is not None - ): + if ignore_sender_types is not None: if (major, minor, patch) > (1, 5, 4): raise ValueError( "Ignore sender types are not supported for" From 1b53da9f7503e079d0f72101670a01f02ab7c2a8 Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Tue, 26 Nov 2024 14:08:20 +0100 Subject: [PATCH 093/135] EntityHub --- tests/test_entity_hub.py | 501 +++++++++++++++++++++++++++++++++++---- 1 file changed, 449 insertions(+), 52 deletions(-) diff --git a/tests/test_entity_hub.py b/tests/test_entity_hub.py index 896349d7f..6b79c6dcd 100644 --- a/tests/test_entity_hub.py +++ b/tests/test_entity_hub.py @@ -1,9 +1,12 @@ +from traceback import print_exc import uuid +from requests import delete + import pytest import ayon_api -from ayon_api.entity_hub import EntityHub +from ayon_api.entity_hub import EntityHub, UNKNOWN_VALUE from .conftest import project_entity_fixture @@ -11,29 +14,29 @@ def test_rename_status(project_entity_fixture): # Change statuses - add prefix 'new_' project_name = project_entity_fixture["name"] - e = EntityHub(project_name) + hub = EntityHub(project_name) status_mapping = {} - for status in e.project_entity.statuses: + for status in hub.project_entity.statuses: orig_name = status.name new_name = f"new_{orig_name}" status_mapping[new_name] = orig_name status.name = new_name - e.commit_changes() + hub.commit_changes() # Create new entity hub for same project and validate the changes # are propagated - e = EntityHub(project_name) + hub = EntityHub(project_name) statuses_by_name = { status.name: status - for status in e.project_entity.statuses + for status in hub.project_entity.statuses } if set(statuses_by_name) != set(status_mapping.keys()): raise AssertionError("Statuses were not renamed correctly.") # Change statuses back - for status in e.project_entity.statuses: + for status in hub.project_entity.statuses: status.name = status_mapping[status.name] - e.commit_changes() + hub.commit_changes() @pytest.mark.parametrize( @@ -51,51 +54,51 @@ def test_simple_operations( """Test of simple operations with folders - create, move, delete. 
""" project_name = project_entity_fixture["name"] - e = EntityHub(project_name) + hub = EntityHub(project_name) folders = [] subfolders = [] # create folders with subfolder for folder_number in range(folders_count): - folder = e.add_new_folder( + folder = hub.add_new_folder( "Folder", name=f"{folder_name}{folder_number:03}" ) folders.append(folder) - e.commit_changes() + hub.commit_changes() - subfolder = e.add_new_folder( + subfolder = hub.add_new_folder( "Folder", name=f"{folder_name}{folder_number:03}", parent_id=folder["id"] ) subfolders.append(subfolder) - e.commit_changes() + hub.commit_changes() # move subfolders for index, subfolder in enumerate(subfolders): new_parent_id = folders[(index + 1) % folders_count]["id"] - e.set_entity_parent( + hub.set_entity_parent( subfolder["id"], new_parent_id, subfolder["parent_id"]) - e.commit_changes() + hub.commit_changes() - assert e.get_entity_by_id( + assert hub.get_entity_by_id( subfolder["id"] )["parent_id"] == new_parent_id # delete subfolders for subfolder in subfolders: - e.delete_entity(e.get_entity_by_id(subfolder["id"])) - e.commit_changes() - assert e.get_entity_by_id(subfolder["id"]) is None + hub.delete_entity(hub.get_entity_by_id(subfolder["id"])) + hub.commit_changes() + assert hub.get_entity_by_id(subfolder["id"]) is None # delete folders for folder in folders: - e.delete_entity(e.get_entity_by_id(folder["id"])) - e.commit_changes() - assert e.get_entity_by_id(folder["id"]) is None + hub.delete_entity(hub.get_entity_by_id(folder["id"])) + hub.commit_changes() + assert hub.get_entity_by_id(folder["id"]) is None def test_custom_values_on_entities(project_entity_fixture): @@ -110,7 +113,8 @@ def test_custom_values_on_entities(project_entity_fixture): # --- CREATE --- folder_type = project_entity_fixture["folderTypes"][-1]["name"] root_folder = hub.add_new_folder( - folder_type, name="custom_values_root_folder" + folder_type=folder_type, + name="custom_values_root_folder" ) folder_id = uuid.uuid1().hex @@ -132,7 +136,7 @@ def test_custom_values_on_entities(project_entity_fixture): task_data = {"MyTaskKey": "MyTaskValue"} folder = hub.add_new_folder( - folder_type, + folder_type=folder_type, name=folder_name, label=folder_label, parent_id=root_folder.id, @@ -144,7 +148,7 @@ def test_custom_values_on_entities(project_entity_fixture): task_type = project_entity_fixture["taskTypes"][-1]["name"] task = hub.add_new_task( - task_type, + task_type=task_type, name=task_name, label=task_label, parent_id=folder.id, @@ -270,7 +274,8 @@ def test_label_eq_name_on_entities(project_entity_fixture): folder_type = project_entity_fixture["folderTypes"][-1]["name"] root_folder = hub.add_new_folder( - folder_type, name="label_eq_name_root_folder" + folder_type=folder_type, + name="label_eq_name_root_folder" ) folder_id = uuid.uuid1().hex @@ -282,7 +287,7 @@ def test_label_eq_name_on_entities(project_entity_fixture): task_label = "my_task" folder = hub.add_new_folder( - folder_type, + folder_type=folder_type, name=folder_name, label=folder_label, parent_id=root_folder.id, @@ -291,7 +296,7 @@ def test_label_eq_name_on_entities(project_entity_fixture): task_type = project_entity_fixture["taskTypes"][-1]["name"] task = hub.add_new_task( - task_type, + task_type=task_type, name=task_name, label=task_label, parent_id=folder.id, @@ -325,7 +330,8 @@ def test_data_changes_on_entities(project_entity_fixture): folder_type = project_entity_fixture["folderTypes"][-1]["name"] root_folder = hub.add_new_folder( - folder_type, name="data_changes_on_entities" + 
folder_type=folder_type, + name="data_changes_on_entities" ) folder_id = uuid.uuid1().hex @@ -337,7 +343,7 @@ def test_data_changes_on_entities(project_entity_fixture): task_data = {"key2": "value2"} folder = hub.add_new_folder( - folder_type, + folder_type=folder_type, name=folder_name, data=folder_data, parent_id=root_folder.id, @@ -346,7 +352,7 @@ def test_data_changes_on_entities(project_entity_fixture): task_type = project_entity_fixture["taskTypes"][-1]["name"] task = hub.add_new_task( - task_type, + task_type=task_type, name=task_name, data=task_data, parent_id=folder.id, @@ -405,7 +411,7 @@ def test_label_eq_name_on_entities(project_entity_fixture): folder_id = uuid.uuid1().hex task_id = uuid.uuid1().hex folder = hub.add_new_folder( - folder_type, + folder_type=folder_type, name="status_root_folder", entity_id=folder_id, status=init_status_name, @@ -414,7 +420,7 @@ def test_label_eq_name_on_entities(project_entity_fixture): task_name = "my_task" task_label = "my_task" task = hub.add_new_task( - task_type, + task_type=task_type, name=task_name, label=task_label, parent_id=folder.id, @@ -491,39 +497,39 @@ def test_create_delete_with_duplicated_names( Exception should not be raised. """ project_name = project_entity_fixture["name"] - e = EntityHub(project_name) + hub = EntityHub(project_name) - folder1 = e.add_new_folder("Folder", name=folder_name) + folder1 = hub.add_new_folder("Folder", name=folder_name) subfolders = [] for folder_number in range(num_of_subfolders): - subfolder = e.add_new_folder( + subfolder = hub.add_new_folder( "Folder", parent_id=folder1["id"], name=f"{subfolder_name}{folder_number:03}" ) subfolders.append(subfolder) - e.commit_changes() + hub.commit_changes() # create and delete folder with same name - subfolder = e.add_new_folder( + subfolder = hub.add_new_folder( "Folder", parent_id=folder1["id"], name=f"{subfolder_name}{folder_number:03}" ) - e.delete_entity(subfolder) - e.commit_changes() + hub.delete_entity(subfolder) + hub.commit_changes() - assert e.get_folder_by_id(project_name, folder1["id"]) is not None + assert hub.get_folder_by_id(project_name, folder1["id"]) is not None for subfolder in subfolders: - assert e.get_folder_by_id( + assert hub.get_folder_by_id( project_name, subfolder["id"]) is not None # clean up - e.delete_entity(folder1) - e.commit_changes() + hub.delete_entity(folder1) + hub.commit_changes() # @pytest.mark.parametrize( @@ -726,15 +732,14 @@ def test_create_delete_with_duplicated_names( # with pytest.raises(HTTPRequestError): # e.commit_changes() # # print(list(e.project_entity.statuses)[0]) -# -# -# def test_rename_status(): -# e = EntityHub(PROJECT_NAME) -# -# for status in e.project_entity.statuses: + + +# def test_rename_status(project_entity_fixture): +# hub = EntityHub(project_entity_fixture["name"]) + +# for status in hub.project_entity.statuses: # print(status.name) -# -# + # def test_task_types(): # raise NotImplementedError() # @@ -746,3 +751,395 @@ def test_create_delete_with_duplicated_names( # # def test_status_icon(): # raise NotImplementedError() + + +# def test_project_statuses(project_entity_fixture): +# statuses = project_entity_fixture.get_statuses() +# pass + +test_names = [ + ("test_name"), + # ("test_123"), +] + +test_product_types = [ + ("animation"), + ("camera"), + ("render"), + ("workfile"), +] + +@pytest.mark.parametrize("folder_name", test_names) +@pytest.mark.parametrize("product_name", test_names) +@pytest.mark.parametrize("product_type", test_product_types) +def test_create_delete_products( + 
project_entity_fixture, + folder_name, + product_name, + product_type +): + project_name = project_entity_fixture["name"] + folder_type = project_entity_fixture["folderTypes"][0]["name"] + hub = EntityHub(project_name) + + for folder in ayon_api.get_folders( + project_name, + folder_names=[folder_name] + ): + # delete tasks + for task in ayon_api.get_tasks( + project_name, + folder_ids=[folder["id"]] + ): + hub.delete_entity(hub.get_task_by_id(task["id"])) + + # delete products + for product in list(ayon_api.get_products( + project_name, folder_ids=[folder["id"]] + )): + product_entity = hub.get_product_by_id(product["id"]) + hub.delete_entity(product_entity) + + entity = hub.get_folder_by_id(folder["id"]) + hub.delete_entity(entity) + + hub.commit_changes() + + folder = hub.add_new_folder( + folder_type=folder_type, + name=folder_name, + ) + + product = hub.add_new_product( + name=product_name, + product_type=product_type, + folder_id=folder["id"] + ) + + hub.commit_changes() + + assert hub.get_product_by_id(product["id"]) + assert product.get_name() == product_name + assert product.get_product_type() == product_type + assert product.get_folder_id() == folder["id"] + + # bonus test: + # create new entity hub for same project and validate the changes + # are propagated + hub = EntityHub(project_name) + product = hub.get_product_by_id(product["id"]) + assert product.get_name() == product_name + assert product.get_product_type() == product_type + assert product.get_folder_id() == folder["id"] + + +@pytest.mark.parametrize("name", test_names) +def test_create_delete_folders(project_entity_fixture, name): + project_name = project_entity_fixture["name"] + folder_types = [ + type["name"] for type in project_entity_fixture["folderTypes"] + ] + + hub = EntityHub(project_name) + + folder = hub.add_new_folder( + folder_type=folder_types[0], + name=name, + ) + + hub.commit_changes() + + assert ayon_api.get_folders( + project_name, + folder_names=[name], + folder_types=folder_types[0:1], + folder_ids=[folder["id"]] + ) + + for folder in ayon_api.get_folders( + project_name, + folder_names=[name] + ): + # delete tasks + for task in ayon_api.get_tasks( + project_name, + folder_ids=[folder["id"]] + ): + hub.delete_entity(hub.get_task_by_id(task["id"])) + + entity = hub.get_folder_by_id(folder["id"]) + + for id in entity.children_ids: + hub.delete_entity(hub.get_entity_by_id(id)) + + hub.delete_entity(entity) + + hub.commit_changes() + + # new folder + folder = hub.add_new_folder( + folder_type=folder_types[1], + name=name, + ) + + hub.commit_changes() + + assert ayon_api.get_folders( + project_name, + folder_names=[name], + folder_types=folder_types[1:2], + folder_ids=[folder["id"]] + ) + + +test_version_numbers = [ + ([1, 2, 3, 4]) +] + + +@pytest.mark.parametrize("version_numbers", test_version_numbers) +def test_create_delete_versions(project_entity_fixture, version_numbers): + # prepare hierarchy + folder_types = [ + type["name"] for type in project_entity_fixture["folderTypes"] + ] + hub = EntityHub(project_entity_fixture["name"]) + + folder = hub.add_new_folder( + folder_type=folder_types[0], + name="test_folder", + ) + + product = hub.add_new_product( + name="test_product", + product_type="animation", + folder_id=folder["id"] + ) + + assert product.get_children_ids() == set() + + # add + versions = [] + for version in version_numbers: + versions.append( + hub.add_new_version( + version, + product["id"] + ) + ) + + hub.commit_changes() + + res = product.get_children_ids() + + assert len(versions) 
== len(res) + for version in versions: + assert version + assert hub.get_version_by_id(version["id"]) + assert version["id"] in res + + # delete + hub.delete_entity(hub.get_version_by_id(version["id"])) + hub.commit_changes() + + assert hub.get_version_by_id(version["id"]) is None + # assert + + +test_invalid_version_number = [ + ("a"), + (None), + ("my_version_number") +] + + +@pytest.mark.parametrize("version_number", test_invalid_version_number) +def test_create_invalid_versions(project_entity_fixture, version_number): + # prepare hierarchy + folder_types = [ + type["name"] for type in project_entity_fixture["folderTypes"] + ] + hub = EntityHub(project_entity_fixture["name"]) + + folder = hub.add_new_folder( + folder_type=folder_types[0], + name="test_folder", + ) + + product = hub.add_new_product( + name="test_product", + product_type="animation", + folder_id=folder["id"] + ) + + assert product.get_children_ids() == set() + + hub.add_new_version( + version_number, + product["id"] + ) + + with pytest.raises(ayon_api.exceptions.FailedOperations): + hub.commit_changes() + + +def test_change_status_on_version(project_entity_fixture): + folder_types = [ + type["name"] for type in project_entity_fixture["folderTypes"] + ] + status_names = [ + status["name"] + for status in project_entity_fixture["statuses"] + if "version" in status["scope"] + ] + + hub = EntityHub(project_entity_fixture["name"]) + + folder = hub.add_new_folder( + folder_type=folder_types[0], + name="test_folder", + ) + + product = hub.add_new_product( + name="test_product", + product_type="animation", + folder_id=folder["id"] + ) + + version = hub.add_new_version( + 1, + product["id"] + ) + + hub.commit_changes + + for status_name in status_names: + version.set_status(status_name) + hub.commit_changes() + + assert version.get_status() == status_name + + +@pytest.mark.parametrize("version", test_version_numbers) +def test_set_invalid_status_on_version(project_entity_fixture, version): + folder_types = [ + type["name"] for type in project_entity_fixture["folderTypes"] + ] + valid_names = [ + status["name"] + for status in project_entity_fixture["statuses"] + if "version" in status["scope"] + ] + invalid_names = [ + status["name"] + for status in project_entity_fixture["statuses"] + if "version" not in status["scope"] + ] + + hub = EntityHub(project_entity_fixture["name"]) + + folder = hub.add_new_folder( + folder_type=folder_types[0], + name="test_folder", + ) + + product = hub.add_new_product( + name="test_product", + product_type="animation", + folder_id=folder["id"] + ) + + version = hub.add_new_version( + 1, + product["id"] + ) + + # test on version without status + for status_name in invalid_names: + with pytest.raises(ValueError): + version.set_status(status_name) + hub.commit_changes() + + assert version.get_status() == UNKNOWN_VALUE + + # test valid statuses + for status_name in valid_names: + version.set_status(status_name) + hub.commit_changes() + + assert version.get_status() == status_name + + current_status = version.get_status() + + # test on version with status + for status_name in invalid_names: + with pytest.raises(ValueError): + version.set_status(status_name) + hub.commit_changes() + + assert version.get_status() == current_status + + +test_tags = [ + (["tag1", "tag2", "tag3"]), + (["tag4"]), + (["tag5", "tag6"]), +] + + +@pytest.mark.parametrize("tags", test_tags) +def test_set_tag_on_version(project_entity_fixture, tags): + folder_types = [ + type["name"] for type in project_entity_fixture["folderTypes"] + 
] + + + hub = EntityHub(project_entity_fixture["name"]) + + folder = hub.add_new_folder( + folder_type=folder_types[0], + name="test_folder", + ) + + product = hub.add_new_product( + name="test_product", + product_type="animation", + folder_id=folder["id"] + ) + + version = hub.add_new_version( + 1, + product["id"] + ) + + assert version.get_tags() == [] + + for tag in tags: + version.set_tags([tag]) + hub.commit_changes() + + assert tag in version.get_tags() + + +def test_set_invalid_tag_on_version(): + raise NotImplementedError() + + +def test_status_definition_on_project(project_entity_fixture): + hub = EntityHub(project_entity_fixture["name"]) + + project = hub.project_entity + project.status = "test_status" + print(project.status) + + # project.set_status() + # project_status_obj = hub.project_entity.get_statuses() + # project_status_obj.set_state() + # print(type(project_status_obj), project_status_obj) + + +# definice status na projects +# zmena statusu a tagu na entitach - verzich +# vytvareni a mazani produktu a verzi + + From b62ac244f1281db77a706fa17fccb4908cff4f11 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 26 Nov 2024 14:15:57 +0100 Subject: [PATCH 094/135] fix args --- tests/test_entity_hub.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/test_entity_hub.py b/tests/test_entity_hub.py index 6b79c6dcd..8f2383ecb 100644 --- a/tests/test_entity_hub.py +++ b/tests/test_entity_hub.py @@ -499,12 +499,12 @@ def test_create_delete_with_duplicated_names( project_name = project_entity_fixture["name"] hub = EntityHub(project_name) - folder1 = hub.add_new_folder("Folder", name=folder_name) + folder1 = hub.add_new_folder(folder_type="Folder", name=folder_name) subfolders = [] for folder_number in range(num_of_subfolders): subfolder = hub.add_new_folder( - "Folder", + folder_type="Folder", parent_id=folder1["id"], name=f"{subfolder_name}{folder_number:03}" ) @@ -513,7 +513,7 @@ def test_create_delete_with_duplicated_names( # create and delete folder with same name subfolder = hub.add_new_folder( - "Folder", + folder_type="Folder", parent_id=folder1["id"], name=f"{subfolder_name}{folder_number:03}" ) @@ -546,7 +546,7 @@ def test_create_delete_with_duplicated_names( # """ # e = EntityHub(PROJECT_NAME) # -# parent_folder = e.add_new_folder("Folder", name=folder_name) +# parent_folder = e.add_new_folder(folder_type="Folder", name=folder_name) # e.commit_changes() # # @@ -554,14 +554,14 @@ def test_create_delete_with_duplicated_names( # subfolders = [] # for folder_number in range(2): # folder = e.add_new_folder( -# "Folder", +# folder_type="Folder", # name=f"test{folder_number:03}", # parent_id=parent_folder["id"] # ) # folders.append(folder) # # subfolder = e.add_new_folder( -# "Folder", +# folder_type="Folder", # name="duplicated", # parent_id=folder["id"] # ) @@ -628,14 +628,14 @@ def test_create_delete_with_duplicated_names( # e = EntityHub(PROJECT_NAME) # # parent_folder = e.add_new_folder( -# "Folder", +# folder_type="Folder", # name=parent_folder_name # ) # # folder_ids = [] # for folder_number in range(num_of_folders): # folder = e.add_new_folder( -# "Folder", +# folder_type="Folder", # parent_id=parent_folder["id"], # name=f"{folder_name}{folder_number:03}" # ) @@ -645,7 +645,7 @@ def test_create_delete_with_duplicated_names( # for folder_id in folder_ids: # for subfolder_number in range(num_of_subfolders): # subfolder = e.add_new_folder( -# "Folder", +# folder_type="Folder", # 
parent_id=folder_id, # name=f"{subfolder_name}{subfolder_number:03}" # ) From 4acd5135eaf6d7aec49ac1bd0cbf2d56c20aeba0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 26 Nov 2024 14:22:13 +0100 Subject: [PATCH 095/135] keep backwards compatibility for parent id --- ayon_api/entity_hub.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index e9a5197f5..02d13129a 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -3312,7 +3312,18 @@ def __init__( entity_id: Optional[str] = None, created: Optional[bool] = None, entity_hub: EntityHub = None, + parent_id: Optional[str] = UNKNOWN_VALUE, ): + if folder_id is UNKNOWN_VALUE and parent_id is not UNKNOWN_VALUE: + warnings.warn( + ( + "DEV WARNING: Used 'parent_id' instead of 'folder_id' in" + " TaskEntity. Please use 'folder_id' instead." + ), + DeprecationWarning + ) + folder_id = parent_id + super().__init__( name=name, parent_id=folder_id, From 7f309c00b785f8f5e5ba8fee10ebf7a448267a6e Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 26 Nov 2024 14:24:12 +0100 Subject: [PATCH 096/135] revert modifications --- ayon_api/entity_hub.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 02d13129a..5a66004d5 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -3286,6 +3286,8 @@ class TaskEntity(BaseEntity): value is defined based on value of 'entity_id'. entity_hub (EntityHub): Object of entity hub which created object of the entity. + parent_id (Union[str, None]): DEPRECATED please use 'folder_id' + instead. """ _supports_name = True @@ -3312,18 +3314,7 @@ def __init__( entity_id: Optional[str] = None, created: Optional[bool] = None, entity_hub: EntityHub = None, - parent_id: Optional[str] = UNKNOWN_VALUE, ): - if folder_id is UNKNOWN_VALUE and parent_id is not UNKNOWN_VALUE: - warnings.warn( - ( - "DEV WARNING: Used 'parent_id' instead of 'folder_id' in" - " TaskEntity. Please use 'folder_id' instead." - ), - DeprecationWarning - ) - folder_id = parent_id - super().__init__( name=name, parent_id=folder_id, From f5994a830fe23817e8b9e63430acebb3646d22a0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 26 Nov 2024 14:24:19 +0100 Subject: [PATCH 097/135] changed docstring --- ayon_api/entity_hub.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 5a66004d5..c4d7c46de 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -3271,7 +3271,7 @@ class TaskEntity(BaseEntity): name (str): Name of entity. task_type (str): Type of task. Task type must be available in config of project task types. - parent_id (Union[str, None]): Id of parent entity. + folder_id (Union[str, None]): Parent folder id. label (Optional[str]): Task label. status (Optional[str]): Task status. tags (Optional[Iterable[str]]): Folder tags. 
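
The tests added in PATCH 093 above exercise product and version creation through EntityHub, and PATCH 095-097 settle on 'folder_id' (rather than 'parent_id') as the documented way to point a task or product at its parent folder. A condensed sketch of that flow, assuming an existing project; all names below are placeholders, not values taken from these patches:

    from ayon_api.entity_hub import EntityHub

    hub = EntityHub("my_project")  # placeholder project name
    folder = hub.add_new_folder(folder_type="Folder", name="example_folder")
    product = hub.add_new_product(
        name="example_product",
        product_type="animation",
        folder_id=folder.id,
    )
    # Version numbers are expected to be integers; non-integer values fail
    # on commit, as the invalid-version tests above demonstrate.
    version = hub.add_new_version(1, product.id)
    hub.commit_changes()
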
From 766c8eb713f9c6908a37aa8c2c4f44ac05b6ac59 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 26 Nov 2024 14:24:27 +0100 Subject: [PATCH 098/135] use folder id for new tasks in tests --- tests/test_entity_hub.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_entity_hub.py b/tests/test_entity_hub.py index 8f2383ecb..ddac2894b 100644 --- a/tests/test_entity_hub.py +++ b/tests/test_entity_hub.py @@ -151,7 +151,7 @@ def test_custom_values_on_entities(project_entity_fixture): task_type=task_type, name=task_name, label=task_label, - parent_id=folder.id, + folder_id=folder.id, data=task_data, attribs=task_attrib, entity_id=task_id, @@ -299,7 +299,7 @@ def test_label_eq_name_on_entities(project_entity_fixture): task_type=task_type, name=task_name, label=task_label, - parent_id=folder.id, + folder_id=folder.id, entity_id=task_id, ) hub.commit_changes() @@ -355,7 +355,7 @@ def test_data_changes_on_entities(project_entity_fixture): task_type=task_type, name=task_name, data=task_data, - parent_id=folder.id, + folder_id=folder.id, entity_id=task_id, ) hub.commit_changes() @@ -423,7 +423,7 @@ def test_label_eq_name_on_entities(project_entity_fixture): task_type=task_type, name=task_name, label=task_label, - parent_id=folder.id, + folder_id=folder.id, entity_id=task_id, status=init_status_name, ) From 7df8b37ed90df63eb33da424cf9a2b85393a810c Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 26 Nov 2024 14:34:09 +0100 Subject: [PATCH 099/135] fix args in tests --- tests/test_entity_hub.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_entity_hub.py b/tests/test_entity_hub.py index ddac2894b..3a9ef0afb 100644 --- a/tests/test_entity_hub.py +++ b/tests/test_entity_hub.py @@ -61,14 +61,14 @@ def test_simple_operations( # create folders with subfolder for folder_number in range(folders_count): folder = hub.add_new_folder( - "Folder", + folder_type="Folder", name=f"{folder_name}{folder_number:03}" ) folders.append(folder) hub.commit_changes() subfolder = hub.add_new_folder( - "Folder", + folder_type="Folder", name=f"{folder_name}{folder_number:03}", parent_id=folder["id"] ) From 8291ce28d0cdd091816d604068d8bc4ad4d744f3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 26 Nov 2024 14:42:23 +0100 Subject: [PATCH 100/135] implemented name setter --- ayon_api/entity_hub.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index c4d7c46de..f3094824c 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -1775,14 +1775,6 @@ def lock(self): def _get_entity_by_id(self, entity_id): return self._entity_hub.get_entity_by_id(entity_id) - def get_name(self): - return self._name - - def set_name(self, name): - self._name = name - - name = property(get_name, set_name) - def get_parent_id(self): """Parent entity id. @@ -1972,7 +1964,17 @@ def get_name(self): ) return self._name - name = property(get_name) + def set_name(self, name): + if not self._supports_name: + raise NotImplementedError( + f"Name is not supported for '{self.entity_type}'." 
+ ) + + if not isinstance(name, str): + raise TypeError("Name must be a string.") + self._name = name + + name = property(get_name, set_name) def get_label(self) -> Optional[str]: if not self._supports_label: From 55520898dcb6c1731e1d1cc646181f3b249d8eb5 Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Tue, 26 Nov 2024 17:58:23 +0100 Subject: [PATCH 101/135] The largest test test_get_vents_all_filter_combinations moved to seperate file, new tests added, old tests edited --- tests/conftest.py | 188 +++++++++++++++ tests/test_entity_hub.py | 57 ++++- tests/test_get_events.py | 159 +++++++++++++ tests/test_server.py | 481 ++++++++------------------------------- 4 files changed, 486 insertions(+), 399 deletions(-) create mode 100644 tests/test_get_events.py diff --git a/tests/conftest.py b/tests/conftest.py index ed6306375..ecb3dd013 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,11 +1,17 @@ +from datetime import datetime, timedelta, timezone import pytest +from xml.dom.minidom import Entity from ayon_api import ( get_project, create_project, update_project, delete_project, + get_folders, + get_products, + get_tasks ) +from ayon_api.entity_hub import EntityHub class _Cache: @@ -60,3 +66,185 @@ def project_entity_fixture(project_name_fixture): yield project_entity if created: delete_project(project_name_fixture) + + +@pytest.fixture +def clean_project(project_name_fixture): + hub = EntityHub(project_name_fixture) + + for folder in get_folders( + project_name_fixture + ): + # delete tasks + for task in get_tasks( + project_name_fixture, + folder_ids=[folder["id"]] + ): + hub.delete_entity(hub.get_task_by_id(task["id"])) + + # delete products + for product in list(get_products( + project_name_fixture, folder_ids=[folder["id"]] + )): + product_entity = hub.get_product_by_id(product["id"]) + hub.delete_entity(product_entity) + + entity = hub.get_folder_by_id(folder["id"]) + hub.delete_entity(entity) + + hub.commit_changes() + + +class TestEventFilters: + project_names = [ + (None), + ([]), + (["demo_Big_Episodic"]), + (["demo_Big_Feature"]), + (["demo_Commercial"]), + (["AY_Tests"]), + (["demo_Big_Episodic", "demo_Big_Feature", "demo_Commercial", "AY_Tests"]) + ] + + topics = [ + (None), + ([]), + (["entity.folder.attrib_changed"]), + (["entity.task.created", "entity.project.created"]), + (["settings.changed", "entity.version.status_changed"]), + (["entity.task.status_changed", "entity.folder.deleted"]), + ([ + "entity.project.changed", + "entity.task.tags_changed", + "entity.product.created" + ]) + ] + + users = [ + (None), + ([]), + (["admin"]), + (["mkolar", "tadeas.8964"]), + (["roy", "luke.inderwick", "ynbot"]), + ([ + "entity.folder.attrib_changed", + "entity.project.created", + "entity.task.created", + "settings.changed" + ]), + ] + + # states is incorrect name for statuses + states = [ + (None), + ([]), + (["pending", "in_progress", "finished", "failed", "aborted", "restarted"]), + (["failed", "aborted"]), + (["pending", "in_progress"]), + (["finished", "failed", "restarted"]), + (["finished"]), + ] + + include_logs = [ + (None), + (True), + (False), + ] + + has_children = [ + (None), + (True), + (False), + ] + + now = datetime.now(timezone.utc) + + newer_than = [ + (None), + ((now - timedelta(days=2)).isoformat()), + ((now - timedelta(days=5)).isoformat()), + ((now - timedelta(days=10)).isoformat()), + ((now - timedelta(days=20)).isoformat()), + ((now - timedelta(days=30)).isoformat()), + ] + + older_than = [ + (None), + ((now - timedelta(days=0)).isoformat()), + ((now - 
timedelta(days=5)).isoformat()), + ((now - timedelta(days=10)).isoformat()), + ((now - timedelta(days=20)).isoformat()), + ((now - timedelta(days=30)).isoformat()), + ] + + fields = [ + (None), + ([]), + ] + + +class TestInvalidEventFilters: + topics = [ + (None), + (["invalid_topic_name_1", "invalid_topic_name_2"]), + (["invalid_topic_name_1"]), + ] + + project_names = [ + (None), + (["invalid_project"]), + (["invalid_project", "demo_Big_Episodic", "demo_Big_Feature"]), + (["invalid_name_2", "demo_Commercial"]), + (["demo_Commercial"]), + ] + + states = [ + (None), + (["pending_invalid"]), + (["in_progress_invalid"]), + (["finished_invalid", "failed_invalid"]), + ] + + users = [ + (None), + (["ayon_invalid_user"]), + (["ayon_invalid_user1", "ayon_invalid_user2"]), + (["ayon_invalid_user1", "ayon_invalid_user2", "admin"]), + ] + + newer_than = [ + (None), + ((datetime.now(timezone.utc) + timedelta(days=2)).isoformat()), + ((datetime.now(timezone.utc) + timedelta(days=5)).isoformat()), + ((datetime.now(timezone.utc) - timedelta(days=5)).isoformat()), + ] + + +class TestUpdateEventData: + update_sender = [ + ("test.server.api"), + ] + + update_username = [ + ("testing_user"), + ] + + update_status = [ + ("pending"), + ("in_progress"), + ("finished"), + ("failed"), + ("aborted"), + ("restarted") + ] + + update_description = [ + ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce vivera."), + ("Updated description test...") + ] + + update_retries = [ + (1), + (0), + (10), + ] diff --git a/tests/test_entity_hub.py b/tests/test_entity_hub.py index 3a9ef0afb..dcc8d959c 100644 --- a/tests/test_entity_hub.py +++ b/tests/test_entity_hub.py @@ -2,6 +2,7 @@ import uuid from requests import delete +import test import pytest @@ -264,7 +265,7 @@ def test_custom_values_on_entities(project_entity_fixture): hub.commit_changes() -def test_label_eq_name_on_entities(project_entity_fixture): +def test_label_eq_name_on_entities_1(project_entity_fixture): """Test label that have same values as name on folder and task. When the entity has same name and label, the label should be set to None. @@ -372,11 +373,11 @@ def test_data_changes_on_entities(project_entity_fixture): hub = EntityHub(project_name) - folder = hub.get_or_query_entity_by_id(folder_id, {"folder"}) + folder = hub.get_or_fetch_entity_by_id(folder_id, {"folder"}) folder.data["key3"] = "value3" folder.data.pop("key1") - task = hub.get_or_query_entity_by_id(task_id, {"task"}) + task = hub.get_or_fetch_entity_by_id(task_id, {"task"}) task.data["key4"] = "value4" task.data.pop("key2") hub.commit_changes() @@ -397,7 +398,7 @@ def test_data_changes_on_entities(project_entity_fixture): hub.commit_changes() -def test_label_eq_name_on_entities(project_entity_fixture): +def test_label_eq_name_on_entities_2(project_entity_fixture): """Test label that have same values as name on folder and task. When the entity has same name and label, the label should be set to None. 
@@ -430,8 +431,8 @@ def test_label_eq_name_on_entities(project_entity_fixture): hub.commit_changes() hub = EntityHub(project_name) - folder = hub.get_or_query_entity_by_id(folder_id, {"folder"}) - task = hub.get_or_query_entity_by_id(task_id, {"task"}) + folder = hub.get_or_fetch_entity_by_id(folder_id, {"folder"}) + task = hub.get_or_fetch_entity_by_id(task_id, {"task"}) assert folder.status == init_status_name, ( "Folder status set on create was not propagated" @@ -759,7 +760,7 @@ def test_create_delete_with_duplicated_names( test_names = [ ("test_name"), - # ("test_123"), + ("test_123"), ] test_product_types = [ @@ -769,6 +770,8 @@ def test_create_delete_with_duplicated_names( ("workfile"), ] + +@pytest.mark.usefixtures("clean_project") @pytest.mark.parametrize("folder_name", test_names) @pytest.mark.parametrize("product_name", test_names) @pytest.mark.parametrize("product_type", test_product_types) @@ -833,6 +836,7 @@ def test_create_delete_products( assert product.get_folder_id() == folder["id"] +@pytest.mark.usefixtures("clean_project") @pytest.mark.parametrize("name", test_names) def test_create_delete_folders(project_entity_fixture, name): project_name = project_entity_fixture["name"] @@ -893,10 +897,12 @@ def test_create_delete_folders(project_entity_fixture, name): test_version_numbers = [ - ([1, 2, 3, 4]) + ([1, 2, 3, 4]), + ([8, 10, 4, 5]), ] +@pytest.mark.usefixtures("clean_project") @pytest.mark.parametrize("version_numbers", test_version_numbers) def test_create_delete_versions(project_entity_fixture, version_numbers): # prepare hierarchy @@ -953,6 +959,7 @@ def test_create_delete_versions(project_entity_fixture, version_numbers): ] +@pytest.mark.usefixtures("clean_project") @pytest.mark.parametrize("version_number", test_invalid_version_number) def test_create_invalid_versions(project_entity_fixture, version_number): # prepare hierarchy @@ -983,6 +990,7 @@ def test_create_invalid_versions(project_entity_fixture, version_number): hub.commit_changes() +@pytest.mark.usefixtures("clean_project") def test_change_status_on_version(project_entity_fixture): folder_types = [ type["name"] for type in project_entity_fixture["folderTypes"] @@ -1020,6 +1028,7 @@ def test_change_status_on_version(project_entity_fixture): assert version.get_status() == status_name +@pytest.mark.usefixtures("clean_project") @pytest.mark.parametrize("version", test_version_numbers) def test_set_invalid_status_on_version(project_entity_fixture, version): folder_types = [ @@ -1087,6 +1096,7 @@ def test_set_invalid_status_on_version(project_entity_fixture, version): ] +@pytest.mark.usefixtures("clean_project") @pytest.mark.parametrize("tags", test_tags) def test_set_tag_on_version(project_entity_fixture, tags): folder_types = [ @@ -1125,12 +1135,37 @@ def test_set_invalid_tag_on_version(): raise NotImplementedError() -def test_status_definition_on_project(project_entity_fixture): +test_statuses = [ + ("status1"), + ("status2"), + ("status3"), +] + +test_icon = [ + ("arrow_forward"), + ("expand_circle_down"), + ("done_outline"), +] + + +@pytest.mark.parametrize("status_name", test_statuses) +@pytest.mark.parametrize("icon_name", test_icon) +def test_status_definition_on_project( + project_entity_fixture, + status_name, + icon_name +): hub = EntityHub(project_entity_fixture["name"]) project = hub.project_entity - project.status = "test_status" - print(project.status) + project.get_statuses().create( + status_name, + icon_name + ) + assert status_name == project.get_statuses().get(status_name).get_name() + assert 
icon_name == project.get_statuses().get(status_name).get_icon() + + # print(project.status) # project.set_status() # project_status_obj = hub.project_entity.get_statuses() diff --git a/tests/test_get_events.py b/tests/test_get_events.py new file mode 100644 index 000000000..dd315fe97 --- /dev/null +++ b/tests/test_get_events.py @@ -0,0 +1,159 @@ +from datetime import datetime +import pytest + +from ayon_api import ( + get_events, + get_default_fields_for_type, + exceptions, + set_timeout, + get_timeout +) +from .conftest import TestEventFilters + + +@pytest.mark.parametrize("topics", TestEventFilters.topics[-3:]) +@pytest.mark.parametrize( + "event_ids", + [None] + [pytest.param(None, marks=pytest.mark.usefixtures("event_ids"))] +) +@pytest.mark.parametrize("project_names", TestEventFilters.project_names[-3:]) +@pytest.mark.parametrize("states", TestEventFilters.states[-3:]) +@pytest.mark.parametrize("users", TestEventFilters.users[-3:]) +@pytest.mark.parametrize("include_logs", TestEventFilters.include_logs[-3:]) +@pytest.mark.parametrize("has_children", TestEventFilters.has_children[-3:]) +@pytest.mark.parametrize("newer_than", TestEventFilters.newer_than[-2:]) +@pytest.mark.parametrize("older_than", TestEventFilters.older_than[-2:]) +@pytest.mark.parametrize("fields", TestEventFilters.fields[0:1]) +def test_get_events_all_filter_combinations( + topics, + event_ids, + project_names, + states, + users, + include_logs, + has_children, + newer_than, + older_than, + fields +): + """Tests all combinations of possible filters for `get_events`. + + Verifies: + - Calls `get_events` with the provided filter parameters. + - Ensures each event in the result set matches the specified filters. + - Checks that the number of returned events matches the expected count + based on the filters applied. + - Confirms that each event contains only the specified fields, with + no extra keys. + + Note: + - Adjusts the timeout setting if necessary to handle a large number + of tests and avoid timeout errors. + - Some combinations of filter parameters may lead to a server timeout + error. When this occurs, the test will skip instead of failing. + - Currently, a ServerError due to timeout may occur when `has_children` + is set to False. + + """ + if get_timeout() < 5: + set_timeout(None) # default timeout + + try: + res = list(get_events( + topics=topics, + event_ids=event_ids, + project_names=project_names, + states=states, + users=users, + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=fields + )) + except exceptions.ServerError as exc: + assert has_children is False, ( + f"{exc} even if has_children is {has_children}." 
+ ) + print("Warning: ServerError encountered, test skipped due to timeout.") + pytest.skip("Skipping test due to server timeout.") + + for item in res: + assert item.get("topic") in topics + assert item.get("project") in project_names + assert item.get("user") in users + assert item.get("status") in states + + assert (newer_than is None) or ( + datetime.fromisoformat(item.get("createdAt")) + > datetime.fromisoformat(newer_than) + ) + assert (older_than is None) or ( + datetime.fromisoformat(item.get("createdAt")) + < datetime.fromisoformat(older_than) + ) + + assert topics is None or len(res) == sum(len( + list(get_events( + topics=[topic], + project_names=project_names, + states=states, + users=users, + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=fields + )) or [] + ) for topic in topics) + + assert project_names is None or len(res) == sum(len( + list(get_events( + topics=topics, + project_names=[project_name], + states=states, + users=users, + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=fields + )) or [] + ) for project_name in project_names) + + assert states is None or len(res) == sum(len( + list(get_events( + topics=topics, + project_names=project_names, + states=[state], + users=users, + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=fields + )) or [] + ) for state in states) + + assert users is None or len(res) == sum(len( + list(get_events( + topics=topics, + project_names=project_names, + states=states, + users=[user], + include_logs=include_logs, + has_children=has_children, + newer_than=newer_than, + older_than=older_than, + fields=fields + )) or [] + ) for user in users) + + if fields == []: + fields = get_default_fields_for_type("event") + + assert fields is None \ + or all( + set(event.keys()) == set(fields) + for event in res + ) diff --git a/tests/test_server.py b/tests/test_server.py index 64d1187f8..9e0b64e87 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -15,14 +15,14 @@ create_folder, create_project, create_thumbnail, - delete, + delete_addon_version, + delete_event, delete_project, dispatch_event, download_addon_private_file, enroll_event_job, get, get_addons_info, - get_default_fields_for_type, get_event, get_events, get_folder_thumbnail, @@ -41,6 +41,14 @@ ServerAPI, exceptions ) +from .conftest import ( + TestEventFilters, + TestInvalidEventFilters, + TestUpdateEventData, + event_id, + event_ids +) + AYON_BASE_URL = os.getenv("AYON_SERVER_URL") AYON_REST_URL = "{}/api".format(AYON_BASE_URL) @@ -104,256 +112,7 @@ def test_get(): assert isinstance(res.data, dict) -test_project_names = [ - (None), - ([]), - (["demo_Big_Episodic"]), - (["demo_Big_Feature"]), - (["demo_Commercial"]), - (["AY_Tests"]), - (["demo_Big_Episodic", "demo_Big_Feature", "demo_Commercial", "AY_Tests"]) -] - -test_topics = [ - (None), - ([]), - (["entity.folder.attrib_changed"]), - (["entity.task.created", "entity.project.created"]), - (["settings.changed", "entity.version.status_changed"]), - (["entity.task.status_changed", "entity.folder.deleted"]), - ([ - "entity.project.changed", - "entity.task.tags_changed", - "entity.product.created" - ]) -] - -test_users = [ - (None), - ([]), - (["admin"]), - (["mkolar", "tadeas.8964"]), - (["roy", "luke.inderwick", "ynbot"]), - ([ - "entity.folder.attrib_changed", - "entity.project.created", - "entity.task.created", - 
"settings.changed" - ]), -] - -# states is incorrect name for statuses -test_states = [ - (None), - ([]), - (["pending", "in_progress", "finished", "failed", "aborted", "restarted"]), - (["failed", "aborted"]), - (["pending", "in_progress"]), - (["finished", "failed", "restarted"]), - (["finished"]), -] - -test_include_logs = [ - (None), - (True), - (False), -] - -test_has_children = [ - (None), - (True), - (False), -] - -now = datetime.now(timezone.utc) - -test_newer_than = [ - (None), - ((now - timedelta(days=2)).isoformat()), - ((now - timedelta(days=5)).isoformat()), - ((now - timedelta(days=10)).isoformat()), - ((now - timedelta(days=20)).isoformat()), - ((now - timedelta(days=30)).isoformat()), -] - -test_older_than = [ - (None), - ((now - timedelta(days=0)).isoformat()), - ((now - timedelta(days=5)).isoformat()), - ((now - timedelta(days=10)).isoformat()), - ((now - timedelta(days=20)).isoformat()), - ((now - timedelta(days=30)).isoformat()), -] - -test_fields = [ - (None), - ([]), - ([]) -] - -@pytest.fixture(params=[3, 4, 5]) -def event_ids(request): - length = request.param - if length == 0: - return None - - recent_events = list(get_events( - newer_than=(datetime.now(timezone.utc) - timedelta(days=5)).isoformat() - )) - - return [recent_event["id"] for recent_event in recent_events[:length]] - - -# takes max 3 items in a list to reduce the number of combinations -@pytest.mark.parametrize("topics", test_topics[-3:]) -@pytest.mark.parametrize( - "event_ids", - [None] + [pytest.param(None, marks=pytest.mark.usefixtures("event_ids"))] -) -@pytest.mark.parametrize("project_names", test_project_names[-3:]) -@pytest.mark.parametrize("states", test_states[-3:]) -@pytest.mark.parametrize("users", test_users[-3:]) -@pytest.mark.parametrize("include_logs", test_include_logs[-3:]) -@pytest.mark.parametrize("has_children", test_has_children[2:3]) -@pytest.mark.parametrize("newer_than", test_newer_than[-3:]) -@pytest.mark.parametrize("older_than", test_older_than[-3:]) -@pytest.mark.parametrize("fields", test_fields[-3:]) -def test_get_events_all_filter_combinations( - topics, - event_ids, - project_names, - states, - users, - include_logs, - has_children, - newer_than, - older_than, - fields -): - """Tests all combinations of possible filters for `get_events`. - - Verifies: - - Calls `get_events` with the provided filter parameters. - - Ensures each event in the result set matches the specified filters. - - Checks that the number of returned events matches the expected count - based on the filters applied. - - Confirms that each event contains only the specified fields, with - no extra keys. - - Note: - - Adjusts the timeout setting if necessary to handle a large number - of tests and avoid timeout errors. - - Some combinations of filter parameters may lead to a server timeout - error. When this occurs, the test will skip instead of failing. - - Currently, a ServerError due to timeout may occur when `has_children` - is set to False. - - """ - if get_timeout() < 5: - set_timeout(None) # default timeout - - try: - res = list(get_events( - topics=topics, - event_ids=event_ids, - project_names=project_names, - states=states, - users=users, - include_logs=include_logs, - has_children=has_children, - newer_than=newer_than, - older_than=older_than, - fields=fields - )) - except exceptions.ServerError as exc: - assert has_children is False, ( - f"{exc} even if has_children is {has_children}." 
- ) - print("Warning: ServerError encountered, test skipped due to timeout.") - pytest.skip("Skipping test due to server timeout.") - - for item in res: - assert item.get("topic") in topics - assert item.get("project") in project_names - assert item.get("user") in users - assert item.get("status") in states - - assert (newer_than is None) or ( - datetime.fromisoformat(item.get("createdAt")) - > datetime.fromisoformat(newer_than) - ) - assert (older_than is None) or ( - datetime.fromisoformat(item.get("createdAt")) - < datetime.fromisoformat(older_than) - ) - - assert topics is None or len(res) == sum(len(list( - get_events( - topics=[topic], - project_names=project_names, - states=states, - users=users, - include_logs=include_logs, - has_children=has_children, - newer_than=newer_than, - older_than=older_than, - fields=fields - ) - )) for topic in topics) - - assert project_names is None or len(res) == sum(len(list( - get_events( - topics=topics, - project_names=[project_name], - states=states, - users=users, - include_logs=include_logs, - has_children=has_children, - newer_than=newer_than, - older_than=older_than, - fields=fields - ) - )) for project_name in project_names) - - assert states is None or len(res) == sum(len(list( - get_events( - topics=topics, - project_names=project_names, - states=[state], - users=users, - include_logs=include_logs, - has_children=has_children, - newer_than=newer_than, - older_than=older_than, - fields=fields - ) - )) for state in states) - - assert users is None or len(res) == sum(len(list( - get_events( - topics=topics, - project_names=project_names, - states=states, - users=[user], - include_logs=include_logs, - has_children=has_children, - newer_than=newer_than, - older_than=older_than, - fields=fields - ) - )) for user in users) - - if fields == []: - fields = get_default_fields_for_type("event") - - assert fields is None \ - or all( - set(event.keys()) == set(fields) - for event in res - ) - - -@pytest.mark.parametrize("has_children", test_has_children) +@pytest.mark.parametrize("has_children", TestEventFilters.has_children) def test_get_events_timeout_has_children(has_children): """Test `get_events` function with the `has_children` filter. @@ -396,14 +155,14 @@ def test_get_events_event_ids(event_ids): for item in res: assert item.get("id") in event_ids - assert len(res) == sum(len(list( - get_events( + assert event_ids is None or len(res) == sum(len( + list(get_events( event_ids=[event_id] - ) - )) for event_id in event_ids) + )) or [] + ) for event_id in event_ids) -@pytest.mark.parametrize("project_names", test_project_names) +@pytest.mark.parametrize("project_names", TestEventFilters.project_names) def test_get_events_project_name(project_names): """Test `get_events` function using specified project names. 
@@ -419,15 +178,15 @@ def test_get_events_project_name(project_names): assert item.get("project") in project_names # test if the legths are equal - assert len(res) == sum(len(list( - get_events( + assert project_names is None or len(res) == sum(len( + list(get_events( project_names=[project_name] - ) - )) for project_name in project_names) + )) or [] + ) for project_name in project_names) -@pytest.mark.parametrize("project_names", test_project_names) -@pytest.mark.parametrize("topics", test_topics) +@pytest.mark.parametrize("project_names", TestEventFilters.project_names) +@pytest.mark.parametrize("topics", TestEventFilters.topics) def test_get_events_project_name_topic(project_names, topics): """Test `get_events` function using both project names and topics. @@ -448,24 +207,24 @@ def test_get_events_project_name_topic(project_names, topics): assert item.get("project") in project_names # test if the legths are equal - assert len(res) == sum(len(list( - get_events( + assert project_names is None or len(res) == sum(len( + list(get_events( project_names=[project_name], topics=topics - ) - )) for project_name in project_names) + )) or [] + ) for project_name in project_names) - assert len(res) == sum(len(list( - get_events( + assert topics is None or len(res) == sum(len( + list(get_events( project_names=project_names, topics=[topic] - ) - )) for topic in topics) + )) or [] + ) for topic in topics) -@pytest.mark.parametrize("project_names", test_project_names) -@pytest.mark.parametrize("topics", test_topics) -@pytest.mark.parametrize("users", test_users) +@pytest.mark.parametrize("project_names", TestEventFilters.project_names) +@pytest.mark.parametrize("topics", TestEventFilters.topics) +@pytest.mark.parametrize("users", TestEventFilters.users) def test_get_events_project_name_topic_user(project_names, topics, users): """Test `get_events` function using project names, topics, and users. 
@@ -483,35 +242,38 @@ def test_get_events_project_name_topic_user(project_names, topics, users): )) for item in res: - assert item.get("topic") in topics - assert item.get("project") in project_names - assert item.get("user") in project_names + assert topics is None or item.get("topic") in topics + assert project_names is None or item.get("project") in project_names + assert users is None or item.get("user") in users # test if the legths are equal - assert len(res) == sum(len(list( - get_events( + assert project_names is None or len(res) == sum(len( + list(get_events( project_names=[project_name], - topics=topics - ) - )) for project_name in project_names) + topics=topics, + users=users + )) or [] + ) for project_name in project_names) - assert len(res) == sum(len(list( - get_events( + assert topics is None or len(res) == sum(len( + list(get_events( project_names=project_names, - topics=[topic] - ) - )) for topic in topics) + topics=[topic], + users=users + )) or [] + ) for topic in topics) - assert len(res) == sum(len(list( - get_events( + assert users is None or len(res) == sum(len( + list(get_events( project_names=project_names, - topics=[topic] - ) - )) for topic in topics) + topics=topics, + users=[user] + )) or [] + ) for user in users) -@pytest.mark.parametrize("newer_than", test_newer_than) -@pytest.mark.parametrize("older_than", test_older_than) +@pytest.mark.parametrize("newer_than", TestEventFilters.newer_than) +@pytest.mark.parametrize("older_than", TestEventFilters.older_than) def test_get_events_timestamps(newer_than, older_than): """Test `get_events` function using date filters `newer_than` and `older_than`. @@ -537,47 +299,11 @@ def test_get_events_timestamps(newer_than, older_than): ) -test_invalid_topics = [ - (None), - (["invalid_topic_name_1", "invalid_topic_name_2"]), - (["invalid_topic_name_1"]), -] - -test_invalid_project_names = [ - (None), - (["invalid_project"]), - (["invalid_project", "demo_Big_Episodic", "demo_Big_Feature"]), - (["invalid_name_2", "demo_Commercial"]), - (["demo_Commercial"]), -] - -test_invalid_states = [ - (None), - (["pending_invalid"]), - (["in_progress_invalid"]), - (["finished_invalid", "failed_invalid"]), -] - -test_invalid_users = [ - (None), - (["ayon_invalid_user"]), - (["ayon_invalid_user1", "ayon_invalid_user2"]), - (["ayon_invalid_user1", "ayon_invalid_user2", "admin"]), -] - -test_invalid_newer_than = [ - (None), - ((datetime.now(timezone.utc) + timedelta(days=2)).isoformat()), - ((datetime.now(timezone.utc) + timedelta(days=5)).isoformat()), - ((datetime.now(timezone.utc) - timedelta(days=5)).isoformat()), -] - - -@pytest.mark.parametrize("topics", test_invalid_topics) -@pytest.mark.parametrize("project_names", test_invalid_project_names) -@pytest.mark.parametrize("states", test_invalid_states) -@pytest.mark.parametrize("users", test_invalid_users) -@pytest.mark.parametrize("newer_than", test_invalid_newer_than) +@pytest.mark.parametrize("topics", TestInvalidEventFilters.topics) +@pytest.mark.parametrize("project_names", TestInvalidEventFilters.project_names) +@pytest.mark.parametrize("states", TestInvalidEventFilters.states) +@pytest.mark.parametrize("users", TestInvalidEventFilters.users) +@pytest.mark.parametrize("newer_than", TestInvalidEventFilters.newer_than) def test_get_events_invalid_data( topics, project_names, @@ -635,55 +361,11 @@ def test_get_events_invalid_data( or datetime.fromisoformat(newer_than) < datetime.now(timezone.utc) -@pytest.fixture -def event_id(): - """Fixture that retrieves the ID of a recent event 
created within - the last 5 days. - - Returns: - - The event ID of the most recent event within the last 5 days - if available. - - `None` if no recent events are found within this time frame. - - """ - recent_events = list(get_events( - newer_than=(datetime.now(timezone.utc) - timedelta(days=5)).isoformat() - )) - return recent_events[0]["id"] if recent_events else None - -test_update_sender = [ - ("test.server.api"), -] - -test_update_username = [ - ("testing_user"), -] - -test_update_status = [ - ("pending"), - ("in_progress"), - ("finished"), - ("failed"), - ("aborted"), - ("restarted") -] - -test_update_description = [ - ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce vivera."), - ("Updated description test...") -] - -test_update_retries = [ - (1), - (0), - (10), -] - -@pytest.mark.parametrize("sender", test_update_sender) -@pytest.mark.parametrize("username", test_update_username) -@pytest.mark.parametrize("status", test_update_status) -@pytest.mark.parametrize("description", test_update_description) -@pytest.mark.parametrize("retries", test_update_retries) +@pytest.mark.parametrize("sender", TestUpdateEventData.update_sender) +@pytest.mark.parametrize("username", TestUpdateEventData.update_username) +@pytest.mark.parametrize("status", TestUpdateEventData.update_status) +@pytest.mark.parametrize("description", TestUpdateEventData.update_description) +@pytest.mark.parametrize("retries", TestUpdateEventData.update_retries) def test_update_event( event_id, sender, @@ -812,8 +494,8 @@ def clean_up_events(topics=[TEST_SOURCE_TOPIC, TEST_TARGET_TOPIC]): interfere with the test setup or outcomes by marking them as 'finished'. """ - events = list(get_events(topics=topics)) - for event in events: + pending_events = list(get_events(topics=topics)) + for event in pending_events: if event["status"] not in ["finished", "failed"]: update_event(event["id"], status="finished") @@ -834,8 +516,20 @@ def create_test_events(num_of_events=DEFAULT_NUMBER_OF_EVENTS): ] +@pytest.fixture +def delete_events(topics=[TEST_SOURCE_TOPIC, TEST_TARGET_TOPIC]): + """Cleans up events from the specified topics after the test completes. 
+    """
+    yield
+
+    for event in list(get_events(topics=topics)):
+        delete_event(event["id"])
+
+
 # clean_up should be below create_test to ensure it is called first
 # pytest probably does not guarantee the order of execution
+# delete_events is disabled for now - until new server version
+# @pytest.mark.usefixtures("delete_events")
 @pytest.mark.usefixtures("create_test_events")
 @pytest.mark.usefixtures("clean_up_events")
 @pytest.mark.parametrize("sequential", test_sequential)
@@ -889,6 +583,10 @@ def test_enroll_event_job(sequential):
     and job_1 != job_2
 
 
+# disabled for now - until new server version
+# delete_events is disabled for now - until new server version
+# @pytest.mark.usefixtures("delete_events")
+@pytest.mark.usefixtures("create_test_events")
 @pytest.mark.usefixtures("clean_up_events")
 @pytest.mark.parametrize("sequential", test_sequential)
 def test_enroll_event_job_failed(sequential):
@@ -931,6 +629,8 @@ def test_enroll_event_job_failed(sequential):
     assert sequential is not True or job_1 == job_2
 
 
+# delete_events is disabled for now - until new server version
+# @pytest.mark.usefixtures("delete_events")
 @pytest.mark.usefixtures("clean_up_events")
 @pytest.mark.parametrize("sequential", test_sequential)
 def test_enroll_event_job_same_sender(sequential):
@@ -970,8 +670,11 @@ def test_enroll_event_job_same_sender(sequential):
     ("nonexisting_source_topic"),
 ]
 
+
+# delete_events is disabled for now - until new server version
+# @pytest.mark.usefixtures("delete_events")
 @pytest.mark.usefixtures("clean_up_events")
-@pytest.mark.parametrize("topic", test_invalid_topics)
+@pytest.mark.parametrize("topic", TestInvalidEventFilters.topics)
 @pytest.mark.parametrize("sequential", test_sequential)
 def test_enroll_event_job_invalid_topic(topic, sequential):
     """Tests `enroll_event_job` behavior when provided with invalid topics. 
@@ -1000,6 +703,8 @@ def test_enroll_event_job_invalid_topic(topic, sequential): # clean_up should be below create_test to ensure it is called first # pytest probably does not guarantee the order of execution +# delete_events is disabled for now - until new sever version +# @pytest.mark.usefixtures("delete_events") @pytest.mark.usefixtures("create_test_events") @pytest.mark.usefixtures("clean_up_events") def test_enroll_event_job_sequential_false(): @@ -1109,7 +814,7 @@ def test_addon_methods(): download_path = "tests/resources/tmp_downloads" private_file_path = os.path.join(download_path, "ayon-symbol.png") - delete(f"/addons/{addon_name}/{addon_version}") + delete_addon_version(addon_name, addon_version) assert all( addon_name != addon["name"] for addon in get_addons_info()["addons"] ) From 69f883287225705942a3b52afe4a5a0481e5e64a Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 27 Nov 2024 09:03:59 +0100 Subject: [PATCH 102/135] fix formatting --- tests/conftest.py | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index ecb3dd013..630ac542c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -103,7 +103,12 @@ class TestEventFilters: (["demo_Big_Feature"]), (["demo_Commercial"]), (["AY_Tests"]), - (["demo_Big_Episodic", "demo_Big_Feature", "demo_Commercial", "AY_Tests"]) + ([ + "demo_Big_Episodic", + "demo_Big_Feature", + "demo_Commercial", + "AY_Tests" + ]) ] topics = [ @@ -138,7 +143,14 @@ class TestEventFilters: states = [ (None), ([]), - (["pending", "in_progress", "finished", "failed", "aborted", "restarted"]), + ([ + "pending", + "in_progress", + "finished", + "failed", + "aborted", + "restarted" + ]), (["failed", "aborted"]), (["pending", "in_progress"]), (["finished", "failed", "restarted"]), @@ -239,7 +251,10 @@ class TestUpdateEventData: ] update_description = [ - ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce vivera."), + ( + "Lorem ipsum dolor sit amet, consectetur adipiscing elit." + " Fusce vivera." + ), ("Updated description test...") ] From 7ac9c31ea45fb2e3f86a704c2e299484c9562b8a Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 27 Nov 2024 09:07:53 +0100 Subject: [PATCH 103/135] fix formatting --- tests/test_server.py | 57 +++++++++++++++++++++++++------------------- 1 file changed, 32 insertions(+), 25 deletions(-) diff --git a/tests/test_server.py b/tests/test_server.py index 9e0b64e87..8ebb7d3e1 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -137,7 +137,9 @@ def test_get_events_timeout_has_children(has_children): assert has_children is False, ( f"{exc} even if has_children is {has_children}." ) - print("Warning: ServerError encountered, test skipped due to timeout.") + print( + "Warning: ServerError encountered, test skipped due to timeout." + ) pytest.skip("Skipping test due to server timeout.") @@ -146,8 +148,8 @@ def test_get_events_event_ids(event_ids): Verifies: - Each item returned has an ID in the `event_ids` list. - - The number of items returned matches the expected count when filtered - by each individual event ID. + - The number of items returned matches the expected count when + filtered by each individual event ID. 
""" res = list(get_events(event_ids=event_ids)) @@ -300,7 +302,9 @@ def test_get_events_timestamps(newer_than, older_than): @pytest.mark.parametrize("topics", TestInvalidEventFilters.topics) -@pytest.mark.parametrize("project_names", TestInvalidEventFilters.project_names) +@pytest.mark.parametrize( + "project_names", + TestInvalidEventFilters.project_names) @pytest.mark.parametrize("states", TestInvalidEventFilters.states) @pytest.mark.parametrize("users", TestInvalidEventFilters.users) @pytest.mark.parametrize("newer_than", TestInvalidEventFilters.newer_than) @@ -364,7 +368,9 @@ def test_get_events_invalid_data( @pytest.mark.parametrize("sender", TestUpdateEventData.update_sender) @pytest.mark.parametrize("username", TestUpdateEventData.update_username) @pytest.mark.parametrize("status", TestUpdateEventData.update_status) -@pytest.mark.parametrize("description", TestUpdateEventData.update_description) +@pytest.mark.parametrize( + "description", + TestUpdateEventData.update_description) @pytest.mark.parametrize("retries", TestUpdateEventData.update_retries) def test_update_event( event_id, @@ -539,12 +545,12 @@ def test_enroll_event_job(sequential): Verifies: - When `sequential` is set to `True`, only one job can be enrolled at - a time, preventing new enrollments until the first job is closed or - updated. + a time, preventing new enrollments until the first job is closed + or updated. - When `sequential` is `False` or `None`, multiple jobs can be enrolled concurrently without conflicts. - - The `update_event` function updates the `status` of a job to allowing - next sequential job processing. + - The `update_event` function updates the `status` of a job to + allowing next sequential job processing. Notes: - `update_event` is used to set `job_1`'s status to "failed" to test @@ -596,8 +602,8 @@ def test_enroll_event_job_failed(sequential): Verifies: - `enroll_event_job` creates a job (`job_1`) with specified parameters `(`source_topic`, `target_topic`, `sender`, and `sequential`). - - After `job_1` fails (status set to "failed"), a new job (`job_2`) can - be enrolled with the same parameters. + - After `job_1` fails (status set to "failed"), a new job (`job_2`) + can be enrolled with the same parameters. - When `sequential` is `True`, the test verifies that `job_1` and `job_2` are identical, as a failed sequential job should not allow a new job to be enrolled separately. @@ -641,8 +647,8 @@ def test_enroll_event_job_same_sender(sequential): - `enroll_event_job` creates a `job_1` and `job_2` with the same parameters (`source_topic`, `target_topic`, `sender`, and `sequential`). - - The test checks that `job_1` and `job_2` are identical, ensuring that - no duplicate jobs are created for the same sender. + - The test checks that `job_1` and `job_2` are identical, ensuring + that no duplicate jobs are created for the same sender. Notes: - TODO - delete events after test if possible @@ -716,9 +722,9 @@ def test_enroll_event_job_sequential_false(): - Each job has a unique `dependsOn` identifier Notes: - - The `depends_on_ids` set is used to track `dependsOn` identifiers and - verify that each job has a unique dependency state, as required for - concurrent processing. + - The `depends_on_ids` set is used to track `dependsOn` identifiers + and verify that each job has a unique dependency state, as + required for concurrent processing. 
- TODO - delete events after test if possible """ @@ -752,15 +758,16 @@ def test_thumbnail_operations( Verifies: - A thumbnail is created for the project and associated with a folder. - - The thumbnail associated with the folder is correctly retrieved, with - attributes matching the project name and thumbnail ID. + - The thumbnail associated with the folder is correctly retrieved, + with attributes matching the project name and thumbnail ID. - The content of the retrieved thumbnail matches the expected image bytes read from the specified `thumbnail_path`. Notes: - `delete_project` is called initially to remove any pre-existing project with the same name, ensuring no conflicts during testing. - - At the end of the test, the project is deleted to clean up resources. + - At the end of the test, the project is deleted to clean up + resources. """ if get_project(project_name): @@ -795,12 +802,12 @@ def test_addon_methods(): - An addon with the specified name and version does not exist at the start. - Uploads an addon package `.zip` file and triggers a server restart. - - Ensures the server restart completes, and verifies the uploaded addon - is available in the list of addons after the restart. + - Ensures the server restart completes, and verifies the uploaded + addon is available in the list of addons after the restart. - Downloads a private file associated with the addon, verifying its existence and correct download location. - - Cleans up downloaded files and directories after the test to maintain - a clean state. + - Cleans up downloaded files and directories after the test to + maintain a clean state. Notes: - `time.sleep()` is used to allow for a brief pause for the server @@ -863,8 +870,8 @@ def api_artist_user(): - Establishes a server API connection and retrieves the list of available access groups. - Configures a new user with limited permissions (`isAdmin` and - `isManager` set to `False`) and assigns all available access groups - as default and project-specific groups. + `isManager` set to `False`) and assigns all available access + groups as default and project-specific groups. - Creates a new API connection using the artist user's credentials (`username` and `password`) and logs in with it. From c03d682cc81ebae4758266ecbb028688a1980b96 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 27 Nov 2024 09:10:43 +0100 Subject: [PATCH 104/135] remove invalid arg from docstring --- ayon_api/entity_hub.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index f3094824c..4cab8f282 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -3288,8 +3288,6 @@ class TaskEntity(BaseEntity): value is defined based on value of 'entity_id'. entity_hub (EntityHub): Object of entity hub which created object of the entity. - parent_id (Union[str, None]): DEPRECATED please use 'folder_id' - instead. 
""" _supports_name = True From f654b52f28ddcaa14388ec537c8da113ad27d1d0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 27 Nov 2024 09:11:24 +0100 Subject: [PATCH 105/135] remove unused import --- tests/conftest.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 630ac542c..c1b1359ed 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,5 @@ from datetime import datetime, timedelta, timezone import pytest -from xml.dom.minidom import Entity from ayon_api import ( get_project, From 61e40cf20bf13071dbdefc299ad13644ea6c62f7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Thu, 28 Nov 2024 11:29:01 +0100 Subject: [PATCH 106/135] unser parent id if parent is not fetched --- ayon_api/entity_hub.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ayon_api/entity_hub.py b/ayon_api/entity_hub.py index 4cab8f282..356caea63 100644 --- a/ayon_api/entity_hub.py +++ b/ayon_api/entity_hub.py @@ -825,6 +825,8 @@ def delete_entity(self, entity): parent = self._entities_by_id.get(parent_id) if parent is not None: parent.remove_child(entity.id) + else: + self.unset_entity_parent(entity.id, parent_id) def reset_immutable_for_hierarchy_cache( self, entity_id: Optional[str], bottom_to_top: Optional[bool] = True From c153eef362dec4c441eee58bb5ee936a34f992df Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Thu, 28 Nov 2024 12:57:12 +0100 Subject: [PATCH 107/135] as_username does allow to ignore service user error --- ayon_api/server_api.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index f130e065f..ab6c68528 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -842,13 +842,15 @@ def set_default_service_username(self, username=None): self._update_session_headers() @contextmanager - def as_username(self, username): + def as_username(self, username, ignore_service_error=False): """Service API will temporarily work as other user. This method can be used only if service API key is logged in. Args: username (Union[str, None]): Username to work as when service. + ignore_service_error (Optional[bool]): Ignore error when service + API key is not used. Raises: ValueError: When connection is not yet authenticated or api key @@ -861,6 +863,9 @@ def as_username(self, username): ) if not self._access_token_is_service: + if ignore_service_error: + yield None + return raise ValueError( "Can't set service username. API key is not a service token." 
) From f1e4d82cae1731b1cfd9dbfa79307868fb927007 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Thu, 28 Nov 2024 13:17:55 +0100 Subject: [PATCH 108/135] use statuses for events filtering --- ayon_api/graphql_queries.py | 9 ++++++--- ayon_api/server_api.py | 27 ++++++++++++++++++++++----- 2 files changed, 28 insertions(+), 8 deletions(-) diff --git a/ayon_api/graphql_queries.py b/ayon_api/graphql_queries.py index 999c9e5c2..49acc299e 100644 --- a/ayon_api/graphql_queries.py +++ b/ayon_api/graphql_queries.py @@ -570,23 +570,26 @@ def workfiles_info_graphql_query(fields): return query -def events_graphql_query(fields): +def events_graphql_query(fields, use_states=False): query = GraphQlQuery("Events") topics_var = query.add_variable("eventTopics", "[String!]") ids_var = query.add_variable("eventIds", "[String!]") projects_var = query.add_variable("projectNames", "[String!]") - states_var = query.add_variable("eventStates", "[String!]") + statuses_var = query.add_variable("eventStatuses", "[String!]") users_var = query.add_variable("eventUsers", "[String!]") include_logs_var = query.add_variable("includeLogsFilter", "Boolean!") has_children_var = query.add_variable("hasChildrenFilter", "Boolean!") newer_than_var = query.add_variable("newerThanFilter", "String!") older_than_var = query.add_variable("olderThanFilter", "String!") + statuses_filter_name = "statuses" + if use_states: + statuses_filter_name = "states" events_field = query.add_field_with_edges("events") events_field.set_filter("ids", ids_var) events_field.set_filter("topics", topics_var) events_field.set_filter("projects", projects_var) - events_field.set_filter("states", states_var) + events_field.set_filter(statuses_filter_name, statuses_var) events_field.set_filter("users", users_var) events_field.set_filter("includeLogs", include_logs_var) events_field.set_filter("hasChildren", has_children_var) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index ab6c68528..5d1ee6aa1 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -1458,13 +1458,14 @@ def get_events( topics=None, event_ids=None, project_names=None, - states=None, + statuses=None, users=None, include_logs=None, has_children=None, newer_than=None, older_than=None, - fields=None + fields=None, + states=None, ): """Get events from server with filtering options. @@ -1476,7 +1477,7 @@ def get_events( event_ids (Optional[Iterable[str]]): Event ids. project_names (Optional[Iterable[str]]): Project on which event happened. - states (Optional[Iterable[str]]): Filtering by states. + statuses (Optional[Iterable[str]]): Filtering by statuses. users (Optional[Iterable[str]]): Filtering by users who created/triggered an event. include_logs (Optional[bool]): Query also log events. @@ -1488,18 +1489,31 @@ def get_events( iso datetime string. fields (Optional[Iterable[str]]): Fields that should be received for each event. + states (Optional[Iterable[str]]): DEPRECATED Filtering by states. + Use 'statuses' instead. Returns: Generator[dict[str, Any]]: Available events matching filters. """ + if statuses is None and states is not None: + warnings.warn( + ( + "Used deprecated argument 'states' in 'get_events'." + " Use 'statuses' instead." 
+ ), + DeprecationWarning + ) + statuses = states + + filters = {} if not _prepare_list_filters( filters, ("eventTopics", topics), ("eventIds", event_ids), ("projectNames", project_names), - ("eventStates", states), + ("eventStatuses", statuses), ("eventUsers", users), ): return @@ -1519,7 +1533,10 @@ def get_events( if not fields: fields = self.get_default_fields_for_type("event") - query = events_graphql_query(set(fields)) + major, minor, patch, _, _ = self.server_version_tuple + use_states = (major, minor, patch) <= (1, 5, 6) + + query = events_graphql_query(set(fields), use_states) for attr, filter_value in filters.items(): query.set_variable_value(attr, filter_value) From ef6c6fdeb8180b8bc4fd7265096129246aee1da6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Thu, 28 Nov 2024 13:21:06 +0100 Subject: [PATCH 109/135] updated public api --- ayon_api/_api.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/ayon_api/_api.py b/ayon_api/_api.py index b1af5f6cd..9ea6ad499 100644 --- a/ayon_api/_api.py +++ b/ayon_api/_api.py @@ -874,13 +874,14 @@ def get_events( topics=None, event_ids=None, project_names=None, - states=None, + statuses=None, users=None, include_logs=None, has_children=None, newer_than=None, older_than=None, fields=None, + states=None, ): """Get events from server with filtering options. @@ -892,7 +893,7 @@ def get_events( event_ids (Optional[Iterable[str]]): Event ids. project_names (Optional[Iterable[str]]): Project on which event happened. - states (Optional[Iterable[str]]): Filtering by states. + statuses (Optional[Iterable[str]]): Filtering by statuses. users (Optional[Iterable[str]]): Filtering by users who created/triggered an event. include_logs (Optional[bool]): Query also log events. @@ -904,6 +905,8 @@ def get_events( iso datetime string. fields (Optional[Iterable[str]]): Fields that should be received for each event. + states (Optional[Iterable[str]]): DEPRECATED Filtering by states. + Use 'statuses' instead. Returns: Generator[dict[str, Any]]: Available events matching filters. 
@@ -914,13 +917,14 @@ def get_events( topics=topics, event_ids=event_ids, project_names=project_names, - states=states, + statuses=statuses, users=users, include_logs=include_logs, has_children=has_children, newer_than=newer_than, older_than=older_than, fields=fields, + states=states, ) From 03b495812d00801df93044bc702d47c7f3a5250d Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Thu, 28 Nov 2024 13:50:00 +0100 Subject: [PATCH 110/135] added typehints --- ayon_api/_api.py | 22 +++++++++++----------- ayon_api/server_api.py | 22 +++++++++++----------- 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/ayon_api/_api.py b/ayon_api/_api.py index 9ea6ad499..d18a1b67e 100644 --- a/ayon_api/_api.py +++ b/ayon_api/_api.py @@ -871,17 +871,17 @@ def get_event( def get_events( - topics=None, - event_ids=None, - project_names=None, - statuses=None, - users=None, - include_logs=None, - has_children=None, - newer_than=None, - older_than=None, - fields=None, - states=None, + topics: Optional[Iterable[str]] = None, + event_ids: Optional[Iterable[str]] = None, + project_names: Optional[Iterable[str]] = None, + statuses: Optional[Iterable[str]] = None, + users: Optional[Iterable[str]] = None, + include_logs: Optional[bool] = None, + has_children: Optional[bool] = None, + newer_than: Optional[str] = None, + older_than: Optional[str] = None, + fields: Optional[Iterable[str]] = None, + states: Optional[Iterable[str]] = None, ): """Get events from server with filtering options. diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 5d1ee6aa1..e457e95cc 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -1455,17 +1455,17 @@ def get_event(self, event_id): def get_events( self, - topics=None, - event_ids=None, - project_names=None, - statuses=None, - users=None, - include_logs=None, - has_children=None, - newer_than=None, - older_than=None, - fields=None, - states=None, + topics: Optional[Iterable[str]] = None, + event_ids: Optional[Iterable[str]] = None, + project_names: Optional[Iterable[str]] = None, + statuses: Optional[Iterable[str]] = None, + users: Optional[Iterable[str]] = None, + include_logs: Optional[bool] = None, + has_children: Optional[bool] = None, + newer_than: Optional[str] = None, + older_than: Optional[str] = None, + fields: Optional[Iterable[str]] = None, + states: Optional[Iterable[str]] = None, ): """Get events from server with filtering options. 
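With patches 108-110 applied, `get_events` filters by `statuses` and keeps
`states` only as a deprecated alias. A minimal usage sketch of the new
keyword (topic and project names are illustrative, and an authenticated
`ayon_api` connection via the usual environment variables is assumed):

    import ayon_api

    # Preferred filter name after patch 108.
    finished_events = list(ayon_api.get_events(
        topics=["entity.task.status_changed"],
        project_names=["demo_Commercial"],  # illustrative project name
        statuses=["finished", "failed"],
    ))

    # The old keyword still works, but emits a DeprecationWarning and is
    # forwarded to 'statuses' internally.
    legacy_events = list(ayon_api.get_events(
        topics=["entity.task.status_changed"],
        states=["finished", "failed"],
    ))
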
From 1fb18715e2a03c11c3ea16cada2b026bbfaee8f5 Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Thu, 28 Nov 2024 16:02:54 +0100 Subject: [PATCH 111/135] EntityHub: New tests added, test_create_delete_products_bonus can not be passed (last asserts after delete) --- tests/test_entity_hub.py | 363 +++++++++++++++++++++++++-------------- 1 file changed, 235 insertions(+), 128 deletions(-) diff --git a/tests/test_entity_hub.py b/tests/test_entity_hub.py index dcc8d959c..1d4e4f6b0 100644 --- a/tests/test_entity_hub.py +++ b/tests/test_entity_hub.py @@ -9,7 +9,7 @@ import ayon_api from ayon_api.entity_hub import EntityHub, UNKNOWN_VALUE -from .conftest import project_entity_fixture +from .conftest import project_entity_fixture, TestProductData def test_rename_status(project_entity_fixture): @@ -41,15 +41,14 @@ def test_rename_status(project_entity_fixture): @pytest.mark.parametrize( - "folder_name, subfolder_name, folders_count", + "folder_name, folders_count", [ - ("entity_hub_simple_test", "subfolder", 3), + ("entity_hub_simple_test", 3), ] ) def test_simple_operations( project_entity_fixture, folder_name, - subfolder_name, folders_count ): """Test of simple operations with folders - create, move, delete. @@ -506,7 +505,7 @@ def test_create_delete_with_duplicated_names( for folder_number in range(num_of_subfolders): subfolder = hub.add_new_folder( folder_type="Folder", - parent_id=folder1["id"], + parent_id=folder1.id, name=f"{subfolder_name}{folder_number:03}" ) subfolders.append(subfolder) @@ -515,18 +514,18 @@ def test_create_delete_with_duplicated_names( # create and delete folder with same name subfolder = hub.add_new_folder( folder_type="Folder", - parent_id=folder1["id"], + parent_id=folder1.id, name=f"{subfolder_name}{folder_number:03}" ) hub.delete_entity(subfolder) hub.commit_changes() - assert hub.get_folder_by_id(project_name, folder1["id"]) is not None + assert hub.get_folder_by_id(project_name, folder1.id) is not None for subfolder in subfolders: assert hub.get_folder_by_id( project_name, - subfolder["id"]) is not None + subfolder.id) is not None # clean up hub.delete_entity(folder1) @@ -758,142 +757,217 @@ def test_create_delete_with_duplicated_names( # statuses = project_entity_fixture.get_statuses() # pass -test_names = [ - ("test_name"), - ("test_123"), -] - -test_product_types = [ - ("animation"), - ("camera"), - ("render"), - ("workfile"), -] @pytest.mark.usefixtures("clean_project") -@pytest.mark.parametrize("folder_name", test_names) -@pytest.mark.parametrize("product_name", test_names) -@pytest.mark.parametrize("product_type", test_product_types) +@pytest.mark.parametrize("folder_name", TestProductData.names) +@pytest.mark.parametrize("product_name", TestProductData.names) +@pytest.mark.parametrize("product_type", TestProductData.product_types) def test_create_delete_products( project_entity_fixture, folder_name, product_name, product_type ): + """ + Test the creation and deletion of products within a project. 
+ + Verifies: + - the product is created and can be retrieved by its ID + - the product name, type, and folder ID are set correctly + - the product is deleted and cannot be retrieved by its ID + """ project_name = project_entity_fixture["name"] - folder_type = project_entity_fixture["folderTypes"][0]["name"] hub = EntityHub(project_name) - for folder in ayon_api.get_folders( - project_name, - folder_names=[folder_name] - ): - # delete tasks - for task in ayon_api.get_tasks( - project_name, - folder_ids=[folder["id"]] - ): - hub.delete_entity(hub.get_task_by_id(task["id"])) + for num, folder_type in enumerate(project_entity_fixture["folderTypes"]): + assert list(ayon_api.get_folders( + project_name=project_name, folder_names=[folder_name] + )) == [] + folder = hub.add_new_folder( + name=f"{folder_name}{num:02}", + folder_type=folder_type["name"], + ) - # delete products - for product in list(ayon_api.get_products( - project_name, folder_ids=[folder["id"]] - )): - product_entity = hub.get_product_by_id(product["id"]) - hub.delete_entity(product_entity) + hub.commit_changes() - entity = hub.get_folder_by_id(folder["id"]) - hub.delete_entity(entity) + product = hub.add_new_product( + name=product_name, + product_type=product_type, + folder_id=folder.id + ) hub.commit_changes() - folder = hub.add_new_folder( - folder_type=folder_type, - name=folder_name, - ) + assert hub.get_product_by_id(product.id) + assert product.get_name() == product_name + assert product.get_product_type() == product_type + assert product.get_folder_id() == folder.id - product = hub.add_new_product( - name=product_name, - product_type=product_type, - folder_id=folder["id"] - ) + hub.delete_entity(product) + hub.commit_changes() - hub.commit_changes() + assert hub.get_product_by_id(product.id) is None + assert ayon_api.get_product_by_id(project_name, product.id) is None - assert hub.get_product_by_id(product["id"]) - assert product.get_name() == product_name - assert product.get_product_type() == product_type - assert product.get_folder_id() == folder["id"] - # bonus test: - # create new entity hub for same project and validate the changes - # are propagated +@pytest.mark.usefixtures("clean_project") +@pytest.mark.parametrize("folder_name", TestProductData.names) +@pytest.mark.parametrize("product_name", TestProductData.names) +@pytest.mark.parametrize("product_type", TestProductData.product_types) +def test_create_delete_products_bonus( + project_entity_fixture, + folder_name, + product_name, + product_type +): + """ + Test the creation and deletion of products within a project. 
+ + Verifies: + - the product is created and can be retrieved by its ID + - the product name, type, and folder ID are set correctly + - the product is deleted with a new EntityHub and cannot be retrieved + by its ID + """ + project_name = project_entity_fixture["name"] hub = EntityHub(project_name) - product = hub.get_product_by_id(product["id"]) - assert product.get_name() == product_name - assert product.get_product_type() == product_type - assert product.get_folder_id() == folder["id"] + + products = [] + for num, folder_type in enumerate(project_entity_fixture["folderTypes"]): + assert list(ayon_api.get_folders( + project_name=project_name, folder_names=[folder_name] + )) == [] + folder = hub.add_new_folder( + name=f"{folder_name}{num:02}", + folder_type=folder_type["name"], + ) + + hub.commit_changes() + + product = hub.add_new_product( + name=product_name, + product_type=product_type, + folder_id=folder.id + ) + + hub.commit_changes() + + assert hub.get_product_by_id(product.id) + assert product.get_name() == product_name + assert product.get_product_type() == product_type + assert product.get_folder_id() == folder.id + + products.append(product) + + # create new entity hub for same project and validate the changes + # are propagated + new_hub = EntityHub(project_name) + + for product in products: + new_product = new_hub.get_product_by_id(product.id) + assert new_product is not None + assert new_product.get_name() == product_name + assert new_product.get_product_type() == product_type + + new_hub.delete_entity(new_product) + new_hub.commit_changes() + + assert ayon_api.get_product_by_id( + project_name, new_product.id, fields={"id"} + ) is None + assert new_hub.get_product_by_id(new_product.id) is None @pytest.mark.usefixtures("clean_project") -@pytest.mark.parametrize("name", test_names) +@pytest.mark.parametrize("name", TestProductData.names) def test_create_delete_folders(project_entity_fixture, name): + """Tests the creation and deletion of folders within a project. + + Verifies: + - A folder can be successfully created for each folder type specified + in the project. + - The created folder exists both locally (in `hub`) and remotely (via + `ayon_api`) after committing changes. + - The folder can be deleted, and its deletion is confirmed locally + and remotely. 
+ + """ project_name = project_entity_fixture["name"] - folder_types = [ - type["name"] for type in project_entity_fixture["folderTypes"] - ] hub = EntityHub(project_name) - folder = hub.add_new_folder( - folder_type=folder_types[0], - name=name, - ) + for folder_type in project_entity_fixture["folderTypes"]: + folder = hub.add_new_folder( + folder_type=folder_type["name"], + name=name, + ) - hub.commit_changes() + hub.commit_changes() - assert ayon_api.get_folders( - project_name, - folder_names=[name], - folder_types=folder_types[0:1], - folder_ids=[folder["id"]] - ) + assert hub.get_folder_by_id(folder.id) + assert ayon_api.get_folder_by_id( + project_name, folder.id + ) - for folder in ayon_api.get_folders( - project_name, - folder_names=[name] - ): - # delete tasks - for task in ayon_api.get_tasks( - project_name, - folder_ids=[folder["id"]] - ): - hub.delete_entity(hub.get_task_by_id(task["id"])) + hub.delete_entity(folder) + hub.commit_changes() - entity = hub.get_folder_by_id(folder["id"]) + assert hub.get_folder_by_id(folder.id) is None + assert ayon_api.get_folder_by_id( + project_name, folder.id + ) is None - for id in entity.children_ids: - hub.delete_entity(hub.get_entity_by_id(id)) - hub.delete_entity(entity) +@pytest.mark.usefixtures("clean_project") +@pytest.mark.parametrize("name", TestProductData.names) +def test_create_delete_folders_bonus(project_entity_fixture, name): + """Tests the creation, persistence, and deletion of multiple folders within + a project. + + Verifies: + - After creation, folders are accessible locally (via `hub`) and + remotely (via `ayon_api`). + - Folder persistence is confirmed using a new `EntityHub` instance to + simulate a fresh session. + - Folders can be deleted, and their deletion is reflected both locally + and remotely. + + """ + project_name = project_entity_fixture["name"] + + hub = EntityHub(project_name) + + folders = [] + for num, folder_type in enumerate(project_entity_fixture["folderTypes"]): + folder = hub.add_new_folder( + folder_type=folder_type["name"], + name=f"{name}{num:02}", + ) hub.commit_changes() - # new folder - folder = hub.add_new_folder( - folder_type=folder_types[1], - name=name, - ) + assert hub.get_folder_by_id(folder.id) + assert ayon_api.get_folder_by_id( + project_name, folder.id + ) + folders.append(folder) - hub.commit_changes() + new_hub = EntityHub(project_name) - assert ayon_api.get_folders( - project_name, - folder_names=[name], - folder_types=folder_types[1:2], - folder_ids=[folder["id"]] - ) + for folder in folders: + assert new_hub.get_folder_by_id(folder.id) + assert ayon_api.get_folder_by_id( + project_name, folder.id + ) + + new_hub.delete_entity(folder) + new_hub.commit_changes() + + assert new_hub.get_folder_by_id(folder.id) is None + assert ayon_api.get_folder_by_id( + project_name, folder.id + ) is None test_version_numbers = [ @@ -905,11 +979,23 @@ def test_create_delete_folders(project_entity_fixture, name): @pytest.mark.usefixtures("clean_project") @pytest.mark.parametrize("version_numbers", test_version_numbers) def test_create_delete_versions(project_entity_fixture, version_numbers): + """Tests the creation and deletion of versions within a product hierarchy. + + Verifies: + - A folder and product can be created as a prerequisite hierarchy. + - Versions can be added to a product, with their IDs correctly + reflected in the product's children. + - Versions exist in the local `hub` after creation. 
+ - Versions can be successfully deleted, and their removal is confirmed + both in the `hub` and in the product's children. + + """ + project_name = project_entity_fixture["name"] # prepare hierarchy folder_types = [ type["name"] for type in project_entity_fixture["folderTypes"] ] - hub = EntityHub(project_entity_fixture["name"]) + hub = EntityHub(project_name) folder = hub.add_new_folder( folder_type=folder_types[0], @@ -919,7 +1005,7 @@ def test_create_delete_versions(project_entity_fixture, version_numbers): product = hub.add_new_product( name="test_product", product_type="animation", - folder_id=folder["id"] + folder_id=folder.id ) assert product.get_children_ids() == set() @@ -930,7 +1016,7 @@ def test_create_delete_versions(project_entity_fixture, version_numbers): versions.append( hub.add_new_version( version, - product["id"] + product.id ) ) @@ -940,16 +1026,15 @@ def test_create_delete_versions(project_entity_fixture, version_numbers): assert len(versions) == len(res) for version in versions: - assert version - assert hub.get_version_by_id(version["id"]) - assert version["id"] in res + assert hub.get_version_by_id(version.id) + assert version.id in res # delete - hub.delete_entity(hub.get_version_by_id(version["id"])) + hub.delete_entity(version) hub.commit_changes() - assert hub.get_version_by_id(version["id"]) is None - # assert + assert hub.get_version_by_id(version.id) is None + assert ayon_api.get_version_by_id(project_name, version.id) is None test_invalid_version_number = [ @@ -1147,34 +1232,56 @@ def test_set_invalid_tag_on_version(): ("done_outline"), ] +test_color = [ + ("#ff0000"), + ("#00ff00"), + ("#0000ff"), +] + @pytest.mark.parametrize("status_name", test_statuses) @pytest.mark.parametrize("icon_name", test_icon) +@pytest.mark.parametrize("color", test_color) def test_status_definition_on_project( project_entity_fixture, status_name, - icon_name + icon_name, + color ): hub = EntityHub(project_entity_fixture["name"]) + statuses = hub.project_entity.get_statuses() - project = hub.project_entity - project.get_statuses().create( - status_name, - icon_name + # create status + statuses.create( + name=status_name, + icon=icon_name, + color=color ) - assert status_name == project.get_statuses().get(status_name).get_name() - assert icon_name == project.get_statuses().get(status_name).get_icon() + assert status_name == statuses.get(status_name).get_name() + assert icon_name == statuses.get(status_name).get_icon() + assert color == statuses.get(status_name).get_color() - # print(project.status) + # delete status + statuses.remove_by_name(status_name) + assert statuses.get(status_name) is None - # project.set_status() - # project_status_obj = hub.project_entity.get_statuses() - # project_status_obj.set_state() - # print(type(project_status_obj), project_status_obj) +def test_status_definition_on_project_with_invalid_values(project_entity_fixture): + hub = EntityHub(project_entity_fixture["name"]) + statuses = hub.project_entity.get_statuses() -# definice status na projects -# zmena statusu a tagu na entitach - verzich -# vytvareni a mazani produktu a verzi - + # invalid color + with pytest.raises(ValueError): + statuses.create( + name="status2", + icon="arrow_forward", + color="invalid_color" + ) + # invalid name + with pytest.raises(ValueError): + statuses.create( + name="&_invalid_name", + icon="invalid_icon", + color="invalid_color" + ) From dc09f51ff6225fd2230f9c43bb643a8a27b9b00a Mon Sep 17 00:00:00 2001 From: pavithraj Date: Fri, 29 Nov 2024 15:35:53 +0530 Subject: 
[PATCH 112/135] return task id when creating a task using the create task function --- ayon_api/server_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index ab6c68528..19ddf8a0c 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -5469,7 +5469,7 @@ def create_task( **create_data ) response.raise_for_status() - return folder_id + return task_id def update_task( self, From 638c8666e333d4541242b0b1e6f2241d05a1eb0d Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 29 Nov 2024 12:12:44 +0100 Subject: [PATCH 113/135] implemented helper to define sort order --- ayon_api/__init__.py | 2 ++ ayon_api/utils.py | 15 +++++++++++++++ 2 files changed, 17 insertions(+) diff --git a/ayon_api/__init__.py b/ayon_api/__init__.py index e6214b0dc..03f482e93 100644 --- a/ayon_api/__init__.py +++ b/ayon_api/__init__.py @@ -9,6 +9,7 @@ login_to_server, take_web_action_event, abort_web_action_event, + SortOrder, ) from .server_api import ( RequestTypes, @@ -255,6 +256,7 @@ "login_to_server", "take_web_action_event", "abort_web_action_event", + "SortOrder", "RequestTypes", "ServerAPI", diff --git a/ayon_api/utils.py b/ayon_api/utils.py index d418ef7e7..c3edde8f2 100644 --- a/ayon_api/utils.py +++ b/ayon_api/utils.py @@ -7,6 +7,7 @@ import collections from urllib.parse import urlparse, urlencode from typing import Optional, Dict, Any +from enum import IntEnum import requests import unidecode @@ -41,6 +42,20 @@ ) +class SortOrder(IntEnum): + """Sort order for GraphQl requests.""" + ascending = 0 + descending = 1 + + @classmethod + def parse_value(cls, value, default=None): + if value in (cls.ascending, "ascending", "asc"): + return cls.ascending + if value in (cls.descending, "descending", "desc"): + return cls.descending + return default + + def get_default_timeout(): """Default value for requests timeout. From 44bdd1104e074847d58e47f874d8429f79fe8fb5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 29 Nov 2024 12:13:07 +0100 Subject: [PATCH 114/135] added sorting and limits to graphql queries --- ayon_api/graphql.py | 101 ++++++++++++++++++++++++++++++++++++++------ 1 file changed, 88 insertions(+), 13 deletions(-) diff --git a/ayon_api/graphql.py b/ayon_api/graphql.py index 6b3545b80..bd6a64efe 100644 --- a/ayon_api/graphql.py +++ b/ayon_api/graphql.py @@ -1,8 +1,10 @@ import copy import numbers from abc import ABC, abstractmethod +from typing import Optional, Iterable from .exceptions import GraphQlQueryFailed +from .utils import SortOrder FIELD_VALUE = object() @@ -76,11 +78,12 @@ class GraphQlQuery: """ offset = 2 - def __init__(self, name): + def __init__(self, name, order=None): self._name = name self._variables = {} self._children = [] self._has_multiple_edge_fields = None + self._order = SortOrder.parse_value(order, SortOrder.ascending) @property def indent(self): @@ -247,7 +250,7 @@ def add_field_with_edges(self, name): GraphQlQueryEdgeField: Created field object. """ - item = GraphQlQueryEdgeField(name, self) + item = GraphQlQueryEdgeField(name, self, self._order) self.add_obj_field(item) return item @@ -261,10 +264,28 @@ def add_field(self, name): GraphQlQueryField: Created field object. 
""" - item = GraphQlQueryField(name, self) + item = GraphQlQueryField(name, self, self._order) self.add_obj_field(item) return item + def get_field_by_keys( + self, keys: Iterable[str] + ) -> Optional["BaseGraphQlQueryField"]: + keys = list(keys) + if not keys: + return None + + key = keys.pop(0) + for child in self._children: + if child.name == key: + return child.get_field_by_keys(keys) + return None + + def get_field_by_path( + self, path: str + ) -> Optional["BaseGraphQlQueryField"]: + return self.get_field_by_keys(path.split("/")) + def calculate_query(self): """Calculate query string which is sent to server. @@ -393,7 +414,7 @@ class BaseGraphQlQueryField(ABC): field. """ - def __init__(self, name, parent): + def __init__(self, name, parent, order): if isinstance(parent, GraphQlQuery): query_item = parent else: @@ -412,9 +433,49 @@ def __init__(self, name, parent): self._path = None + self._limit = None + self._order = order + self._fetched_counter = 0 + def __repr__(self): return "<{} {}>".format(self.__class__.__name__, self.path) + def get_name(self) -> str: + return self._name + + name = property(get_name) + + def get_field_by_keys(self, keys: Iterable[str]): + keys = list(keys) + if not keys: + return self + + key = keys.pop(0) + for child in self._children: + if child.name == key: + return child.get_field_by_keys(keys) + return None + + def set_limit(self, limit: Optional[int]): + self._limit = limit + + def set_order(self, order): + order = SortOrder.parse_value(order) + if order is None: + raise ValueError( + f"Got invalid value {order}." + f" Expected {SortOrder.ascending} or {SortOrder.descending}" + ) + self._order = order + + def set_ascending_order(self, enabled=True): + self.set_order( + SortOrder.ascending if enabled else SortOrder.descending + ) + + def set_descending_order(self, enabled=True): + self.set_ascending_order(not enabled) + def add_variable(self, key, value_type, value=None): """Add variable to query. 
@@ -575,12 +636,12 @@ def add_obj_field(self, field): field.set_parent(self) def add_field_with_edges(self, name): - item = GraphQlQueryEdgeField(name, self) + item = GraphQlQueryEdgeField(name, self, self._order) self.add_obj_field(item) return item def add_field(self, name): - item = GraphQlQueryField(name, self) + item = GraphQlQueryField(name, self, self._order) self.add_obj_field(item) return item @@ -728,7 +789,7 @@ class GraphQlQueryEdgeField(BaseGraphQlQueryField): has_edges = True def __init__(self, *args, **kwargs): - super(GraphQlQueryEdgeField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._cursor = None self._edge_children = [] @@ -738,7 +799,7 @@ def child_indent(self): return self.indent + offset def _children_iter(self): - for child in super(GraphQlQueryEdgeField, self)._children_iter(): + for child in super()._children_iter(): yield child for child in self._edge_children: @@ -748,7 +809,7 @@ def add_obj_field(self, field): if field in self._edge_children: return - super(GraphQlQueryEdgeField, self).add_obj_field(field) + super().add_obj_field(field) def add_obj_edge_field(self, field): if field in self._edge_children or field in self._children: @@ -758,7 +819,7 @@ def add_obj_edge_field(self, field): field.set_parent(self) def add_edge_field(self, name): - item = GraphQlQueryField(name, self) + item = GraphQlQueryField(name, self, self._order) self.add_obj_edge_field(item) return item @@ -767,7 +828,7 @@ def reset_cursor(self): self._cursor = None self._need_query = True - super(GraphQlQueryEdgeField, self).reset_cursor() + super().reset_cursor() def parse_result(self, data, output, progress_data): if not isinstance(data, dict): @@ -804,6 +865,10 @@ def parse_result(self, data, output, progress_data): if not edges: self._fake_children_parse() + self._fetched_counter += len(edges) + if self._limit and self._fetched_counter >= self._limit: + self._need_query = False + for edge in edges: if not handle_cursors: edge_value = {} @@ -839,9 +904,19 @@ def _get_cursor_key(self): return "{}/__cursor__".format(self.path) def get_filters(self): - filters = super(GraphQlQueryEdgeField, self).get_filters() + filters = super().get_filters() + limit_key = "first" + if self._order == SortOrder.descending: + limit_key = "last" + + limit_amount = 300 + if self._limit: + total = self._fetched_counter + limit_amount + if total > self._limit: + limit_amount = self._limit - self._fetched_counter + + filters[limit_key] = limit_amount - filters["first"] = 300 if self._cursor: filters["after"] = self._cursor return filters From eea70f4891b3c66c0ab5f788ec7d9e500da90f7b Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 29 Nov 2024 12:13:23 +0100 Subject: [PATCH 115/135] allow to define order in some queries --- ayon_api/graphql_queries.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/ayon_api/graphql_queries.py b/ayon_api/graphql_queries.py index 49acc299e..b8c0e06e1 100644 --- a/ayon_api/graphql_queries.py +++ b/ayon_api/graphql_queries.py @@ -570,8 +570,8 @@ def workfiles_info_graphql_query(fields): return query -def events_graphql_query(fields, use_states=False): - query = GraphQlQuery("Events") +def events_graphql_query(fields, order, use_states=False): + query = GraphQlQuery("Events", order=order) topics_var = query.add_variable("eventTopics", "[String!]") ids_var = query.add_variable("eventIds", "[String!]") projects_var = query.add_variable("projectNames", "[String!]") @@ -581,6 +581,8 
@@ def events_graphql_query(fields, use_states=False): has_children_var = query.add_variable("hasChildrenFilter", "Boolean!") newer_than_var = query.add_variable("newerThanFilter", "String!") older_than_var = query.add_variable("olderThanFilter", "String!") + first_var = query.add_variable("firstFilter", "Int") + last_var = query.add_variable("lastFilter", "Int") statuses_filter_name = "statuses" if use_states: @@ -595,6 +597,8 @@ def events_graphql_query(fields, use_states=False): events_field.set_filter("hasChildren", has_children_var) events_field.set_filter("newerThan", newer_than_var) events_field.set_filter("olderThan", older_than_var) + events_field.set_filter("first", first_var) + events_field.set_filter("last", last_var) nested_fields = fields_to_dict(set(fields)) @@ -641,8 +645,8 @@ def users_graphql_query(fields): return query -def activities_graphql_query(fields): - query = GraphQlQuery("Activities") +def activities_graphql_query(fields, order): + query = GraphQlQuery("Activities", order=order) project_name_var = query.add_variable("projectName", "String!") activity_ids_var = query.add_variable("activityIds", "[String!]") activity_types_var = query.add_variable("activityTypes", "[String!]") From 38b45149e59349c0f14a0f894dea7cfb896fe34d Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 29 Nov 2024 12:14:43 +0100 Subject: [PATCH 116/135] added option to set sorting and limit for events and activities --- ayon_api/server_api.py | 25 +++++++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 184560a5d..918a82c35 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -96,6 +96,7 @@ get_default_site_id, NOT_SET, get_media_mime_type, + SortOrder, ) if typing.TYPE_CHECKING: @@ -1466,6 +1467,8 @@ def get_events( older_than: Optional[str] = None, fields: Optional[Iterable[str]] = None, states: Optional[Iterable[str]] = None, + limit: Optional[int] = None, + order: Optional[SortOrder] = None, ): """Get events from server with filtering options. @@ -1491,6 +1494,10 @@ def get_events( for each event. states (Optional[Iterable[str]]): DEPRECATED Filtering by states. Use 'statuses' instead. + limit (Optional[int]): Limit number of events to be fetched. + order (Optional[SortOrder]): Order activities in ascending + or descending order. It is recommended to set 'limit' + when used. Returns: Generator[dict[str, Any]]: Available events matching filters. @@ -1536,10 +1543,14 @@ def get_events( major, minor, patch, _, _ = self.server_version_tuple use_states = (major, minor, patch) <= (1, 5, 6) - query = events_graphql_query(set(fields), use_states) + query = events_graphql_query(set(fields), order, use_states) for attr, filter_value in filters.items(): query.set_variable_value(attr, filter_value) + if limit: + events_field = query.get_field_by_path("events") + events_field.set_limit(limit) + for parsed_data in query.continuous_query(self): for event in parsed_data["events"]: yield event @@ -1822,6 +1833,8 @@ def get_activities( changed_before: Optional[str] = None, reference_types: Optional[Iterable["ActivityReferenceType"]] = None, fields: Optional[Iterable[str]] = None, + limit: Optional[int] = None, + order: Optional[SortOrder] = None, ) -> Generator[Dict[str, Any], None, None]: """Get activities from server with filtering options. @@ -1840,6 +1853,10 @@ def get_activities( Reference types filter. Defaults to `['origin']`. 
fields (Optional[Iterable[str]]): Fields that should be received for each activity. + limit (Optional[int]): Limit number of activities to be fetched. + order (Optional[SortOrder]): Order activities in ascending + or descending order. It is recommended to set 'limit' + when used. Returns: Generator[dict[str, Any]]: Available activities matching filters. @@ -1874,10 +1891,14 @@ def get_activities( if not fields: fields = self.get_default_fields_for_type("activity") - query = activities_graphql_query(set(fields)) + query = activities_graphql_query(set(fields), order) for attr, filter_value in filters.items(): query.set_variable_value(attr, filter_value) + if limit: + activities_field = query.get_field_by_path("activities") + activities_field.set_limit(limit) + for parsed_data in query.continuous_query(self): for activity in parsed_data["project"]["activities"]: activity_data = activity.get("activityData") From 0cfaf63606c722337a0b0e7503571cd3d7e8fc2a Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Fri, 29 Nov 2024 16:28:28 +0100 Subject: [PATCH 117/135] New fixtures - moved from test_server --- tests/conftest.py | 53 ++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 50 insertions(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index ecb3dd013..20982667c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,6 +7,7 @@ create_project, update_project, delete_project, + get_events, get_folders, get_products, get_tasks @@ -76,10 +77,10 @@ def clean_project(project_name_fixture): project_name_fixture ): # delete tasks - for task in get_tasks( + for task in list(get_tasks( project_name_fixture, folder_ids=[folder["id"]] - ): + )): hub.delete_entity(hub.get_task_by_id(task["id"])) # delete products @@ -90,11 +91,43 @@ def clean_project(project_name_fixture): hub.delete_entity(product_entity) entity = hub.get_folder_by_id(folder["id"]) - hub.delete_entity(entity) + if not entity: + continue + hub.delete_entity(entity) hub.commit_changes() +@pytest.fixture(params=[3, 4, 5]) +def event_ids(request): + length = request.param + if length == 0: + return None + + recent_events = list(get_events( + newer_than=(datetime.now(timezone.utc) - timedelta(days=5)).isoformat() + )) + + return [recent_event["id"] for recent_event in recent_events[:length]] + + +@pytest.fixture +def event_id(): + """Fixture that retrieves the ID of a recent event created within + the last 5 days. + + Returns: + - The event ID of the most recent event within the last 5 days + if available. + - `None` if no recent events are found within this time frame. 
+ + """ + recent_events = list(get_events( + newer_than=(datetime.now(timezone.utc) - timedelta(days=5)).isoformat() + )) + return recent_events[0]["id"] if recent_events else None + + class TestEventFilters: project_names = [ (None), @@ -248,3 +281,17 @@ class TestUpdateEventData: (0), (10), ] + + +class TestProductData: + names = [ + ("test_name"), + ("test_123"), + ] + + product_types = [ + ("animation"), + ("camera"), + ("render"), + ("workfile"), + ] From 3cb463d8d4b7ee453406c70185bb1b52fc2ba3ac Mon Sep 17 00:00:00 2001 From: Tadeas Hejnic Date: Fri, 29 Nov 2024 16:32:16 +0100 Subject: [PATCH 118/135] Small adjustments --- tests/test_entity_hub.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/test_entity_hub.py b/tests/test_entity_hub.py index 1d4e4f6b0..76329c590 100644 --- a/tests/test_entity_hub.py +++ b/tests/test_entity_hub.py @@ -1266,7 +1266,9 @@ def test_status_definition_on_project( assert statuses.get(status_name) is None -def test_status_definition_on_project_with_invalid_values(project_entity_fixture): +def test_status_definition_on_project_with_invalid_values( + project_entity_fixture +): hub = EntityHub(project_entity_fixture["name"]) statuses = hub.project_entity.get_statuses() From 84699bf6c3a31d529b677f599922ed0d4a532a1c Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 29 Nov 2024 16:47:34 +0100 Subject: [PATCH 119/135] fix args order and docstrings --- ayon_api/server_api.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 918a82c35..5f1355079 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -1466,9 +1466,9 @@ def get_events( newer_than: Optional[str] = None, older_than: Optional[str] = None, fields: Optional[Iterable[str]] = None, - states: Optional[Iterable[str]] = None, limit: Optional[int] = None, order: Optional[SortOrder] = None, + states: Optional[Iterable[str]] = None, ): """Get events from server with filtering options. @@ -1492,12 +1492,12 @@ def get_events( iso datetime string. fields (Optional[Iterable[str]]): Fields that should be received for each event. - states (Optional[Iterable[str]]): DEPRECATED Filtering by states. - Use 'statuses' instead. limit (Optional[int]): Limit number of events to be fetched. - order (Optional[SortOrder]): Order activities in ascending + order (Optional[SortOrder]): Order events in ascending or descending order. It is recommended to set 'limit' - when used. + when used descending. + states (Optional[Iterable[str]]): DEPRECATED Filtering by states. + Use 'statuses' instead. Returns: Generator[dict[str, Any]]: Available events matching filters. @@ -1856,7 +1856,7 @@ def get_activities( limit (Optional[int]): Limit number of activities to be fetched. order (Optional[SortOrder]): Order activities in ascending or descending order. It is recommended to set 'limit' - when used. + when used descending. Returns: Generator[dict[str, Any]]: Available activities matching filters. 
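
Patches 115, 116 and 119 together let callers bound and order the event and activity queries: the edge fields translate a limit and SortOrder into the GraphQL "first"/"last" pagination filters, and ServerAPI.get_events / get_activities expose them as keyword arguments. A minimal usage sketch under those patches, assuming a connection configured the usual way (SortOrder lives in ayon_api.utils, as the imports in these diffs show):

    from ayon_api import get_server_api_connection
    from ayon_api.utils import SortOrder

    con = get_server_api_connection()

    # Only the 50 newest events of one topic. The docstrings above
    # recommend setting 'limit' whenever descending order is used,
    # otherwise the query keeps paginating through older pages.
    for event in con.get_events(
        topics=["entity.task.created"],
        limit=50,
        order=SortOrder.descending,
        fields={"id", "createdAt"},
    ):
        print(event["id"], event["createdAt"])

Descending order maps onto the GraphQL "last" filter added in patch 115, which is why the docstrings tie it to 'limit'.
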
From 78f1b27c8261c13e4de14086a3c6c6c96dc4bbd5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 29 Nov 2024 16:57:21 +0100 Subject: [PATCH 120/135] remove unnecessary filters --- ayon_api/graphql_queries.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/ayon_api/graphql_queries.py b/ayon_api/graphql_queries.py index b8c0e06e1..6ef2ca341 100644 --- a/ayon_api/graphql_queries.py +++ b/ayon_api/graphql_queries.py @@ -581,8 +581,6 @@ def events_graphql_query(fields, order, use_states=False): has_children_var = query.add_variable("hasChildrenFilter", "Boolean!") newer_than_var = query.add_variable("newerThanFilter", "String!") older_than_var = query.add_variable("olderThanFilter", "String!") - first_var = query.add_variable("firstFilter", "Int") - last_var = query.add_variable("lastFilter", "Int") statuses_filter_name = "statuses" if use_states: @@ -597,8 +595,6 @@ def events_graphql_query(fields, order, use_states=False): events_field.set_filter("hasChildren", has_children_var) events_field.set_filter("newerThan", newer_than_var) events_field.set_filter("olderThan", older_than_var) - events_field.set_filter("first", first_var) - events_field.set_filter("last", last_var) nested_fields = fields_to_dict(set(fields)) From caea00bd461e08bcdb4a5c2070a19f552f7fb199 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 29 Nov 2024 17:14:55 +0100 Subject: [PATCH 121/135] fix project cleanup --- tests/conftest.py | 58 ++++++++++++++++++++++++++++------------------- 1 file changed, 35 insertions(+), 23 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 71c8df37e..78a6038fe 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -71,30 +71,42 @@ def project_entity_fixture(project_name_fixture): @pytest.fixture def clean_project(project_name_fixture): hub = EntityHub(project_name_fixture) + hub.fetch_hierarchy_entities() + + folder_ids = { + folder["id"] + for folder in get_folders(project_name_fixture, fields={"id"}) + } + task_ids = { + task["id"] + for task in get_tasks( + project_name_fixture, folder_ids=folder_ids, fields={"id"} + ) + } + product_ids = { + product["id"] + for product in get_products( + project_name_fixture, folder_ids=folder_ids, fields={"id"} + ) + } + for product_id in product_ids: + product = hub.get_product_by_id(product_id) + if product is not None: + hub.delete_entity(product) - for folder in get_folders( - project_name_fixture - ): - # delete tasks - for task in list(get_tasks( - project_name_fixture, - folder_ids=[folder["id"]] - )): - hub.delete_entity(hub.get_task_by_id(task["id"])) - - # delete products - for product in list(get_products( - project_name_fixture, folder_ids=[folder["id"]] - )): - product_entity = hub.get_product_by_id(product["id"]) - hub.delete_entity(product_entity) - - entity = hub.get_folder_by_id(folder["id"]) - if not entity: - continue - - hub.delete_entity(entity) - hub.commit_changes() + for task_id in task_ids: + task = hub.get_task_by_id(task_id) + if task is not None: + hub.delete_entity(task) + + hub.commit_changes() + + for folder_id in folder_ids: + folder = hub.get_folder_by_id(folder_id) + if folder is not None: + hub.delete_entity(folder) + + hub.commit_changes() @pytest.fixture(params=[3, 4, 5]) From 456a5cff036ffb8dd2b22e50b5302153e386b375 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 29 Nov 2024 17:15:09 +0100 Subject: [PATCH 122/135] comment out not implemented 
test --- tests/test_entity_hub.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_entity_hub.py b/tests/test_entity_hub.py index 76329c590..5386cae44 100644 --- a/tests/test_entity_hub.py +++ b/tests/test_entity_hub.py @@ -1216,8 +1216,8 @@ def test_set_tag_on_version(project_entity_fixture, tags): assert tag in version.get_tags() -def test_set_invalid_tag_on_version(): - raise NotImplementedError() +# def test_set_invalid_tag_on_version(): +# raise NotImplementedError() test_statuses = [ From adb942d391e4ea70611acdecad7e699910677a80 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 29 Nov 2024 17:15:21 +0100 Subject: [PATCH 123/135] use statuses instead of states --- tests/test_get_events.py | 10 +++++----- tests/test_server.py | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/test_get_events.py b/tests/test_get_events.py index dd315fe97..27451fcda 100644 --- a/tests/test_get_events.py +++ b/tests/test_get_events.py @@ -63,7 +63,7 @@ def test_get_events_all_filter_combinations( topics=topics, event_ids=event_ids, project_names=project_names, - states=states, + statuses=states, users=users, include_logs=include_logs, has_children=has_children, @@ -97,7 +97,7 @@ def test_get_events_all_filter_combinations( list(get_events( topics=[topic], project_names=project_names, - states=states, + statuses=states, users=users, include_logs=include_logs, has_children=has_children, @@ -111,7 +111,7 @@ def test_get_events_all_filter_combinations( list(get_events( topics=topics, project_names=[project_name], - states=states, + statuses=states, users=users, include_logs=include_logs, has_children=has_children, @@ -125,7 +125,7 @@ def test_get_events_all_filter_combinations( list(get_events( topics=topics, project_names=project_names, - states=[state], + statuses=[state], users=users, include_logs=include_logs, has_children=has_children, @@ -139,7 +139,7 @@ def test_get_events_all_filter_combinations( list(get_events( topics=topics, project_names=project_names, - states=states, + statuses=states, users=[user], include_logs=include_logs, has_children=has_children, diff --git a/tests/test_server.py b/tests/test_server.py index 8ebb7d3e1..be2f493ad 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -340,7 +340,7 @@ def test_get_events_invalid_data( res = list(get_events( topics=topics, project_names=project_names, - states=states, + statuses=states, users=users, newer_than=newer_than )) From 7c3cc839328958838be95f3b243efe97e4af8d98 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 29 Nov 2024 19:06:09 +0100 Subject: [PATCH 124/135] update public api arguments --- ayon_api/_api.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/ayon_api/_api.py b/ayon_api/_api.py index d18a1b67e..726ee9f7a 100644 --- a/ayon_api/_api.py +++ b/ayon_api/_api.py @@ -881,6 +881,8 @@ def get_events( newer_than: Optional[str] = None, older_than: Optional[str] = None, fields: Optional[Iterable[str]] = None, + limit: Optional[int] = None, + order: "Optional[ayon_api.utils.SortOrder]" = None, states: Optional[Iterable[str]] = None, ): """Get events from server with filtering options. @@ -905,6 +907,10 @@ def get_events( iso datetime string. fields (Optional[Iterable[str]]): Fields that should be received for each event. + limit (Optional[int]): Limit number of events to be fetched. 
+ order (Optional[SortOrder]): Order events in ascending + or descending order. It is recommended to set 'limit' + when used descending. states (Optional[Iterable[str]]): DEPRECATED Filtering by states. Use 'statuses' instead. @@ -924,6 +930,8 @@ def get_events( newer_than=newer_than, older_than=older_than, fields=fields, + limit=limit, + order=order, states=states, ) @@ -1140,6 +1148,8 @@ def get_activities( changed_before: Optional[str] = None, reference_types: Optional[Iterable["ActivityReferenceType"]] = None, fields: Optional[Iterable[str]] = None, + limit: Optional[int] = None, + order: "Optional[ayon_api.utils.SortOrder]" = None, ) -> Generator[Dict[str, Any], None, None]: """Get activities from server with filtering options. @@ -1158,6 +1168,10 @@ def get_activities( Reference types filter. Defaults to `['origin']`. fields (Optional[Iterable[str]]): Fields that should be received for each activity. + limit (Optional[int]): Limit number of activities to be fetched. + order (Optional[SortOrder]): Order activities in ascending + or descending order. It is recommended to set 'limit' + when used descending. Returns: Generator[dict[str, Any]]: Available activities matching filters. @@ -1175,6 +1189,8 @@ def get_activities( changed_before=changed_before, reference_types=reference_types, fields=fields, + limit=limit, + order=order, ) From 8791324c00016db4d84cc14f24512641d643273f Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 29 Nov 2024 19:07:42 +0100 Subject: [PATCH 125/135] fix version comparison --- ayon_api/server_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 5f1355079..7a8cd47e3 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -1799,7 +1799,7 @@ def enroll_event_job( ): kwargs["ignoreOlderThan"] = ignore_older_than if ignore_sender_types is not None: - if (major, minor, patch) > (1, 5, 4): + if (major, minor, patch) <= (1, 5, 4): raise ValueError( "Ignore sender types are not supported for" f" your version of server {self.server_version}." 
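
Patch 124 carries the same two keywords through the module-level wrappers, so scripts that never touch ServerAPI directly get them as well. A short sketch under that assumption (the project name is purely illustrative and taken from the test fixtures):

    import ayon_api
    from ayon_api.utils import SortOrder

    # Illustrative project name only; any existing project works.
    project_name = "demo_Big_Feature"

    # Ten most recently changed activities of the project, newest first;
    # 'limit' keeps the descending query from walking the whole history.
    latest_activities = list(ayon_api.get_activities(
        project_name,
        limit=10,
        order=SortOrder.descending,
    ))
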
From 11a5432dc9360bad334aa3b37ded1f21b73ada1d Mon Sep 17 00:00:00 2001
From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com>
Date: Fri, 29 Nov 2024 19:22:05 +0100
Subject: [PATCH 126/135] fix automated api again

---
 automated_api.py | 11 +++++++++--
 ayon_api/_api.py |  5 +++--
 2 files changed, 12 insertions(+), 4 deletions(-)

diff --git a/automated_api.py b/automated_api.py
index a764663c2..73b33d668 100644
--- a/automated_api.py
+++ b/automated_api.py
@@ -120,15 +120,22 @@ def _get_typehint(annotation, api_globals):
 
     typehint = (
         str(annotation)
-        .replace("typing.", "")
         .replace("NoneType", "None")
     )
+    full_path_regex = re.compile(
+        r"(?P<full>(?P<name>[a-zA-Z0-9_\.]+))"
+    )
+    for item in full_path_regex.finditer(str(typehint)):
+        groups = item.groupdict()
+        name = groups["name"].split(".")[-1]
+        typehint = typehint.replace(groups["full"], name)
+
     forwardref_regex = re.compile(
         r"(?P<full>ForwardRef\('(?P<name>[a-zA-Z0-9]+)'\))"
     )
     for item in forwardref_regex.finditer(str(typehint)):
         groups = item.groupdict()
-        name = groups["name"]
+        name = groups["name"].split(".")[-1]
         typehint = typehint.replace(groups["full"], f'"{name}"')
 
     try:
diff --git a/ayon_api/_api.py b/ayon_api/_api.py
index 726ee9f7a..1ef1e8d2b 100644
--- a/ayon_api/_api.py
+++ b/ayon_api/_api.py
@@ -22,6 +22,7 @@
 from .exceptions import FailedServiceInit
 from .utils import (
     NOT_SET,
+    SortOrder,
     get_default_settings_variant as _get_default_settings_variant,
 )
 
@@ -882,7 +883,7 @@ def get_events(
     older_than: Optional[str] = None,
     fields: Optional[Iterable[str]] = None,
     limit: Optional[int] = None,
-    order: "Optional[ayon_api.utils.SortOrder]" = None,
+    order: Optional[SortOrder] = None,
     states: Optional[Iterable[str]] = None,
 ):
     """Get events from server with filtering options.
@@ -1149,7 +1150,7 @@ def get_activities(
     reference_types: Optional[Iterable["ActivityReferenceType"]] = None,
     fields: Optional[Iterable[str]] = None,
     limit: Optional[int] = None,
-    order: "Optional[ayon_api.utils.SortOrder]" = None,
+    order: Optional[SortOrder] = None,
 ) -> Generator[Dict[str, Any], None, None]:
     """Get activities from server with filtering options.
 

From d23a6bd3560c3144a33f49d2c87eae7a039ccdbf Mon Sep 17 00:00:00 2001
From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com>
Date: Mon, 2 Dec 2024 11:48:02 +0100
Subject: [PATCH 127/135] fix typo

---
 ayon_api/server_api.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py
index 7a8cd47e3..0410d037c 100644
--- a/ayon_api/server_api.py
+++ b/ayon_api/server_api.py
@@ -5707,7 +5707,7 @@ def get_products(
             warnings.warn(
                 (
                     "'own_attributes' is not supported for products. The"
-                    " argument will be removed form function signature in"
+                    " argument will be removed from function signature in"
                     " future (apx. version 1.0.10 or 1.1.0)."
), DeprecationWarning From 218ab40b6ce8e67cf0b4c755b842b3e63018e166 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 2 Dec 2024 11:50:48 +0100 Subject: [PATCH 128/135] remove trailing space --- tests/test_entity_hub.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_entity_hub.py b/tests/test_entity_hub.py index 5386cae44..28a3765d0 100644 --- a/tests/test_entity_hub.py +++ b/tests/test_entity_hub.py @@ -827,7 +827,7 @@ def test_create_delete_products_bonus( Verifies: - the product is created and can be retrieved by its ID - the product name, type, and folder ID are set correctly - - the product is deleted with a new EntityHub and cannot be retrieved + - the product is deleted with a new EntityHub and cannot be retrieved by its ID """ project_name = project_entity_fixture["name"] From 92a37e4d990787c8d6a06fe23b8ae4618bde25fe Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 2 Dec 2024 12:42:31 +0100 Subject: [PATCH 129/135] fix datetime comparison --- tests/test_server.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_server.py b/tests/test_server.py index be2f493ad..f697ed955 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -292,12 +292,12 @@ def test_get_events_timestamps(newer_than, older_than): for item in res: assert (newer_than is None) or ( - datetime.fromisoformat(item.get("createdAt") - > datetime.fromisoformat(newer_than)) + datetime.fromisoformat(item.get("createdAt")) + > datetime.fromisoformat(newer_than) ) assert (older_than is None) or ( - datetime.fromisoformat(item.get("createdAt") - < datetime.fromisoformat(older_than)) + datetime.fromisoformat(item.get("createdAt")) + < datetime.fromisoformat(older_than) ) From d7bfcf7c77ab2c1ca735aeb22b8bdba16369224d Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 2 Dec 2024 12:42:52 +0100 Subject: [PATCH 130/135] removed None from project and topics filters --- tests/conftest.py | 2 -- tests/test_server.py | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 78a6038fe..bac4f9c03 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -141,7 +141,6 @@ def event_id(): class TestEventFilters: project_names = [ - (None), ([]), (["demo_Big_Episodic"]), (["demo_Big_Feature"]), @@ -156,7 +155,6 @@ class TestEventFilters: ] topics = [ - (None), ([]), (["entity.folder.attrib_changed"]), (["entity.task.created", "entity.project.created"]), diff --git a/tests/test_server.py b/tests/test_server.py index f697ed955..e3ed8b426 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -180,7 +180,7 @@ def test_get_events_project_name(project_names): assert item.get("project") in project_names # test if the legths are equal - assert project_names is None or len(res) == sum(len( + assert len(res) == sum(len( list(get_events( project_names=[project_name] )) or [] From e985e57ea1f18c478e49b0d10baa278977e5643d Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 2 Dec 2024 12:48:45 +0100 Subject: [PATCH 131/135] delete addon from server only if is there --- tests/test_server.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/test_server.py b/tests/test_server.py index e3ed8b426..d791348d8 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -820,8 +820,11 @@ def 
test_addon_methods(): addon_version = "1.0.0" download_path = "tests/resources/tmp_downloads" private_file_path = os.path.join(download_path, "ayon-symbol.png") + for addon in get_addons_info()["addons"]: + if addon["name"] == addon_name and addon["version"] == addon_version: + delete_addon_version(addon_name, addon_version) + break - delete_addon_version(addon_name, addon_version) assert all( addon_name != addon["name"] for addon in get_addons_info()["addons"] ) From 67410a1abe0f3bce8dc150b0c8e27c693c3da867 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 2 Dec 2024 13:45:16 +0100 Subject: [PATCH 132/135] run create package automatically and wait for 60 for server to restart --- tests/test_server.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/tests/test_server.py b/tests/test_server.py index d791348d8..9e278d016 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -7,9 +7,12 @@ from datetime import datetime, timedelta, timezone import os -import pytest +import sys +import subprocess import time +import pytest + from ayon_api import ( close_connection, create_folder, @@ -829,19 +832,25 @@ def test_addon_methods(): addon_name != addon["name"] for addon in get_addons_info()["addons"] ) + subprocess.run([sys.executable, "tests/resources/addon/create_package.py"]) try: _ = upload_addon_zip("tests/resources/addon/package/tests-1.0.0.zip") trigger_server_restart() # need to wait at least 0.1 sec. to restart server + last_check = time.time() time.sleep(0.5) while True: try: addons = get_addons_info()["addons"] break except exceptions.ServerError as exc: - assert "Connection timed out" in str(exc) + pass + + if time.time() - last_check > 60: + assert False, "Server timeout" + time.sleep(0.5) assert any(addon_name == addon["name"] for addon in addons) From 2cd81c6bb14158bd27b7a8dbad345c7fb87dbce5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 2 Dec 2024 14:10:53 +0100 Subject: [PATCH 133/135] fail after 60 seconds --- tests/test_server.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/test_server.py b/tests/test_server.py index 9e278d016..9f2ae2abb 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -846,10 +846,8 @@ def test_addon_methods(): addons = get_addons_info()["addons"] break except exceptions.ServerError as exc: - pass - - if time.time() - last_check > 60: - assert False, "Server timeout" + if time.time() - last_check > 60: + raise AssertionError(f"Server restart failed {exc}") time.sleep(0.5) assert any(addon_name == addon["name"] for addon in addons) From d8226e6f7c589c35df09d1c90110460761f57ac0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 2 Dec 2024 16:45:33 +0100 Subject: [PATCH 134/135] small fixes --- ayon_api/server_api.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/ayon_api/server_api.py b/ayon_api/server_api.py index 0410d037c..cd0ff127a 100644 --- a/ayon_api/server_api.py +++ b/ayon_api/server_api.py @@ -2121,9 +2121,6 @@ def download_file_to_stream( if not chunk_size: chunk_size = self.default_download_chunk_size - if endpoint.startswith(self._base_url): - url = endpoint - url = self._endpoint_to_url(endpoint) if progress is None: @@ -2270,7 +2267,7 @@ def _upload_file( return response def upload_file_from_stream( - self, endpoint, stream, progress, request_type, **kwargs + self, endpoint, stream, 
progress=None, request_type=None, **kwargs ): """Upload file to server from bytes. From 57c4df3ebe8aff2fd54ac8471c78191cdd2af76b Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 2 Dec 2024 16:46:11 +0100 Subject: [PATCH 135/135] bump version to 1.0.11 --- ayon_api/version.py | 2 +- pyproject.toml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ayon_api/version.py b/ayon_api/version.py index a27cffcec..288f61e02 100644 --- a/ayon_api/version.py +++ b/ayon_api/version.py @@ -1,2 +1,2 @@ """Package declaring Python API for AYON server.""" -__version__ = "1.0.11-dev.1" +__version__ = "1.0.11" diff --git a/pyproject.toml b/pyproject.toml index 680d19b1f..2c4adab1e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "ayon-python-api" -version = "1.0.11-dev.1" +version = "1.0.11" description = "AYON Python API" license = {file = "LICENSE"} readme = {file = "README.md", content-type = "text/markdown"} @@ -29,7 +29,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "ayon-python-api" -version = "1.0.11-dev.1" +version = "1.0.11" description = "AYON Python API" authors = [ "ynput.io "
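
Beyond the version bump, the test changes in patches 131-133 settle on a retry-with-deadline loop for riding out the server restart that an addon upload triggers. Pulled out of the test suite, the same pattern might be wrapped roughly like this; the helper name and the TimeoutError are illustrative, while the 60-second budget and the calls mirror the test:

    import time

    import ayon_api
    from ayon_api import exceptions


    def wait_for_server_restart(timeout=60.0, poll_interval=0.5):
        # Trigger the restart, then poll a cheap endpoint until the
        # server answers again or the deadline passes, the same way the
        # addon test above keeps retrying get_addons_info().
        ayon_api.trigger_server_restart()
        deadline = time.time() + timeout
        time.sleep(poll_interval)
        while True:
            try:
                return ayon_api.get_addons_info()
            except exceptions.ServerError as exc:
                if time.time() > deadline:
                    raise TimeoutError(
                        f"Server restart failed: {exc}"
                    ) from exc
            time.sleep(poll_interval)
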